From c6b37d0d541bd3ffa7805116e02356501c22e7a7 Mon Sep 17 00:00:00 2001 From: Mitchell Gale Date: Tue, 22 Aug 2023 16:55:23 -0700 Subject: [PATCH] [Backport 2.x] [Spotless] Entire project running spotless (#2016) * Spotless apply on entire 2.x branch Signed-off-by: Mitchell Gale * Removing checkstyle Signed-off-by: Mitchell Gale --------- Signed-off-by: Mitchell Gale --- .../ComparisonOperatorBenchmark.java | 6 +- build.gradle | 2 +- .../antlr/CaseInsensitiveCharStream.java | 1 - .../antlr/SyntaxAnalysisErrorListener.java | 1 - .../common/antlr/SyntaxCheckException.java | 1 - .../AwsSigningInterceptor.java | 32 +- .../BasicAuthenticationInterceptor.java | 6 +- .../opensearch/sql/common/grok/Converter.java | 57 +- .../org/opensearch/sql/common/grok/Grok.java | 66 +- .../sql/common/grok/GrokCompiler.java | 62 +- .../opensearch/sql/common/grok/GrokUtils.java | 47 +- .../org/opensearch/sql/common/grok/Match.java | 143 +- .../common/grok/exception/GrokException.java | 12 +- .../sql/common/response/ResponseListener.java | 1 - .../sql/common/setting/LegacySettings.java | 24 +- .../sql/common/setting/Settings.java | 24 +- .../sql/common/utils/QueryContext.java | 28 +- .../sql/common/utils/StringUtils.java | 34 +- .../AwsSigningInterceptorTest.java | 65 +- .../BasicAuthenticationInterceptorTest.java | 25 +- .../sql/common/grok/ApacheDataTypeTest.java | 29 +- .../sql/common/grok/ApacheTest.java | 2 - .../opensearch/sql/common/grok/BasicTest.java | 8 +- .../sql/common/grok/CaptureTest.java | 6 +- .../common/grok/GrokDocumentationTest.java | 44 +- .../opensearch/sql/common/grok/GrokTest.java | 175 +- .../sql/common/grok/MessagesTest.java | 7 +- .../sql/common/grok/ResourceManager.java | 4 +- .../datasources/auth/AuthenticationType.java | 4 +- .../DataSourceUserAuthorizationHelper.java | 5 +- ...DataSourceUserAuthorizationHelperImpl.java | 23 +- .../sql/datasources/encryptor/Encryptor.java | 1 - .../datasources/encryptor/EncryptorImpl.java | 42 +- 
.../DataSourceNotFoundException.java | 5 +- .../datasources/exceptions/ErrorMessage.java | 18 +- .../CreateDataSourceActionRequest.java | 16 +- .../CreateDataSourceActionResponse.java | 6 +- .../DeleteDataSourceActionRequest.java | 12 +- .../DeleteDataSourceActionResponse.java | 4 +- .../transport/GetDataSourceActionRequest.java | 13 +- .../GetDataSourceActionResponse.java | 4 +- .../UpdateDataSourceActionRequest.java | 12 +- .../UpdateDataSourceActionResponse.java | 6 +- .../rest/RestDataSourceQueryAction.java | 206 +- .../service/DataSourceLoaderCache.java | 5 +- .../service/DataSourceLoaderCacheImpl.java | 25 +- .../service/DataSourceMetadataStorage.java | 19 +- .../service/DataSourceServiceImpl.java | 41 +- .../OpenSearchDataSourceMetadataStorage.java | 142 +- .../TransportCreateDataSourceAction.java | 32 +- .../TransportDeleteDataSourceAction.java | 32 +- .../TransportGetDataSourceAction.java | 61 +- .../TransportUpdateDataSourceAction.java | 28 +- .../utils/XContentParserUtils.java | 16 +- .../auth/AuthenticationTypeTest.java | 1 - ...SourceUserAuthorizationHelperImplTest.java | 60 +- .../encryptor/EncryptorImplTest.java | 66 +- .../DataSourceLoaderCacheImplTest.java | 11 +- .../service/DataSourceServiceImplTest.java | 235 +- ...enSearchDataSourceMetadataStorageTest.java | 268 +- .../TransportCreateDataSourceActionTest.java | 38 +- .../TransportDeleteDataSourceActionTest.java | 34 +- .../TransportGetDataSourceActionTest.java | 49 +- .../TransportUpdateDataSourceActionTest.java | 37 +- .../sql/datasources/utils/SchedulerTest.java | 17 +- .../utils/XContentParserUtilsTest.java | 32 +- .../sql/bwc/SQLBackwardsCompatibilityIT.java | 383 +-- .../sql/correctness/CorrectnessIT.java | 30 +- .../sql/correctness/TestConfig.java | 58 +- .../sql/correctness/report/ErrorTestCase.java | 10 +- .../correctness/report/FailedTestCase.java | 27 +- .../correctness/report/SuccessTestCase.java | 5 +- .../correctness/report/TestCaseReport.java | 15 +- 
.../sql/correctness/report/TestReport.java | 6 +- .../sql/correctness/report/TestSummary.java | 6 +- .../correctness/runner/ComparisonTest.java | 40 +- .../runner/connection/DBConnection.java | 20 +- .../runner/connection/JDBCConnection.java | 62 +- .../connection/OpenSearchConnection.java | 16 +- .../runner/resultset/DBResult.java | 93 +- .../sql/correctness/runner/resultset/Row.java | 6 +- .../correctness/runner/resultset/Type.java | 14 +- .../correctness/tests/ComparisonTestTest.java | 156 +- .../sql/correctness/tests/DBResultTest.java | 55 +- .../correctness/tests/JDBCConnectionTest.java | 89 +- .../tests/OpenSearchConnectionTest.java | 33 +- .../sql/correctness/tests/RowTest.java | 6 +- .../sql/correctness/tests/TestConfigTest.java | 23 +- .../correctness/tests/TestDataSetTest.java | 34 +- .../correctness/tests/TestQuerySetTest.java | 16 +- .../sql/correctness/tests/TestReportTest.java | 146 +- .../sql/correctness/tests/UnitTests.java | 22 +- .../sql/correctness/testset/TestDataSet.java | 38 +- .../sql/correctness/testset/TestQuerySet.java | 14 +- .../sql/datasource/DataSourceAPIsIT.java | 124 +- .../DatasourceClusterSettingsIT.java | 24 +- .../org/opensearch/sql/jdbc/CursorIT.java | 52 +- .../sql/legacy/AggregationExpressionIT.java | 254 +- .../opensearch/sql/legacy/AggregationIT.java | 1279 ++++---- .../sql/legacy/CsvFormatResponseIT.java | 328 +- .../org/opensearch/sql/legacy/CursorIT.java | 161 +- .../opensearch/sql/legacy/DateFormatIT.java | 193 +- .../sql/legacy/DateFunctionsIT.java | 108 +- .../org/opensearch/sql/legacy/DeleteIT.java | 49 +- .../org/opensearch/sql/legacy/ExplainIT.java | 235 +- .../sql/legacy/GetEndpointQueryIT.java | 8 +- .../org/opensearch/sql/legacy/HashJoinIT.java | 78 +- .../org/opensearch/sql/legacy/HavingIT.java | 111 +- .../opensearch/sql/legacy/JSONRequestIT.java | 84 +- .../org/opensearch/sql/legacy/JdbcTestIT.java | 151 +- .../sql/legacy/JoinAliasWriterRuleIT.java | 115 +- .../org/opensearch/sql/legacy/JoinIT.java | 569 ++-- 
.../sql/legacy/MathFunctionsIT.java | 103 +- .../sql/legacy/MetaDataQueriesIT.java | 220 +- .../opensearch/sql/legacy/MethodQueryIT.java | 197 +- .../org/opensearch/sql/legacy/MetricsIT.java | 6 +- .../opensearch/sql/legacy/MultiQueryIT.java | 135 +- .../sql/legacy/NestedFieldQueryIT.java | 510 ++- .../sql/legacy/ObjectFieldSelectIT.java | 63 +- .../sql/legacy/OpenSearchSQLRestTestCase.java | 114 +- .../org/opensearch/sql/legacy/OrderIT.java | 39 +- .../sql/legacy/OrdinalAliasRewriterIT.java | 219 +- .../org/opensearch/sql/legacy/PluginIT.java | 586 ++-- .../sql/legacy/PreparedStatementIT.java | 86 +- .../sql/legacy/PrettyFormatResponseIT.java | 344 +- .../sql/legacy/PrettyFormatterIT.java | 11 +- .../sql/legacy/QueryAnalysisIT.java | 105 +- .../sql/legacy/QueryFunctionsIT.java | 171 +- .../org/opensearch/sql/legacy/QueryIT.java | 1568 ++++++---- .../sql/legacy/RestIntegTestCase.java | 160 +- .../opensearch/sql/legacy/SQLFunctionsIT.java | 666 ++-- .../sql/legacy/SQLIntegTestCase.java | 306 +- .../org/opensearch/sql/legacy/ShowIT.java | 4 +- .../opensearch/sql/legacy/SourceFieldIT.java | 58 +- .../org/opensearch/sql/legacy/SubqueryIT.java | 310 +- .../sql/legacy/TermQueryExplainIT.java | 302 +- .../org/opensearch/sql/legacy/TestUtils.java | 78 +- .../opensearch/sql/legacy/TestsConstants.java | 101 +- .../sql/legacy/TypeInformationIT.java | 85 +- .../sql/ppl/ConvertTZFunctionIT.java | 364 +-- .../sql/ppl/CrossClusterSearchIT.java | 39 +- .../org/opensearch/sql/ppl/CsvFormatIT.java | 48 +- .../org/opensearch/sql/ppl/DataTypeIT.java | 34 +- .../sql/ppl/DateTimeComparisonIT.java | 992 +++--- .../sql/ppl/DateTimeFunctionIT.java | 1372 +++++--- .../sql/ppl/DateTimeImplementationIT.java | 132 +- .../opensearch/sql/ppl/DedupCommandIT.java | 1 - .../opensearch/sql/ppl/DescribeCommandIT.java | 15 +- .../org/opensearch/sql/ppl/ExplainIT.java | 13 +- .../opensearch/sql/ppl/FieldsCommandIT.java | 11 +- .../org/opensearch/sql/ppl/HeadCommandIT.java | 41 +- 
.../sql/ppl/InformationSchemaCommandIT.java | 116 +- .../sql/ppl/LegacyAPICompatibilityIT.java | 23 +- .../org/opensearch/sql/ppl/LikeQueryIT.java | 46 +- .../opensearch/sql/ppl/MatchBoolPrefixIT.java | 11 +- .../java/org/opensearch/sql/ppl/MatchIT.java | 1 - .../org/opensearch/sql/ppl/MatchPhraseIT.java | 17 +- .../sql/ppl/MatchPhrasePrefixIT.java | 60 +- .../sql/ppl/MathematicalFunctionIT.java | 320 +- .../org/opensearch/sql/ppl/MetricsIT.java | 6 +- .../org/opensearch/sql/ppl/MultiMatchIT.java | 35 +- .../opensearch/sql/ppl/NowLikeFunctionIT.java | 207 +- .../sql/ppl/ObjectFieldOperateIT.java | 67 +- .../org/opensearch/sql/ppl/OperatorIT.java | 116 +- .../opensearch/sql/ppl/PPLIntegTestCase.java | 28 +- .../org/opensearch/sql/ppl/PPLPluginIT.java | 12 +- .../opensearch/sql/ppl/ParseCommandIT.java | 22 +- .../sql/ppl/PositionFunctionIT.java | 169 +- .../ppl/PrometheusDataSourceCommandsIT.java | 143 +- .../opensearch/sql/ppl/QueryAnalysisIT.java | 17 +- .../org/opensearch/sql/ppl/QueryStringIT.java | 27 +- .../org/opensearch/sql/ppl/RareCommandIT.java | 11 +- .../sql/ppl/RelevanceFunctionIT.java | 75 +- .../opensearch/sql/ppl/RenameCommandIT.java | 5 +- .../opensearch/sql/ppl/ResourceMonitorIT.java | 9 +- .../opensearch/sql/ppl/SearchCommandIT.java | 1 - .../org/opensearch/sql/ppl/SettingsIT.java | 9 +- .../sql/ppl/ShowDataSourcesCommandIT.java | 37 +- .../sql/ppl/SimpleQueryStringIT.java | 40 +- .../org/opensearch/sql/ppl/SortCommandIT.java | 2 - .../org/opensearch/sql/ppl/StandaloneIT.java | 70 +- .../opensearch/sql/ppl/StatsCommandIT.java | 100 +- .../opensearch/sql/ppl/SystemFunctionIT.java | 85 +- .../opensearch/sql/ppl/TextFunctionIT.java | 59 +- .../org/opensearch/sql/ppl/TopCommandIT.java | 25 +- .../sql/ppl/VisualizationFormatIT.java | 11 +- .../opensearch/sql/ppl/WhereCommandIT.java | 1 - .../java/org/opensearch/sql/sql/AdminIT.java | 6 +- .../org/opensearch/sql/sql/AggregationIT.java | 509 +-- .../sql/sql/ArithmeticFunctionIT.java | 439 ++- 
.../org/opensearch/sql/sql/ConditionalIT.java | 224 +- .../sql/sql/ConvertTZFunctionIT.java | 151 +- .../sql/sql/CorrectnessTestBase.java | 44 +- .../org/opensearch/sql/sql/CsvFormatIT.java | 51 +- .../sql/sql/DateTimeComparisonIT.java | 984 +++--- .../opensearch/sql/sql/DateTimeFormatsIT.java | 85 +- .../sql/sql/DateTimeFunctionIT.java | 863 ++--- .../sql/sql/DateTimeImplementationIT.java | 120 +- .../org/opensearch/sql/sql/ExpressionIT.java | 23 +- .../sql/sql/HighlightFunctionIT.java | 147 +- .../org/opensearch/sql/sql/IdentifierIT.java | 109 +- .../org/opensearch/sql/sql/JdbcFormatIT.java | 22 +- .../sql/sql/LegacyAPICompatibilityIT.java | 42 +- .../org/opensearch/sql/sql/LikeQueryIT.java | 57 +- .../opensearch/sql/sql/MatchBoolPrefixIT.java | 22 +- .../java/org/opensearch/sql/sql/MatchIT.java | 110 +- .../org/opensearch/sql/sql/MatchPhraseIT.java | 1 - .../sql/sql/MatchPhrasePrefixIT.java | 44 +- .../sql/sql/MathematicalFunctionIT.java | 27 +- .../org/opensearch/sql/sql/MetricsIT.java | 5 +- .../org/opensearch/sql/sql/MultiMatchIT.java | 113 +- .../java/org/opensearch/sql/sql/NestedIT.java | 388 +-- .../opensearch/sql/sql/NowLikeFunctionIT.java | 195 +- .../org/opensearch/sql/sql/NullLiteralIT.java | 13 +- .../sql/sql/PaginationBlackboxIT.java | 42 +- .../sql/sql/PaginationFallbackIT.java | 37 +- .../sql/sql/PaginationFilterIT.java | 106 +- .../org/opensearch/sql/sql/PaginationIT.java | 57 +- .../sql/sql/PaginationWindowIT.java | 32 +- .../sql/sql/PositionFunctionIT.java | 100 +- .../sql/sql/PreparedStatementIT.java | 27 +- .../java/org/opensearch/sql/sql/QueryIT.java | 116 +- .../org/opensearch/sql/sql/QueryStringIT.java | 42 +- .../opensearch/sql/sql/QueryValidationIT.java | 36 +- .../org/opensearch/sql/sql/RawFormatIT.java | 28 +- .../sql/sql/RelevanceFunctionIT.java | 64 +- .../opensearch/sql/sql/SQLCorrectnessIT.java | 31 +- .../org/opensearch/sql/sql/ScoreQueryIT.java | 158 +- .../sql/sql/SimpleQueryStringIT.java | 42 +- 
.../sql/sql/StandalonePaginationIT.java | 68 +- .../opensearch/sql/sql/SystemFunctionIT.java | 66 +- .../opensearch/sql/sql/TextFunctionIT.java | 4 +- .../opensearch/sql/sql/WildcardQueryIT.java | 129 +- .../opensearch/sql/sql/WindowFunctionIT.java | 68 +- .../ExecuteOnCallerThreadQueryManager.java | 2 +- .../sql/util/InternalRestHighLevelClient.java | 4 +- .../org/opensearch/sql/util/MatcherUtils.java | 92 +- .../opensearch/sql/util/StandaloneModule.java | 13 +- .../org/opensearch/sql/util/TestUtils.java | 1105 +++---- .../antlr/OpenSearchLegacySqlAnalyzer.java | 138 +- .../sql/legacy/antlr/SimilarSymbols.java | 82 +- .../sql/legacy/antlr/SqlAnalysisConfig.java | 81 +- .../legacy/antlr/SqlAnalysisException.java | 11 +- .../semantic/SemanticAnalysisException.java | 12 +- .../antlr/semantic/scope/Environment.java | 134 +- .../antlr/semantic/scope/Namespace.java | 29 +- .../antlr/semantic/scope/SemanticContext.java | 66 +- .../legacy/antlr/semantic/scope/Symbol.java | 38 +- .../antlr/semantic/scope/SymbolTable.java | 133 +- .../antlr/semantic/scope/TypeSupplier.java | 58 +- .../sql/legacy/antlr/semantic/types/Type.java | 107 +- .../antlr/semantic/types/TypeExpression.java | 173 +- .../antlr/semantic/types/base/BaseType.java | 21 +- .../types/base/OpenSearchDataType.java | 178 +- .../semantic/types/base/OpenSearchIndex.java | 92 +- .../types/function/AggregateFunction.java | 70 +- .../function/OpenSearchScalarFunction.java | 139 +- .../types/function/ScalarFunction.java | 200 +- .../types/operator/ComparisonOperator.java | 88 +- .../semantic/types/operator/JoinOperator.java | 56 +- .../semantic/types/operator/SetOperator.java | 71 +- .../antlr/semantic/types/special/Generic.java | 148 +- .../antlr/semantic/types/special/Product.java | 85 +- .../visitor/OpenSearchMappingLoader.java | 333 +- .../semantic/visitor/SemanticAnalyzer.java | 237 +- .../antlr/semantic/visitor/TypeChecker.java | 353 ++- .../syntax/CaseInsensitiveCharStream.java | 92 +- 
.../syntax/SyntaxAnalysisErrorListener.java | 78 +- .../antlr/syntax/SyntaxAnalysisException.java | 11 +- .../visitor/AntlrSqlParseTreeVisitor.java | 692 ++-- .../visitor/EarlyExitAnalysisException.java | 11 +- .../visitor/GenericSqlParseTreeVisitor.java | 98 +- .../sql/legacy/antlr/visitor/Reducible.java | 19 +- .../visitor/UnsupportedSemanticVerifier.java | 110 +- .../opensearch/sql/legacy/cursor/Cursor.java | 16 +- .../sql/legacy/cursor/CursorType.java | 45 +- .../sql/legacy/cursor/DefaultCursor.java | 251 +- .../sql/legacy/cursor/NullCursor.java | 29 +- .../sql/legacy/domain/ColumnTypeProvider.java | 103 +- .../sql/legacy/domain/Condition.java | 759 ++--- .../opensearch/sql/legacy/domain/Delete.java | 9 +- .../opensearch/sql/legacy/domain/Field.java | 262 +- .../opensearch/sql/legacy/domain/From.java | 88 +- .../opensearch/sql/legacy/domain/Having.java | 347 +- .../sql/legacy/domain/IndexStatement.java | 136 +- .../sql/legacy/domain/JoinSelect.java | 107 +- .../opensearch/sql/legacy/domain/KVValue.java | 35 +- .../sql/legacy/domain/MethodField.java | 151 +- .../opensearch/sql/legacy/domain/Order.java | 89 +- .../opensearch/sql/legacy/domain/Paramer.java | 266 +- .../opensearch/sql/legacy/domain/Query.java | 60 +- .../sql/legacy/domain/QueryActionRequest.java | 11 +- .../sql/legacy/domain/QueryStatement.java | 8 +- .../sql/legacy/domain/ScriptMethodField.java | 30 +- .../sql/legacy/domain/SearchResult.java | 215 +- .../opensearch/sql/legacy/domain/Select.java | 314 +- .../sql/legacy/domain/TableOnJoinSelect.java | 51 +- .../opensearch/sql/legacy/domain/Where.java | 89 +- .../legacy/domain/bucketpath/BucketPath.java | 42 +- .../sql/legacy/domain/bucketpath/Path.java | 84 +- .../sql/legacy/domain/hints/Hint.java | 29 +- .../sql/legacy/domain/hints/HintFactory.java | 398 +-- .../sql/legacy/domain/hints/HintType.java | 45 +- .../legacy/esdomain/LocalClusterState.java | 325 +- .../sql/legacy/esdomain/OpenSearchClient.java | 85 +- 
.../legacy/esdomain/mapping/FieldMapping.java | 221 +- .../esdomain/mapping/FieldMappings.java | 205 +- .../esdomain/mapping/IndexMappings.java | 98 +- .../sql/legacy/esdomain/mapping/Mappings.java | 53 +- .../SQLFeatureDisabledException.java | 10 +- .../SqlFeatureNotImplementedException.java | 19 +- .../legacy/exception/SqlParseException.java | 11 +- .../ActionRequestRestExecutorFactory.java | 63 +- .../legacy/executor/AsyncRestExecutor.java | 255 +- .../executor/ElasticDefaultRestExecutor.java | 152 +- .../legacy/executor/ElasticHitsExecutor.java | 9 +- .../legacy/executor/ElasticResultHandler.java | 44 +- .../sql/legacy/executor/Format.java | 35 +- .../executor/GetIndexRequestRestListener.java | 143 +- .../executor/QueryActionElasticExecutor.java | 139 +- .../sql/legacy/executor/RestExecutor.java | 13 +- .../adapter/QueryPlanQueryAction.java | 39 +- .../adapter/QueryPlanRequestBuilder.java | 63 +- .../sql/legacy/executor/csv/CSVResult.java | 148 +- .../executor/csv/CSVResultRestExecutor.java | 91 +- .../executor/csv/CSVResultsExtractor.java | 585 ++-- .../executor/csv/CsvExtractorException.java | 11 +- ...ursorActionRequestRestExecutorFactory.java | 20 +- .../cursor/CursorAsyncRestExecutor.java | 142 +- .../executor/cursor/CursorCloseExecutor.java | 116 +- .../executor/cursor/CursorRestExecutor.java | 11 +- .../executor/cursor/CursorResultExecutor.java | 180 +- .../format/BindingTupleResultSet.java | 76 +- .../sql/legacy/executor/format/DataRows.java | 127 +- .../executor/format/DateFieldFormatter.java | 305 +- .../legacy/executor/format/DateFormat.java | 226 +- .../executor/format/DeleteResultSet.java | 49 +- .../executor/format/DescribeResultSet.java | 264 +- .../legacy/executor/format/ErrorMessage.java | 85 +- .../executor/format/ErrorMessageFactory.java | 58 +- .../format/OpenSearchErrorMessage.java | 80 +- .../format/PrettyFormatRestExecutor.java | 147 +- .../sql/legacy/executor/format/Protocol.java | 395 +-- .../sql/legacy/executor/format/ResultSet.java | 
84 +- .../sql/legacy/executor/format/Schema.java | 248 +- .../executor/format/SelectResultSet.java | 1570 +++++----- .../legacy/executor/format/ShowResultSet.java | 105 +- .../executor/join/ElasticJoinExecutor.java | 423 +-- .../legacy/executor/join/ElasticUtils.java | 253 +- .../join/HashJoinComparisonStructure.java | 84 +- .../join/HashJoinElasticExecutor.java | 665 ++-- .../executor/join/MetaSearchResult.java | 110 +- .../join/NestedLoopsElasticExecutor.java | 596 ++-- .../join/QueryPlanElasticExecutor.java | 48 +- .../executor/join/SearchHitsResult.java | 47 +- .../executor/multi/ComperableHitResult.java | 109 +- .../legacy/executor/multi/MinusExecutor.java | 789 ++--- .../MinusOneFieldAndOptimizationResult.java | 30 +- .../multi/MultiRequestExecutorFactory.java | 28 +- .../legacy/executor/multi/UnionExecutor.java | 152 +- .../legacy/expression/core/Expression.java | 19 +- .../expression/core/ExpressionFactory.java | 145 +- .../builder/ArithmeticFunctionFactory.java | 326 +- .../core/builder/BinaryExpressionBuilder.java | 48 +- .../core/builder/ExpressionBuilder.java | 7 +- .../core/builder/UnaryExpressionBuilder.java | 46 +- .../core/operator/BinaryScalarOperator.java | 86 +- .../operator/DoubleBinaryScalarOperator.java | 59 +- .../operator/DoubleUnaryScalarOperator.java | 43 +- .../core/operator/ScalarOperation.java | 49 +- .../core/operator/ScalarOperator.java | 29 +- .../core/operator/UnaryScalarOperator.java | 54 +- .../expression/domain/BindingTuple.java | 67 +- .../expression/model/ExprBooleanValue.java | 33 +- .../expression/model/ExprCollectionValue.java | 35 +- .../expression/model/ExprDoubleValue.java | 27 +- .../expression/model/ExprFloatValue.java | 27 +- .../expression/model/ExprIntegerValue.java | 27 +- .../expression/model/ExprLongValue.java | 27 +- .../expression/model/ExprMissingValue.java | 13 +- .../expression/model/ExprStringValue.java | 27 +- .../expression/model/ExprTupleValue.java | 32 +- .../legacy/expression/model/ExprValue.java | 39 
+- .../expression/model/ExprValueFactory.java | 95 +- .../expression/model/ExprValueUtils.java | 95 +- .../sql/legacy/metrics/BasicCounter.java | 35 +- .../sql/legacy/metrics/Counter.java | 9 +- .../sql/legacy/metrics/GaugeMetric.java | 28 +- .../opensearch/sql/legacy/metrics/Metric.java | 20 +- .../sql/legacy/metrics/MetricFactory.java | 43 +- .../sql/legacy/metrics/MetricName.java | 97 +- .../sql/legacy/metrics/Metrics.java | 92 +- .../sql/legacy/metrics/NumericMetric.java | 48 +- .../sql/legacy/metrics/RollingCounter.java | 141 +- .../sql/legacy/parser/CaseWhenParser.java | 191 +- .../sql/legacy/parser/ChildrenType.java | 84 +- .../sql/legacy/parser/ElasticLexer.java | 139 +- .../legacy/parser/ElasticSqlExprParser.java | 1648 +++++----- .../legacy/parser/ElasticSqlSelectParser.java | 509 ++- .../sql/legacy/parser/FieldMaker.java | 680 ++-- .../sql/legacy/parser/HavingParser.java | 153 +- .../sql/legacy/parser/NestedType.java | 215 +- .../sql/legacy/parser/SQLOdbcExpr.java | 64 +- .../parser/SQLParensIdentifierExpr.java | 23 +- .../sql/legacy/parser/ScriptFilter.java | 151 +- .../sql/legacy/parser/SelectParser.java | 8 +- .../sql/legacy/parser/SqlParser.java | 966 +++--- .../sql/legacy/parser/SubQueryExpression.java | 63 +- .../sql/legacy/parser/SubQueryParser.java | 142 +- .../sql/legacy/parser/WhereParser.java | 1268 ++++---- .../sql/legacy/plugin/RestSQLQueryAction.java | 48 +- .../sql/legacy/plugin/RestSqlAction.java | 477 +-- .../sql/legacy/plugin/RestSqlStatsAction.java | 122 +- .../sql/legacy/plugin/SearchDao.java | 64 +- .../legacy/query/AggregationQueryAction.java | 776 ++--- .../sql/legacy/query/DefaultQueryAction.java | 479 +-- .../sql/legacy/query/DeleteQueryAction.java | 86 +- .../sql/legacy/query/DescribeQueryAction.java | 30 +- .../legacy/query/OpenSearchActionFactory.java | 326 +- .../sql/legacy/query/QueryAction.java | 388 +-- .../sql/legacy/query/ShowQueryAction.java | 30 +- ...SqlElasticDeleteByQueryRequestBuilder.java | 63 +- 
.../query/SqlElasticRequestBuilder.java | 13 +- .../query/SqlOpenSearchRequestBuilder.java | 65 +- .../query/join/BackOffRetryStrategy.java | 311 +- .../join/HashJoinElasticRequestBuilder.java | 45 +- .../legacy/query/join/JoinRequestBuilder.java | 179 +- .../NestedLoopsElasticRequestBuilder.java | 156 +- .../join/OpenSearchHashJoinQueryAction.java | 218 +- .../query/join/OpenSearchJoinQueryAction.java | 199 +- .../OpenSearchJoinQueryActionFactory.java | 55 +- .../OpenSearchNestedLoopsQueryAction.java | 72 +- .../query/join/TableInJoinRequestBuilder.java | 78 +- .../sql/legacy/query/maker/AggMaker.java | 1437 ++++----- .../sql/legacy/query/maker/Maker.java | 869 +++--- .../sql/legacy/query/maker/QueryMaker.java | 126 +- .../legacy/query/multi/MultiQueryAction.java | 103 +- .../query/multi/MultiQueryRequestBuilder.java | 193 +- .../legacy/query/multi/MultiQuerySelect.java | 47 +- .../OpenSearchMultiQueryActionFactory.java | 23 +- .../HashJoinQueryPlanRequestBuilder.java | 101 +- .../converter/SQLAggregationParser.java | 477 +-- .../SQLExprToExpressionConverter.java | 145 +- .../converter/SQLToOperatorConverter.java | 75 +- .../core/BindingTupleQueryPlanner.java | 99 +- .../legacy/query/planner/core/ColumnNode.java | 19 +- .../sql/legacy/query/planner/core/Config.java | 224 +- .../query/planner/core/ExecuteParams.java | 35 +- .../sql/legacy/query/planner/core/Plan.java | 24 +- .../legacy/query/planner/core/PlanNode.java | 71 +- .../query/planner/core/QueryParams.java | 104 +- .../query/planner/core/QueryPlanner.java | 147 +- .../query/planner/explain/Explanation.java | 125 +- .../planner/explain/ExplanationFormat.java | 56 +- .../explain/JsonExplanationFormat.java | 154 +- .../planner/logical/LogicalOperator.java | 48 +- .../query/planner/logical/LogicalPlan.java | 287 +- .../planner/logical/LogicalPlanVisitor.java | 172 +- .../query/planner/logical/node/Filter.java | 80 +- .../query/planner/logical/node/Group.java | 90 +- .../query/planner/logical/node/Join.java | 
258 +- .../query/planner/logical/node/Project.java | 197 +- .../query/planner/logical/node/Sort.java | 59 +- .../query/planner/logical/node/TableScan.java | 71 +- .../query/planner/logical/node/Top.java | 78 +- .../logical/rule/ProjectionPushDown.java | 93 +- .../logical/rule/SelectionPushDown.java | 42 +- .../planner/physical/PhysicalOperator.java | 61 +- .../query/planner/physical/PhysicalPlan.java | 117 +- .../legacy/query/planner/physical/Row.java | 172 +- .../planner/physical/estimation/Cost.java | 18 +- .../physical/estimation/Estimation.java | 61 +- .../physical/node/BatchPhysicalOperator.java | 119 +- .../physical/node/join/BlockHashJoin.java | 147 +- .../physical/node/join/CombinedRow.java | 39 +- .../physical/node/join/DefaultHashTable.java | 173 +- .../planner/physical/node/join/HashTable.java | 85 +- .../physical/node/join/HashTableGroup.java | 109 +- .../physical/node/join/JoinAlgorithm.java | 432 ++- .../physical/node/join/ListHashTable.java | 69 +- .../node/project/PhysicalProject.java | 57 +- .../physical/node/scroll/BindingTupleRow.java | 43 +- .../physical/node/scroll/PhysicalScroll.java | 80 +- .../planner/physical/node/scroll/Scroll.java | 302 +- .../SearchAggregationResponseHelper.java | 129 +- .../physical/node/scroll/SearchHitRow.java | 296 +- .../planner/physical/node/sort/QuickSort.java | 118 +- .../planner/resource/ResourceManager.java | 78 +- .../legacy/query/planner/resource/Stats.java | 80 +- .../resource/blocksize/AdaptiveBlockSize.java | 31 +- .../planner/resource/blocksize/BlockSize.java | 49 +- .../planner/resource/monitor/Monitor.java | 18 +- .../resource/monitor/TotalMemoryMonitor.java | 56 +- .../request/PreparedStatementRequest.java | 304 +- .../sql/legacy/request/SqlRequest.java | 176 +- .../sql/legacy/request/SqlRequestFactory.java | 223 +- .../sql/legacy/request/SqlRequestParam.java | 85 +- .../sql/legacy/rewriter/RewriteRule.java | 31 +- .../legacy/rewriter/RewriteRuleExecutor.java | 75 +- 
.../sql/legacy/rewriter/alias/Identifier.java | 49 +- .../sql/legacy/rewriter/alias/Table.java | 63 +- .../alias/TableAliasPrefixRemoveRule.java | 134 +- .../AnonymizeSensitiveDataRule.java | 81 +- .../identifier/UnquoteIdentifierRule.java | 86 +- .../legacy/rewriter/join/JoinRewriteRule.java | 289 +- .../matchtoterm/TermFieldRewriter.java | 390 ++- .../rewriter/matchtoterm/TermFieldScope.java | 93 +- .../matchtoterm/VerificationException.java | 15 +- .../sql/legacy/rewriter/nestedfield/From.java | 183 +- .../rewriter/nestedfield/Identifier.java | 143 +- .../nestedfield/NestedFieldProjection.java | 278 +- .../nestedfield/NestedFieldRewriter.java | 143 +- .../rewriter/nestedfield/SQLClause.java | 119 +- .../legacy/rewriter/nestedfield/Scope.java | 112 +- .../legacy/rewriter/nestedfield/Select.java | 55 +- .../legacy/rewriter/nestedfield/Where.java | 175 +- .../rewriter/ordinal/OrdinalRewriterRule.java | 246 +- .../rewriter/parent/SQLExprParentSetter.java | 47 +- .../parent/SQLExprParentSetterRule.java | 21 +- .../rewriter/subquery/NestedQueryContext.java | 96 +- .../rewriter/subquery/RewriterContext.java | 103 +- .../subquery/SubQueryRewriteRule.java | 73 +- .../rewriter/subquery/SubQueryRewriter.java | 113 +- .../subquery/rewriter/InRewriter.java | 132 +- .../rewriter/NestedExistsRewriter.java | 118 +- .../rewriter/subquery/rewriter/Rewriter.java | 23 +- .../subquery/rewriter/RewriterFactory.java | 41 +- .../rewriter/SubqueryAliasRewriter.java | 224 +- .../rewriter/subquery/utils/FindSubQuery.java | 73 +- .../spatial/BoundingBoxFilterParams.java | 29 +- .../sql/legacy/spatial/CellFilterParams.java | 55 +- .../legacy/spatial/DistanceFilterParams.java | 29 +- .../opensearch/sql/legacy/spatial/Point.java | 29 +- .../legacy/spatial/PolygonFilterParams.java | 19 +- .../spatial/RangeDistanceFilterParams.java | 27 +- .../legacy/spatial/SpatialParamsFactory.java | 172 +- .../legacy/spatial/WktToGeoJsonConverter.java | 285 +- .../sql/legacy/utils/JsonPrettyFormatter.java | 
45 +- .../sql/legacy/utils/QueryDataAnonymizer.java | 55 +- .../sql/legacy/utils/SQLFunctions.java | 2137 +++++++------ .../sql/legacy/utils/StringUtils.java | 177 +- .../org/opensearch/sql/legacy/utils/Util.java | 407 +-- .../legacy/antlr/SymbolSimilarityTest.java | 70 +- .../sql/legacy/antlr/SyntaxAnalysisTest.java | 222 +- ...SemanticAnalyzerAggregateFunctionTest.java | 279 +- .../semantic/SemanticAnalyzerBasicTest.java | 1082 +++---- .../semantic/SemanticAnalyzerConfigTest.java | 90 +- .../SemanticAnalyzerConstantTest.java | 18 +- .../SemanticAnalyzerESScalarFunctionTest.java | 84 +- .../SemanticAnalyzerFieldTypeTest.java | 131 +- .../SemanticAnalyzerFromClauseTest.java | 338 +- .../SemanticAnalyzerIdentifierTest.java | 305 +- .../SemanticAnalyzerMultiQueryTest.java | 158 +- .../SemanticAnalyzerOperatorTest.java | 99 +- .../SemanticAnalyzerScalarFunctionTest.java | 453 ++- .../SemanticAnalyzerSubqueryTest.java | 177 +- .../semantic/SemanticAnalyzerTestBase.java | 69 +- .../antlr/semantic/SemanticAnalyzerTests.java | 30 +- .../antlr/semantic/scope/EnvironmentTest.java | 268 +- .../semantic/scope/SemanticContextTest.java | 57 +- .../antlr/semantic/scope/SymbolTableTest.java | 102 +- .../semantic/scope/TypeSupplierTest.java | 42 +- .../antlr/semantic/types/BaseTypeTest.java | 142 +- .../antlr/semantic/types/GenericTypeTest.java | 54 +- .../antlr/semantic/types/ProductTypeTest.java | 88 +- .../semantic/types/TypeExpressionTest.java | 78 +- .../visitor/AntlrSqlParseTreeVisitorTest.java | 167 +- .../esdomain/mapping/FieldMappingTest.java | 177 +- .../esdomain/mapping/FieldMappingsTest.java | 79 +- .../executor/AsyncRestExecutorTest.java | 103 +- .../legacy/executor/csv/CSVResultTest.java | 173 +- .../format/DateFieldFormatterTest.java | 1312 ++++---- .../legacy/executor/format/ResultSetTest.java | 51 +- .../RestSQLQueryActionCursorFallbackTest.java | 61 +- .../legacy/plugin/RestSQLQueryActionTest.java | 139 +- .../node/scroll/SearchHitRowTest.java | 4 +- 
.../legacy/rewriter/alias/IdentifierTest.java | 65 +- .../alias/TableAliasPrefixRemoveRuleTest.java | 209 +- .../sql/legacy/rewriter/alias/TableTest.java | 38 +- .../unittest/AggregationOptionTest.java | 79 +- .../sql/legacy/unittest/DateFormatTest.java | 419 +-- .../legacy/unittest/DateFunctionsTest.java | 306 +- .../unittest/ErrorMessageFactoryTest.java | 67 +- .../sql/legacy/unittest/FormatTest.java | 21 +- .../sql/legacy/unittest/HavingTest.java | 620 ++-- .../sql/legacy/unittest/JSONRequestTest.java | 809 ++--- .../unittest/LocalClusterStateTest.java | 300 +- .../legacy/unittest/MathFunctionsTest.java | 721 ++--- .../unittest/NestedFieldProjectionTest.java | 635 ++-- .../unittest/NestedFieldRewriterTest.java | 1230 ++++---- .../legacy/unittest/OpenSearchClientTest.java | 62 +- .../PreparedStatementRequestTest.java | 125 +- .../legacy/unittest/QueryFunctionsTest.java | 443 ++- .../unittest/SqlRequestFactoryTest.java | 348 ++- .../legacy/unittest/SqlRequestParamTest.java | 77 +- .../legacy/unittest/StringOperatorsTest.java | 343 +- .../unittest/WhereWithBoolConditionTest.java | 93 +- .../unittest/cursor/DefaultCursorTest.java | 74 +- .../domain/ColumnTypeProviderTest.java | 50 +- .../executor/DeleteResultSetTest.java | 82 +- .../format/BindingTupleResultSetTest.java | 69 +- .../format/CSVResultsExtractorTest.java | 39 +- .../executor/join/ElasticUtilsTest.java | 51 +- .../expression/core/BinaryExpressionTest.java | 127 +- .../core/CompoundExpressionTest.java | 15 +- .../expression/core/ExpressionTest.java | 61 +- .../expression/core/RefExpressionTest.java | 69 +- .../expression/core/UnaryExpressionTest.java | 199 +- .../expression/model/ExprValueUtilsTest.java | 78 +- .../unittest/metrics/BasicCounterTest.java | 28 +- .../unittest/metrics/GaugeMetricTest.java | 23 +- .../legacy/unittest/metrics/MetricsTest.java | 101 +- .../unittest/metrics/NumericMetricTest.java | 28 +- .../unittest/metrics/RollingCounterTest.java | 84 +- .../unittest/parser/BucketPathTest.java 
| 66 +- .../unittest/parser/FieldMakerTest.java | 69 +- .../legacy/unittest/parser/SqlParserTest.java | 2779 +++++++++-------- .../unittest/parser/SubQueryParserTest.java | 255 +- .../BindingTupleQueryPlannerExecuteTest.java | 130 +- .../planner/OpenSearchActionFactoryTest.java | 82 +- .../planner/QueryPlannerBatchTest.java | 356 +-- .../planner/QueryPlannerConfigTest.java | 528 ++-- .../planner/QueryPlannerExecuteTest.java | 1176 +++---- .../planner/QueryPlannerExplainTest.java | 64 +- .../planner/QueryPlannerMonitorTest.java | 189 +- .../unittest/planner/QueryPlannerTest.java | 386 ++- .../converter/SQLAggregationParserTest.java | 681 ++-- .../SQLExprToExpressionConverterTest.java | 236 +- .../converter/SQLToOperatorConverterTest.java | 79 +- .../SearchAggregationResponseHelperTest.java | 600 ++-- .../query/DefaultQueryActionTest.java | 378 ++- .../rewriter/RewriteRuleExecutorTest.java | 41 +- .../identifier/UnquoteIdentifierRuleTest.java | 128 +- .../rewriter/inline/AliasInliningTests.java | 205 +- .../ordinal/OrdinalRewriterRuleTest.java | 247 +- .../parent/SQLExprParentSetterRuleTest.java | 14 +- .../parent/SQLExprParentSetterTest.java | 54 +- .../subquery/ExistsSubQueryRewriterTest.java | 184 +- .../subquery/InSubqueryRewriterTest.java | 118 +- .../subquery/NestedQueryContextTest.java | 79 +- .../subquery/SubQueryRewriteRuleTest.java | 101 +- .../subquery/SubQueryRewriterTestBase.java | 27 +- .../rewriter/SubqueryAliasRewriterTest.java | 251 +- .../subquery/utils/FindSubQueryTest.java | 63 +- .../rewriter/term/TermFieldRewriterTest.java | 175 +- .../spatial/WktToGeoJsonConverterTest.java | 363 ++- .../unittest/utils/BackticksUnquoterTest.java | 44 +- .../unittest/utils/PrettyFormatterTest.java | 80 +- .../unittest/utils/QueryContextTest.java | 82 +- .../utils/QueryDataAnonymizerTest.java | 137 +- .../unittest/utils/SQLFunctionsTest.java | 126 +- .../unittest/utils/StringUtilsTest.java | 81 +- .../sql/legacy/unittest/utils/UtilTest.java | 93 +- 
.../sql/legacy/util/AggregationUtils.java | 83 +- .../sql/legacy/util/CheckScriptContents.java | 355 +-- .../sql/legacy/util/HasFieldWithValue.java | 70 +- .../sql/legacy/util/MatcherUtils.java | 485 +-- .../util/MultipleIndexClusterUtils.java | 318 +- .../sql/legacy/util/SqlExplainUtils.java | 25 +- .../sql/legacy/util/SqlParserUtils.java | 34 +- .../opensearch/sql/legacy/util/TestUtils.java | 1530 ++++----- .../sql/legacy/util/TestsConstants.java | 78 +- .../sql/opensearch/client/MLClient.java | 6 +- .../opensearch/client/OpenSearchClient.java | 10 +- .../client/OpenSearchNodeClient.java | 59 +- .../client/OpenSearchRestClient.java | 68 +- .../data/type/OpenSearchBinaryType.java | 5 +- .../data/type/OpenSearchDataType.java | 139 +- .../data/type/OpenSearchDateType.java | 285 +- .../data/type/OpenSearchGeoPointType.java | 5 +- .../data/type/OpenSearchIpType.java | 5 +- .../data/type/OpenSearchTextType.java | 12 +- .../sql/opensearch/data/utils/Content.java | 88 +- .../opensearch/data/utils/ObjectContent.java | 26 +- .../data/utils/OpenSearchJsonContent.java | 26 +- .../data/value/OpenSearchExprBinaryValue.java | 6 +- .../value/OpenSearchExprGeoPointValue.java | 8 +- .../data/value/OpenSearchExprIpValue.java | 5 +- .../data/value/OpenSearchExprTextValue.java | 5 +- .../value/OpenSearchExprValueFactory.java | 200 +- .../executor/OpenSearchExecutionEngine.java | 48 +- .../executor/OpenSearchQueryManager.java | 4 +- .../protector/ExecutionProtector.java | 9 +- .../protector/NoopExecutionProtector.java | 5 +- .../OpenSearchExecutionProtector.java | 91 +- .../protector/ResourceMonitorPlan.java | 28 +- .../sql/opensearch/mapping/IndexMapping.java | 19 +- .../monitor/OpenSearchMemoryHealthy.java | 21 +- .../monitor/OpenSearchResourceMonitor.java | 19 +- .../planner/physical/ADOperator.java | 179 +- .../planner/physical/MLCommonsOperator.java | 84 +- .../physical/MLCommonsOperatorActions.java | 140 +- .../planner/physical/MLOperator.java | 70 +- 
.../request/OpenSearchQueryRequest.java | 74 +- .../opensearch/request/OpenSearchRequest.java | 25 +- .../request/OpenSearchRequestBuilder.java | 141 +- .../request/OpenSearchScrollRequest.java | 54 +- .../system/OpenSearchCatIndicesRequest.java | 5 +- .../OpenSearchDescribeIndexRequest.java | 42 +- .../system/OpenSearchSystemRequest.java | 5 +- .../response/OpenSearchResponse.java | 174 +- .../agg/CompositeAggregationParser.java | 4 +- .../opensearch/response/agg/FilterParser.java | 5 +- .../opensearch/response/agg/MetricParser.java | 8 +- .../response/agg/MetricParserHelper.java | 9 +- .../agg/NoBucketAggregationParser.java | 4 +- .../OpenSearchAggregationResponseParser.java | 5 +- .../response/agg/SingleValueParser.java | 7 +- .../opensearch/response/agg/StatsParser.java | 4 +- .../response/agg/TopHitsParser.java | 11 +- .../sql/opensearch/response/agg/Utils.java | 1 + .../response/error/ErrorMessage.java | 18 +- .../response/error/ErrorMessageFactory.java | 9 +- .../error/OpenSearchErrorMessage.java | 23 +- .../opensearch/security/SecurityAccess.java | 5 +- .../setting/LegacyOpenDistroSettings.java | 194 +- .../setting/OpenSearchSettings.java | 305 +- .../storage/OpenSearchDataSourceFactory.java | 4 +- .../opensearch/storage/OpenSearchIndex.java | 97 +- .../storage/OpenSearchStorageEngine.java | 8 +- .../storage/scan/OpenSearchIndexScan.java | 35 +- ...OpenSearchIndexScanAggregationBuilder.java | 17 +- .../scan/OpenSearchIndexScanBuilder.java | 30 +- .../scan/OpenSearchIndexScanQueryBuilder.java | 45 +- .../storage/scan/PushDownQueryBuilder.java | 4 +- .../script/ExpressionScriptEngine.java | 31 +- .../storage/script/StringUtils.java | 2 +- .../aggregation/AggregationQueryBuilder.java | 98 +- .../ExpressionAggregationScript.java | 21 +- .../ExpressionAggregationScriptFactory.java | 5 +- ...xpressionAggregationScriptLeafFactory.java | 21 +- .../dsl/AggregationBuilderHelper.java | 24 +- .../dsl/BucketAggregationBuilder.java | 15 +- 
.../dsl/MetricAggregationBuilder.java | 50 +- .../storage/script/core/ExpressionScript.java | 108 +- .../script/filter/ExpressionFilterScript.java | 31 +- .../filter/ExpressionFilterScriptFactory.java | 10 +- .../ExpressionFilterScriptLeafFactory.java | 23 +- .../script/filter/FilterQueryBuilder.java | 54 +- .../script/filter/lucene/LikeQuery.java | 7 +- .../script/filter/lucene/LuceneQuery.java | 288 +- .../script/filter/lucene/NestedQuery.java | 35 +- .../script/filter/lucene/RangeQuery.java | 16 +- .../script/filter/lucene/TermQuery.java | 5 +- .../FunctionParameterRepository.java | 389 ++- .../relevance/MatchBoolPrefixQuery.java | 18 +- .../relevance/MatchPhrasePrefixQuery.java | 6 +- .../lucene/relevance/MatchPhraseQuery.java | 8 +- .../filter/lucene/relevance/MatchQuery.java | 8 +- .../lucene/relevance/MultiFieldQuery.java | 37 +- .../lucene/relevance/MultiMatchQuery.java | 4 +- .../filter/lucene/relevance/NoFieldQuery.java | 28 +- .../filter/lucene/relevance/QueryQuery.java | 8 +- .../lucene/relevance/QueryStringQuery.java | 12 +- .../lucene/relevance/RelevanceQuery.java | 55 +- .../relevance/SimpleQueryStringQuery.java | 8 +- .../lucene/relevance/SingleFieldQuery.java | 20 +- .../lucene/relevance/WildcardQuery.java | 9 +- .../storage/script/sort/SortQueryBuilder.java | 38 +- .../DefaultExpressionSerializer.java | 6 +- .../serialization/ExpressionSerializer.java | 16 +- .../storage/system/OpenSearchSystemIndex.java | 22 +- .../system/OpenSearchSystemIndexScan.java | 13 +- .../system/OpenSearchSystemIndexSchema.java | 88 +- .../client/OpenSearchNodeClientTest.java | 177 +- .../client/OpenSearchRestClientTest.java | 203 +- .../OpenSearchDataTypeRecognitionTest.java | 7 +- .../data/type/OpenSearchDataTypeTest.java | 240 +- .../data/type/OpenSearchDateTypeTest.java | 78 +- .../value/OpenSearchExprBinaryValueTest.java | 10 +- .../OpenSearchExprGeoPointValueTest.java | 1 - .../data/value/OpenSearchExprIpValueTest.java | 1 - 
.../value/OpenSearchExprTextValueTest.java | 76 +- .../value/OpenSearchExprValueFactoryTest.java | 614 ++-- .../OpenSearchExecutionEngineTest.java | 171 +- .../executor/OpenSearchQueryManagerTest.java | 35 +- .../executor/ResourceMonitorPlanTest.java | 13 +- .../protector/NoopExecutionProtectorTest.java | 4 +- .../OpenSearchExecutionProtectorTest.java | 173 +- .../monitor/OpenSearchMemoryHealthyTest.java | 16 +- .../OpenSearchResourceMonitorTest.java | 20 +- .../physical/MLCommonsOperatorTest.java | 78 +- .../planner/physical/MLOperatorTest.java | 74 +- .../request/OpenSearchQueryRequestTest.java | 102 +- .../request/OpenSearchRequestBuilderTest.java | 247 +- .../request/OpenSearchScrollRequestTest.java | 148 +- .../OpenSearchCatIndicesRequestTest.java | 12 +- .../OpenSearchDescribeIndexRequestTest.java | 40 +- .../response/AggregationResponseUtils.java | 80 +- ...enSearchAggregationResponseParserTest.java | 437 ++- .../response/OpenSearchResponseTest.java | 123 +- .../error/ErrorMessageFactoryTest.java | 1 - .../response/error/ErrorMessageTest.java | 65 +- .../error/OpenSearchErrorMessageTest.java | 19 +- .../setting/OpenSearchSettingsTest.java | 42 +- .../OpenSearchDefaultImplementorTest.java | 16 +- .../storage/OpenSearchIndexTest.java | 133 +- .../storage/OpenSearchStorageEngineTest.java | 25 +- ...SearchIndexScanAggregationBuilderTest.java | 7 +- .../OpenSearchIndexScanOptimizationTest.java | 586 ++-- .../OpenSearchIndexScanPaginationTest.java | 46 +- .../storage/scan/OpenSearchIndexScanTest.java | 194 +- .../scan/PushDownQueryBuilderTest.java | 19 +- .../script/ExpressionScriptEngineTest.java | 17 +- .../AggregationQueryBuilderTest.java | 865 ++--- ...xpressionAggregationScriptFactoryTest.java | 13 +- .../ExpressionAggregationScriptTest.java | 79 +- .../aggregation/GroupSortOrderTest.java | 11 +- .../dsl/BucketAggregationBuilderTest.java | 36 +- .../dsl/MetricAggregationBuilderTest.java | 471 +-- .../ExpressionFilterScriptFactoryTest.java | 14 +- 
.../filter/ExpressionFilterScriptTest.java | 75 +- .../script/filter/FilterQueryBuilderTest.java | 1536 +++++---- .../script/filter/lucene/LuceneQueryTest.java | 8 +- .../lucene/MatchBoolPrefixQueryTest.java | 48 +- .../lucene/MatchPhrasePrefixQueryTest.java | 90 +- .../filter/lucene/MatchPhraseQueryTest.java | 300 +- .../script/filter/lucene/MatchQueryTest.java | 211 +- .../script/filter/lucene/MultiMatchTest.java | 139 +- .../script/filter/lucene/QueryStringTest.java | 102 +- .../script/filter/lucene/QueryTest.java | 107 +- .../script/filter/lucene/RangeQueryTest.java | 10 +- .../filter/lucene/SimpleQueryStringTest.java | 112 +- .../filter/lucene/WildcardQueryTest.java | 43 +- .../lucene/relevance/MultiFieldQueryTest.java | 42 +- .../lucene/relevance/NoFieldQueryTest.java | 20 +- .../relevance/RelevanceQueryBuildTest.java | 31 +- .../relevance/SingleFieldQueryTest.java | 47 +- .../script/sort/SortQueryBuilderTest.java | 55 +- .../DefaultExpressionSerializerTest.java | 34 +- .../system/OpenSearchSystemIndexScanTest.java | 4 +- .../system/OpenSearchSystemIndexTest.java | 29 +- .../sql/opensearch/utils/Utils.java | 11 +- .../org/opensearch/sql/plugin/SQLPlugin.java | 77 +- .../plugin/config/OpenSearchPluginModule.java | 18 +- .../request/PPLQueryRequestFactory.java | 14 +- .../sql/plugin/rest/RestPPLQueryAction.java | 15 +- .../sql/plugin/rest/RestPPLStatsAction.java | 23 +- .../plugin/rest/RestQuerySettingsAction.java | 66 +- .../transport/TransportPPLQueryAction.java | 4 +- .../TransportPPLQueryRequestTest.java | 4 +- .../org/opensearch/sql/ppl/PPLService.java | 16 +- .../sql/ppl/antlr/PPLSyntaxParser.java | 15 +- .../sql/ppl/domain/PPLQueryRequest.java | 24 +- .../sql/ppl/domain/PPLQueryResponse.java | 4 +- .../opensearch/sql/ppl/parser/AstBuilder.java | 263 +- .../sql/ppl/parser/AstExpressionBuilder.java | 203 +- .../sql/ppl/parser/AstStatementBuilder.java | 4 +- .../sql/ppl/utils/ArgumentFactory.java | 41 +- .../sql/ppl/utils/PPLQueryDataAnonymizer.java | 92 +- 
.../sql/ppl/utils/UnresolvedPlanHelper.java | 9 +- .../opensearch/sql/ppl/PPLServiceTest.java | 122 +- .../ppl/antlr/NowLikeFunctionParserTest.java | 32 +- ...ntaxParserMatchBoolPrefixSamplesTests.java | 15 +- ...PPLSyntaxParserMatchPhraseSamplesTest.java | 16 +- .../sql/ppl/antlr/PPLSyntaxParserTest.java | 310 +- .../sql/ppl/domain/PPLQueryRequestTest.java | 20 +- .../sql/ppl/domain/PPLQueryResponseTest.java | 1 - .../sql/ppl/parser/AstBuilderTest.java | 607 ++-- .../ppl/parser/AstExpressionBuilderTest.java | 664 ++-- .../ppl/parser/AstNowLikeFunctionTest.java | 55 +- .../ppl/parser/AstStatementBuilderTest.java | 15 +- .../sql/ppl/utils/ArgumentFactoryTest.java | 46 +- .../ppl/utils/PPLQueryDataAnonymizerTest.java | 99 +- .../ppl/utils/UnresolvedPlanHelperTest.java | 1 - .../client/PrometheusClientImpl.java | 66 +- .../constants/PrometheusFieldConstants.java | 4 +- .../QueryExemplarFunctionImplementation.java | 73 +- .../QueryRangeFunctionImplementation.java | 79 +- .../QueryExemplarsTableFunctionResolver.java | 22 +- .../QueryRangeTableFunctionResolver.java | 14 +- .../PrometheusFunctionResponseHandle.java | 12 +- .../QueryExemplarsFunctionResponseHandle.java | 23 +- .../QueryRangeFunctionResponseHandle.java | 27 +- ...ueryExemplarsFunctionTableScanBuilder.java | 9 +- ...eryExemplarsFunctionTableScanOperator.java | 42 +- .../QueryRangeFunctionTableScanBuilder.java | 5 +- .../QueryRangeFunctionTableScanOperator.java | 40 +- .../logical/PrometheusLogicalMetricAgg.java | 41 +- .../logical/PrometheusLogicalMetricScan.java | 12 +- ...PrometheusLogicalPlanOptimizerFactory.java | 17 +- .../logical/rules/MergeAggAndIndexScan.java | 21 +- .../logical/rules/MergeAggAndRelation.java | 20 +- .../logical/rules/MergeFilterAndRelation.java | 20 +- .../PrometheusQueryExemplarsRequest.java | 18 +- .../request/PrometheusQueryRequest.java | 24 +- .../PrometheusDescribeMetricRequest.java | 71 +- .../system/PrometheusListMetricsRequest.java | 45 +- 
.../system/PrometheusSystemRequest.java | 5 +- .../response/PrometheusResponse.java | 33 +- .../PrometheusMetricDefaultSchema.java | 11 +- .../storage/PrometheusMetricScan.java | 43 +- .../storage/PrometheusMetricTable.java | 49 +- .../storage/PrometheusStorageEngine.java | 16 +- .../storage/PrometheusStorageFactory.java | 73 +- .../storage/QueryExemplarsTable.java | 15 +- .../PrometheusDefaultImplementor.java | 76 +- .../model/PrometheusResponseFieldNames.java | 2 - .../storage/model/QueryRangeParameters.java | 1 - .../querybuilder/AggregationQueryBuilder.java | 47 +- .../SeriesSelectionQueryBuilder.java | 22 +- .../querybuilder/StepParameterResolver.java | 18 +- .../TimeRangeParametersResolver.java | 15 +- .../storage/system/PrometheusSystemTable.java | 22 +- .../system/PrometheusSystemTableScan.java | 7 +- .../system/PrometheusSystemTableSchema.java | 33 +- .../prometheus/utils/TableFunctionUtils.java | 52 +- .../client/PrometheusClientImplTest.java | 87 +- ...ryExemplarsFunctionImplementationTest.java | 69 +- .../QueryRangeFunctionImplementationTest.java | 83 +- ...eryExemplarsTableFunctionResolverTest.java | 37 +- .../QueryRangeTableFunctionResolverTest.java | 191 +- ...ExemplarsFunctionTableScanBuilderTest.java | 32 +- ...xemplarsFunctionTableScanOperatorTest.java | 103 +- ...ueryRangeFunctionTableScanBuilderTest.java | 23 +- ...eryRangeFunctionTableScanOperatorTest.java | 116 +- .../logical/PrometheusLogicOptimizerTest.java | 63 +- .../PrometheusDescribeMetricRequestTest.java | 103 +- .../PrometheusListMetricsRequestTest.java | 44 +- .../storage/PrometheusMetricScanTest.java | 184 +- .../storage/PrometheusMetricTableTest.java | 889 +++--- .../storage/PrometheusStorageEngineTest.java | 26 +- .../storage/PrometheusStorageFactoryTest.java | 87 +- .../storage/QueryExemplarsTableTest.java | 13 +- .../StepParameterResolverTest.java | 6 +- .../TimeRangeParametersResolverTest.java | 8 +- .../system/PrometheusSystemTableScanTest.java | 3 +- 
.../system/PrometheusSystemTableTest.java | 46 +- .../prometheus/utils/LogicalPlanUtils.java | 42 +- .../sql/prometheus/utils/TestUtils.java | 2 +- .../sql/protocol/response/QueryResult.java | 35 +- .../format/CommandResponseFormatter.java | 6 +- .../response/format/CsvResponseFormatter.java | 2 - .../response/format/ErrorFormatter.java | 36 +- .../format/FlatResponseFormatter.java | 45 +- .../sql/protocol/response/format/Format.java | 4 +- .../format/JdbcResponseFormatter.java | 23 +- .../format/JsonResponseFormatter.java | 21 +- .../response/format/RawResponseFormatter.java | 8 +- .../response/format/ResponseFormatter.java | 6 +- .../format/SimpleJsonResponseFormatter.java | 9 +- .../VisualizationResponseFormatter.java | 26 +- .../protocol/response/QueryResultTest.java | 103 +- .../format/CommandResponseFormatterTest.java | 40 +- .../format/CsvResponseFormatterTest.java | 155 +- .../protocol/response/format/FormatTest.java | 6 +- .../format/JdbcResponseFormatterTest.java | 106 +- .../format/RawResponseFormatterTest.java | 191 +- .../SimpleJsonResponseFormatterTest.java | 40 +- .../VisualizationResponseFormatterTest.java | 77 +- .../sql/spark/client/EmrClientImpl.java | 64 +- .../sql/spark/client/SparkClient.java | 6 +- .../SparkSqlFunctionImplementation.java | 56 +- .../SparkSqlTableFunctionResolver.java | 63 +- ...DefaultSparkSqlFunctionResponseHandle.java | 21 +- .../SparkSqlFunctionResponseHandle.java | 16 +- .../SparkSqlFunctionTableScanBuilder.java | 4 +- .../SparkSqlFunctionTableScanOperator.java | 28 +- .../sql/spark/helper/FlintHelper.java | 27 +- .../sql/spark/request/SparkQueryRequest.java | 9 +- .../sql/spark/response/SparkResponse.java | 21 +- .../sql/spark/storage/SparkScan.java | 12 +- .../sql/spark/storage/SparkStorageEngine.java | 7 +- .../spark/storage/SparkStorageFactory.java | 60 +- .../sql/spark/storage/SparkTable.java | 15 +- .../sql/spark/client/EmrClientImplTest.java | 52 +- .../SparkSqlFunctionImplementationTest.java | 57 +- 
.../SparkSqlFunctionTableScanBuilderTest.java | 21 +- ...SparkSqlFunctionTableScanOperatorTest.java | 111 +- .../SparkSqlTableFunctionResolverTest.java | 107 +- .../sql/spark/response/SparkResponseTest.java | 45 +- .../sql/spark/storage/SparkScanTest.java | 7 +- .../spark/storage/SparkStorageEngineTest.java | 12 +- .../storage/SparkStorageFactoryTest.java | 62 +- .../sql/spark/storage/SparkTableTest.java | 23 +- .../opensearch/sql/spark/utils/TestUtils.java | 2 +- .../org/opensearch/sql/sql/SQLService.java | 24 +- .../sql/sql/antlr/AnonymizerListener.java | 22 +- .../sql/sql/antlr/SQLSyntaxParser.java | 10 +- .../sql/sql/domain/SQLQueryRequest.java | 74 +- .../sql/sql/parser/AstAggregationBuilder.java | 46 +- .../opensearch/sql/sql/parser/AstBuilder.java | 46 +- .../sql/sql/parser/AstExpressionBuilder.java | 317 +- .../sql/parser/AstHavingFilterBuilder.java | 9 +- .../sql/sql/parser/AstSortBuilder.java | 14 +- .../sql/sql/parser/ParserUtils.java | 25 +- .../sql/parser/context/ParsingContext.java | 13 +- .../parser/context/QuerySpecification.java | 49 +- .../common/antlr/SyntaxParserTestBase.java | 9 +- .../opensearch/sql/sql/SQLServiceTest.java | 38 +- .../sql/sql/antlr/BracketedTimestampTest.java | 1 - .../sql/sql/antlr/HighlightTest.java | 15 +- .../sql/antlr/MatchBoolPrefixParserTest.java | 5 +- .../sql/sql/antlr/SQLParserTest.java | 1 - .../sql/sql/antlr/SQLSyntaxParserTest.java | 571 ++-- .../sql/sql/domain/SQLQueryRequestTest.java | 156 +- .../sql/parser/AnonymizerListenerTest.java | 47 +- .../sql/parser/AstAggregationBuilderTest.java | 123 +- .../sql/sql/parser/AstBuilderTest.java | 352 +-- .../sql/sql/parser/AstBuilderTestBase.java | 4 +- .../sql/parser/AstExpressionBuilderTest.java | 690 ++-- .../parser/AstHavingFilterBuilderTest.java | 4 +- .../sql/parser/AstNowLikeFunctionTest.java | 65 +- .../parser/AstQualifiedNameBuilderTest.java | 9 +- .../sql/sql/parser/AstSortBuilderTest.java | 47 +- .../context/QuerySpecificationTest.java | 61 +- 1006 files 
changed, 63853 insertions(+), 64619 deletions(-) diff --git a/benchmarks/src/jmh/java/org/opensearch/sql/expression/operator/predicate/ComparisonOperatorBenchmark.java b/benchmarks/src/jmh/java/org/opensearch/sql/expression/operator/predicate/ComparisonOperatorBenchmark.java index d2642dd645..01b2068694 100644 --- a/benchmarks/src/jmh/java/org/opensearch/sql/expression/operator/predicate/ComparisonOperatorBenchmark.java +++ b/benchmarks/src/jmh/java/org/opensearch/sql/expression/operator/predicate/ComparisonOperatorBenchmark.java @@ -38,7 +38,7 @@ @Fork(value = 1) public class ComparisonOperatorBenchmark { - @Param(value = { "int", "string", "date" }) + @Param(value = {"int", "string", "date"}) private String testDataType; private final Map params = @@ -65,9 +65,7 @@ public void testGreaterOperator() { private void run(Function dsl) { ExprValue param = params.get(testDataType); - FunctionExpression func = dsl.apply(new Expression[] { - literal(param), literal(param) - }); + FunctionExpression func = dsl.apply(new Expression[] {literal(param), literal(param)}); func.valueOf(); } } diff --git a/build.gradle b/build.gradle index 14ea5b67f9..c66d098a3f 100644 --- a/build.gradle +++ b/build.gradle @@ -83,7 +83,7 @@ repositories { spotless { java { target fileTree('.') { - include 'core/**/*.java' + include '**/*.java' exclude '**/build/**', '**/build-*/**' } importOrder() diff --git a/common/src/main/java/org/opensearch/sql/common/antlr/CaseInsensitiveCharStream.java b/common/src/main/java/org/opensearch/sql/common/antlr/CaseInsensitiveCharStream.java index 0036da32a1..89381872ce 100644 --- a/common/src/main/java/org/opensearch/sql/common/antlr/CaseInsensitiveCharStream.java +++ b/common/src/main/java/org/opensearch/sql/common/antlr/CaseInsensitiveCharStream.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.common.antlr; import org.antlr.v4.runtime.CharStream; diff --git 
a/common/src/main/java/org/opensearch/sql/common/antlr/SyntaxAnalysisErrorListener.java b/common/src/main/java/org/opensearch/sql/common/antlr/SyntaxAnalysisErrorListener.java index b499a52967..76cbf52d58 100644 --- a/common/src/main/java/org/opensearch/sql/common/antlr/SyntaxAnalysisErrorListener.java +++ b/common/src/main/java/org/opensearch/sql/common/antlr/SyntaxAnalysisErrorListener.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.common.antlr; import java.util.Locale; diff --git a/common/src/main/java/org/opensearch/sql/common/antlr/SyntaxCheckException.java b/common/src/main/java/org/opensearch/sql/common/antlr/SyntaxCheckException.java index 806cb7208b..d3c9c111ef 100644 --- a/common/src/main/java/org/opensearch/sql/common/antlr/SyntaxCheckException.java +++ b/common/src/main/java/org/opensearch/sql/common/antlr/SyntaxCheckException.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.common.antlr; public class SyntaxCheckException extends RuntimeException { diff --git a/common/src/main/java/org/opensearch/sql/common/authinterceptors/AwsSigningInterceptor.java b/common/src/main/java/org/opensearch/sql/common/authinterceptors/AwsSigningInterceptor.java index 6c65c69c31..e2d33dca8b 100644 --- a/common/src/main/java/org/opensearch/sql/common/authinterceptors/AwsSigningInterceptor.java +++ b/common/src/main/java/org/opensearch/sql/common/authinterceptors/AwsSigningInterceptor.java @@ -31,15 +31,17 @@ public class AwsSigningInterceptor implements Interceptor { private static final Logger LOG = LogManager.getLogger(); /** - * AwsSigningInterceptor which intercepts http requests - * and adds required headers for sigv4 authentication. + * AwsSigningInterceptor which intercepts http requests and adds required headers for sigv4 + * authentication. * * @param awsCredentialsProvider awsCredentialsProvider. * @param region region. * @param serviceName serviceName. 
*/ - public AwsSigningInterceptor(@NonNull AWSCredentialsProvider awsCredentialsProvider, - @NonNull String region, @NonNull String serviceName) { + public AwsSigningInterceptor( + @NonNull AWSCredentialsProvider awsCredentialsProvider, + @NonNull String region, + @NonNull String serviceName) { this.okHttpAwsV4Signer = new OkHttpAwsV4Signer(region, serviceName); this.awsCredentialsProvider = awsCredentialsProvider; } @@ -48,25 +50,27 @@ public AwsSigningInterceptor(@NonNull AWSCredentialsProvider awsCredentialsProvi public Response intercept(Interceptor.Chain chain) throws IOException { Request request = chain.request(); - DateTimeFormatter timestampFormat = DateTimeFormatter.ofPattern("yyyyMMdd'T'HHmmss'Z'") - .withZone(ZoneId.of("GMT")); + DateTimeFormatter timestampFormat = + DateTimeFormatter.ofPattern("yyyyMMdd'T'HHmmss'Z'").withZone(ZoneId.of("GMT")); - - Request.Builder newRequestBuilder = request.newBuilder() - .addHeader("x-amz-date", timestampFormat.format(ZonedDateTime.now())) - .addHeader("host", request.url().host()); + Request.Builder newRequestBuilder = + request + .newBuilder() + .addHeader("x-amz-date", timestampFormat.format(ZonedDateTime.now())) + .addHeader("host", request.url().host()); AWSCredentials awsCredentials = awsCredentialsProvider.getCredentials(); if (awsCredentialsProvider instanceof STSAssumeRoleSessionCredentialsProvider) { - newRequestBuilder.addHeader("x-amz-security-token", + newRequestBuilder.addHeader( + "x-amz-security-token", ((STSAssumeRoleSessionCredentialsProvider) awsCredentialsProvider) .getCredentials() .getSessionToken()); } Request newRequest = newRequestBuilder.build(); - Request signed = okHttpAwsV4Signer.sign(newRequest, - awsCredentials.getAWSAccessKeyId(), awsCredentials.getAWSSecretKey()); + Request signed = + okHttpAwsV4Signer.sign( + newRequest, awsCredentials.getAWSAccessKeyId(), awsCredentials.getAWSSecretKey()); return chain.proceed(signed); } - } diff --git 
a/common/src/main/java/org/opensearch/sql/common/authinterceptors/BasicAuthenticationInterceptor.java b/common/src/main/java/org/opensearch/sql/common/authinterceptors/BasicAuthenticationInterceptor.java index 34634d1580..2275482e30 100644 --- a/common/src/main/java/org/opensearch/sql/common/authinterceptors/BasicAuthenticationInterceptor.java +++ b/common/src/main/java/org/opensearch/sql/common/authinterceptors/BasicAuthenticationInterceptor.java @@ -22,13 +22,11 @@ public BasicAuthenticationInterceptor(@NonNull String username, @NonNull String this.credentials = Credentials.basic(username, password); } - @Override public Response intercept(Interceptor.Chain chain) throws IOException { Request request = chain.request(); - Request authenticatedRequest = request.newBuilder() - .header("Authorization", credentials).build(); + Request authenticatedRequest = + request.newBuilder().header("Authorization", credentials).build(); return chain.proceed(authenticatedRequest); } - } diff --git a/common/src/main/java/org/opensearch/sql/common/grok/Converter.java b/common/src/main/java/org/opensearch/sql/common/grok/Converter.java index ebbe13f761..ddd3a2bbb4 100644 --- a/common/src/main/java/org/opensearch/sql/common/grok/Converter.java +++ b/common/src/main/java/org/opensearch/sql/common/grok/Converter.java @@ -23,9 +23,7 @@ import java.util.regex.Pattern; import java.util.stream.Collectors; -/** - * Convert String argument to the right type. - */ +/** Convert String argument to the right type. 
*/ public class Converter { public enum Type { @@ -51,13 +49,13 @@ public enum Type { private static final Pattern SPLITTER = Pattern.compile("[:;]"); private static final Map TYPES = - Arrays.stream(Type.values()) - .collect(Collectors.toMap(t -> t.name().toLowerCase(), t -> t)); + Arrays.stream(Type.values()).collect(Collectors.toMap(t -> t.name().toLowerCase(), t -> t)); private static final Map TYPE_ALIASES = Arrays.stream(Type.values()) - .flatMap(type -> type.aliases.stream() - .map(alias -> new AbstractMap.SimpleEntry<>(alias, type))) + .flatMap( + type -> + type.aliases.stream().map(alias -> new AbstractMap.SimpleEntry<>(alias, type))) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); private static Type getType(String key) { @@ -69,34 +67,30 @@ private static Type getType(String key) { return type; } - /** - * getConverters. - */ - public static Map> - getConverters(Collection groupNames, Object... params) { + /** getConverters. */ + public static Map> getConverters( + Collection groupNames, Object... params) { return groupNames.stream() .filter(Converter::containsDelimiter) - .collect(Collectors.toMap(Function.identity(), key -> { - String[] list = splitGrokPattern(key); - IConverter converter = getType(list[1]).converter; - if (list.length == 3) { - converter = converter.newConverter(list[2], params); - } - return converter; - })); + .collect( + Collectors.toMap( + Function.identity(), + key -> { + String[] list = splitGrokPattern(key); + IConverter converter = getType(list[1]).converter; + if (list.length == 3) { + converter = converter.newConverter(list[2], params); + } + return converter; + })); } - /** - * getGroupTypes. - */ + /** getGroupTypes. 
*/ public static Map getGroupTypes(Collection groupNames) { return groupNames.stream() .filter(Converter::containsDelimiter) .map(Converter::splitGrokPattern) - .collect(Collectors.toMap( - l -> l[0], - l -> getType(l[1]) - )); + .collect(Collectors.toMap(l -> l[0], l -> getType(l[1]))); } public static String extractKey(String key) { @@ -120,7 +114,6 @@ default IConverter newConverter(String param, Object... params) { } } - static class DateConverter implements IConverter { private final DateTimeFormatter formatter; @@ -138,8 +131,12 @@ private DateConverter(DateTimeFormatter formatter, ZoneId timeZone) { @Override public Instant convert(String value) { - TemporalAccessor dt = formatter - .parseBest(value.trim(), ZonedDateTime::from, LocalDateTime::from, OffsetDateTime::from, + TemporalAccessor dt = + formatter.parseBest( + value.trim(), + ZonedDateTime::from, + LocalDateTime::from, + OffsetDateTime::from, Instant::from, LocalDate::from); if (dt instanceof ZonedDateTime) { diff --git a/common/src/main/java/org/opensearch/sql/common/grok/Grok.java b/common/src/main/java/org/opensearch/sql/common/grok/Grok.java index f20f99cbc3..6dfab3f791 100644 --- a/common/src/main/java/org/opensearch/sql/common/grok/Grok.java +++ b/common/src/main/java/org/opensearch/sql/common/grok/Grok.java @@ -16,36 +16,29 @@ import org.opensearch.sql.common.grok.Converter.IConverter; /** - * {@code Grok} parse arbitrary text and structure it. - *
- * {@code Grok} is simple API that allows you to easily parse logs - * and other files (single line). With {@code Grok}, - * you can turn unstructured log and event data into structured data. + * {@code Grok} parse arbitrary text and structure it.
+ * {@code Grok} is simple API that allows you to easily parse logs and other files (single line). + * With {@code Grok}, you can turn unstructured log and event data into structured data. * * @since 0.0.1 */ public class Grok implements Serializable { - /** - * Named regex of the originalGrokPattern. - */ + /** Named regex of the originalGrokPattern. */ private final String namedRegex; + /** - * Map of the named regex of the originalGrokPattern - * with id = namedregexid and value = namedregex. + * Map of the named regex of the originalGrokPattern with id = namedregexid and value = + * namedregex. */ private final Map namedRegexCollection; - /** - * Original {@code Grok} pattern (expl: %{IP}). - */ + + /** Original {@code Grok} pattern (expl: %{IP}). */ private final String originalGrokPattern; - /** - * Pattern of the namedRegex. - */ + + /** Pattern of the namedRegex. */ private final Pattern compiledNamedRegex; - /** - * {@code Grok} patterns definition. - */ + /** {@code Grok} patterns definition. */ private final Map grokPatternDefinition; public final Set namedGroups; @@ -54,19 +47,16 @@ public class Grok implements Serializable { public final Map> converters; - /** - * only use in grok discovery. - */ + /** only use in grok discovery. */ private String savedPattern = ""; - /** - * Grok. - */ - public Grok(String pattern, - String namedRegex, - Map namedRegexCollection, - Map patternDefinitions, - ZoneId defaultTimeZone) { + /** Grok. */ + public Grok( + String pattern, + String namedRegex, + Map namedRegexCollection, + Map patternDefinitions, + ZoneId defaultTimeZone) { this.originalGrokPattern = pattern; this.namedRegex = namedRegex; this.compiledNamedRegex = Pattern.compile(namedRegex); @@ -132,8 +122,8 @@ public Map getNamedRegexCollection() { } /** - * Match the given log with the named regex. - * And return the json representation of the matched element + * Match the given log with the named regex. 
And return the json representation of the + * matched element * * @param log : log to match * @return map containing matches @@ -144,8 +134,8 @@ public Map capture(String log) { } /** - * Match the given list of log with the named regex - * and return the list of json representation of the matched elements. + * Match the given list of log with the named regex and return the list of json + * representation of the matched elements. * * @param logs : list of log * @return list of maps containing matches @@ -159,8 +149,8 @@ public ArrayList> capture(List logs) { } /** - * Match the given text with the named regex - * {@code Grok} will extract data from the string and get an extence of {@link Match}. + * Match the given text with the named regex {@code Grok} will extract data from the + * string and get an extence of {@link Match}. * * @param text : Single line of log * @return Grok Match @@ -172,9 +162,7 @@ public Match match(CharSequence text) { Matcher matcher = compiledNamedRegex.matcher(text); if (matcher.find()) { - return new Match( - text, this, matcher, matcher.start(0), matcher.end(0) - ); + return new Match(text, this, matcher, matcher.start(0), matcher.end(0)); } return Match.EMPTY; diff --git a/common/src/main/java/org/opensearch/sql/common/grok/GrokCompiler.java b/common/src/main/java/org/opensearch/sql/common/grok/GrokCompiler.java index 18894fc7a1..aba96ad4cb 100644 --- a/common/src/main/java/org/opensearch/sql/common/grok/GrokCompiler.java +++ b/common/src/main/java/org/opensearch/sql/common/grok/GrokCompiler.java @@ -31,13 +31,10 @@ public class GrokCompiler implements Serializable { // We don't want \n and commented line private static final Pattern patternLinePattern = Pattern.compile("^([A-z0-9_]+)\\s+(.*)$"); - /** - * {@code Grok} patterns definitions. - */ + /** {@code Grok} patterns definitions. 
*/ private final Map grokPatternDefinitions = new HashMap<>(); - private GrokCompiler() { - } + private GrokCompiler() {} public static GrokCompiler newInstance() { return new GrokCompiler(); @@ -50,10 +47,10 @@ public Map getPatternDefinitions() { /** * Registers a new pattern definition. * - * @param name : Pattern Name + * @param name : Pattern Name * @param pattern : Regular expression Or {@code Grok} pattern * @throws GrokException runtime expt - **/ + */ public void register(String name, String pattern) { name = Objects.requireNonNull(name).trim(); pattern = Objects.requireNonNull(pattern).trim(); @@ -63,9 +60,7 @@ public void register(String name, String pattern) { } } - /** - * Registers multiple pattern definitions. - */ + /** Registers multiple pattern definitions. */ public void register(Map patternDefinitions) { Objects.requireNonNull(patternDefinitions); patternDefinitions.forEach(this::register); @@ -78,12 +73,9 @@ public void register(InputStream input) throws IOException { register(input, StandardCharsets.UTF_8); } - /** - * Registers multiple pattern definitions from a given inputStream. - */ + /** Registers multiple pattern definitions from a given inputStream. */ public void register(InputStream input, Charset charset) throws IOException { - try ( - BufferedReader in = new BufferedReader(new InputStreamReader(input, charset))) { + try (BufferedReader in = new BufferedReader(new InputStreamReader(input, charset))) { in.lines() .map(patternLinePattern::matcher) .filter(Matcher::matches) @@ -91,11 +83,10 @@ public void register(InputStream input, Charset charset) throws IOException { } } - /** - * Registers multiple pattern definitions from a given Reader. - */ + /** Registers multiple pattern definitions from a given Reader. 
*/ public void register(Reader input) throws IOException { - new BufferedReader(input).lines() + new BufferedReader(input) + .lines() .map(patternLinePattern::matcher) .filter(Matcher::matches) .forEach(m -> register(m.group(1), m.group(2))); @@ -109,9 +100,7 @@ public void registerPatternFromClasspath(String path) throws GrokException { registerPatternFromClasspath(path, StandardCharsets.UTF_8); } - /** - * registerPatternFromClasspath. - */ + /** registerPatternFromClasspath. */ public void registerPatternFromClasspath(String path, Charset charset) throws GrokException { final InputStream inputStream = this.getClass().getResourceAsStream(path); try (Reader reader = new InputStreamReader(inputStream, charset)) { @@ -121,9 +110,7 @@ public void registerPatternFromClasspath(String path, Charset charset) throws Gr } } - /** - * Compiles a given Grok pattern and returns a Grok object which can parse the pattern. - */ + /** Compiles a given Grok pattern and returns a Grok object which can parse the pattern. */ public Grok compile(String pattern) throws IllegalArgumentException { return compile(pattern, false); } @@ -135,11 +122,11 @@ public Grok compile(final String pattern, boolean namedOnly) throws IllegalArgum /** * Compiles a given Grok pattern and returns a Grok object which can parse the pattern. * - * @param pattern : Grok pattern (ex: %{IP}) - * @param defaultTimeZone : time zone used to parse a timestamp when it doesn't contain - * the time zone - * @param namedOnly : Whether to capture named expressions only or not (i.e. %{IP:ip} - * but not ${IP}) + * @param pattern : Grok pattern (ex: %{IP}) + * @param defaultTimeZone : time zone used to parse a timestamp when it doesn't contain the time + * zone + * @param namedOnly : Whether to capture named expressions only or not (i.e. 
%{IP:ip} but not + * ${IP}) * @return a compiled pattern * @throws IllegalArgumentException when pattern definition is invalid */ @@ -184,14 +171,15 @@ public Grok compile(final String pattern, ZoneId defaultTimeZone, boolean namedO for (int i = 0; i < count; i++) { String definitionOfPattern = patternDefinitions.get(group.get("pattern")); if (definitionOfPattern == null) { - throw new IllegalArgumentException(format("No definition for key '%s' found, aborting", - group.get("pattern"))); + throw new IllegalArgumentException( + format("No definition for key '%s' found, aborting", group.get("pattern"))); } String replacement = String.format("(?%s)", index, definitionOfPattern); if (namedOnly && group.get("subname") == null) { replacement = String.format("(?:%s)", definitionOfPattern); } - namedRegexCollection.put("name" + index, + namedRegexCollection.put( + "name" + index, (group.get("subname") != null ? group.get("subname") : group.get("name"))); namedRegex = StringUtils.replace(namedRegex, "%{" + group.get("name") + "}", replacement, 1); @@ -205,12 +193,6 @@ public Grok compile(final String pattern, ZoneId defaultTimeZone, boolean namedO throw new IllegalArgumentException("Pattern not found"); } - return new Grok( - pattern, - namedRegex, - namedRegexCollection, - patternDefinitions, - defaultTimeZone - ); + return new Grok(pattern, namedRegex, namedRegexCollection, patternDefinitions, defaultTimeZone); } } diff --git a/common/src/main/java/org/opensearch/sql/common/grok/GrokUtils.java b/common/src/main/java/org/opensearch/sql/common/grok/GrokUtils.java index 9ff65acde2..4b145bbbe8 100644 --- a/common/src/main/java/org/opensearch/sql/common/grok/GrokUtils.java +++ b/common/src/main/java/org/opensearch/sql/common/grok/GrokUtils.java @@ -12,7 +12,6 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; - /** * {@code GrokUtils} contain set of useful tools or methods. 
* @@ -20,29 +19,25 @@ */ public class GrokUtils { - /** - * Extract Grok patter like %{FOO} to FOO, Also Grok pattern with semantic. - */ - public static final Pattern GROK_PATTERN = Pattern.compile( - "%\\{" - + "(?" - + "(?[A-z0-9]+)" - + "(?::(?[A-z0-9_:;,\\-\\/\\s\\.']+))?" - + ")" - + "(?:=(?" - + "(?:" - + "(?:[^{}]+|\\.+)+" - + ")+" - + ")" - + ")?" - + "\\}"); - - public static final Pattern NAMED_REGEX = Pattern - .compile("\\(\\?<([a-zA-Z][a-zA-Z0-9]*)>"); - - /** - * getNameGroups. - */ + /** Extract Grok patter like %{FOO} to FOO, Also Grok pattern with semantic. */ + public static final Pattern GROK_PATTERN = + Pattern.compile( + "%\\{" + + "(?" + + "(?[A-z0-9]+)" + + "(?::(?[A-z0-9_:;,\\-\\/\\s\\.']+))?" + + ")" + + "(?:=(?" + + "(?:" + + "(?:[^{}]+|\\.+)+" + + ")+" + + ")" + + ")?" + + "\\}"); + + public static final Pattern NAMED_REGEX = Pattern.compile("\\(\\?<([a-zA-Z][a-zA-Z0-9]*)>"); + + /** getNameGroups. */ public static Set getNameGroups(String regex) { Set namedGroups = new LinkedHashSet<>(); Matcher matcher = NAMED_REGEX.matcher(regex); @@ -52,9 +47,7 @@ public static Set getNameGroups(String regex) { return namedGroups; } - /** - * namedGroups. - */ + /** namedGroups. */ public static Map namedGroups(Matcher matcher, Set groupNames) { Map namedGroups = new LinkedHashMap<>(); for (String groupName : groupNames) { diff --git a/common/src/main/java/org/opensearch/sql/common/grok/Match.java b/common/src/main/java/org/opensearch/sql/common/grok/Match.java index 6831f35cee..3771817bba 100644 --- a/common/src/main/java/org/opensearch/sql/common/grok/Match.java +++ b/common/src/main/java/org/opensearch/sql/common/grok/Match.java @@ -5,7 +5,6 @@ package org.opensearch.sql.common.grok; - import static java.lang.String.format; import java.util.ArrayList; @@ -31,9 +30,7 @@ public class Match { private boolean keepEmptyCaptures = true; private Map capture = Collections.emptyMap(); - /** - * Create a new {@code Match} object. 
- */ + /** Create a new {@code Match} object. */ public Match(CharSequence subject, Grok grok, Matcher match, int start, int end) { this.subject = subject; this.grok = grok; @@ -42,9 +39,7 @@ public Match(CharSequence subject, Grok grok, Matcher match, int start, int end) this.end = end; } - /** - * Create Empty grok matcher. - */ + /** Create Empty grok matcher. */ public static final Match EMPTY = new Match("", null, null, 0, 0); public Matcher getMatch() { @@ -59,9 +54,7 @@ public int getEnd() { return end; } - /** - * Ignore empty captures. - */ + /** Ignore empty captures. */ public void setKeepEmptyCaptures(boolean ignore) { // clear any cached captures if (capture.size() > 0) { @@ -97,8 +90,8 @@ public Map capture() { * * @param flattened will it flatten values. * @return the matched elements. - * @throws GrokException if a keys has multiple non-null values, but only if flattened is set - * to true. + * @throws GrokException if a keys has multiple non-null values, but only if flattened is set to + * true. 
*/ private Map capture(boolean flattened) throws GrokException { if (match == null) { @@ -116,70 +109,69 @@ private Map capture(boolean flattened) throws GrokException { Map mappedw = GrokUtils.namedGroups(this.match, this.grok.namedGroups); - mappedw.forEach((key, valueString) -> { - String id = this.grok.getNamedRegexCollectionById(key); - if (id != null && !id.isEmpty()) { - key = id; - } - - if ("UNWANTED".equals(key)) { - return; - } - - Object value = valueString; - if (valueString != null) { - IConverter converter = grok.converters.get(key); - - if (converter != null) { - key = Converter.extractKey(key); - try { - value = converter.convert(valueString); - } catch (Exception e) { - capture.put(key + "_grokfailure", e.toString()); + mappedw.forEach( + (key, valueString) -> { + String id = this.grok.getNamedRegexCollectionById(key); + if (id != null && !id.isEmpty()) { + key = id; } - if (value instanceof String) { - value = cleanString((String) value); + if ("UNWANTED".equals(key)) { + return; } - } else { - value = cleanString(valueString); - } - } else if (!isKeepEmptyCaptures()) { - return; - } - - if (capture.containsKey(key)) { - Object currentValue = capture.get(key); - if (flattened) { - if (currentValue == null && value != null) { - capture.put(key, value); - } - if (currentValue != null && value != null) { - throw new GrokException( - format( - "key '%s' has multiple non-null values, this is not allowed in flattened mode," - + " values:'%s', '%s'", - key, - currentValue, - value)); + Object value = valueString; + if (valueString != null) { + IConverter converter = grok.converters.get(key); + + if (converter != null) { + key = Converter.extractKey(key); + try { + value = converter.convert(valueString); + } catch (Exception e) { + capture.put(key + "_grokfailure", e.toString()); + } + + if (value instanceof String) { + value = cleanString((String) value); + } + } else { + value = cleanString(valueString); + } + } else if (!isKeepEmptyCaptures()) { + 
return; } - } else { - if (currentValue instanceof List) { - @SuppressWarnings("unchecked") - List cvl = (List) currentValue; - cvl.add(value); + + if (capture.containsKey(key)) { + Object currentValue = capture.get(key); + + if (flattened) { + if (currentValue == null && value != null) { + capture.put(key, value); + } + if (currentValue != null && value != null) { + throw new GrokException( + format( + "key '%s' has multiple non-null values, this is not allowed in flattened" + + " mode, values:'%s', '%s'", + key, currentValue, value)); + } + } else { + if (currentValue instanceof List) { + @SuppressWarnings("unchecked") + List cvl = (List) currentValue; + cvl.add(value); + } else { + List list = new ArrayList(); + list.add(currentValue); + list.add(value); + capture.put(key, list); + } + } } else { - List list = new ArrayList(); - list.add(currentValue); - list.add(value); - capture.put(key, list); + capture.put(key, value); } - } - } else { - capture.put(key, value); - } - }); + }); capture = Collections.unmodifiableMap(capture); @@ -189,13 +181,11 @@ private Map capture(boolean flattened) throws GrokException { /** * Match to the subject the regex and save the matched element into a map * - *

Multiple values to the same key are flattened to one value: the sole non-null value will - * be captured. - * Should there be multiple non-null values a RuntimeException is being thrown. + *

Multiple values to the same key are flattened to one value: the sole non-null value will be + * captured. Should there be multiple non-null values a RuntimeException is being thrown. * *

This can be used in cases like: (foo (.*:message) bar|bar (.*:message) foo) where the regexp - * guarantees that only - * one value will be captured. + * guarantees that only one value will be captured. * *

See also {@link #capture} which returns multiple values of the same key as list. * @@ -220,9 +210,7 @@ private String cleanString(String value) { char firstChar = value.charAt(0); char lastChar = value.charAt(value.length() - 1); - if (firstChar == lastChar - && (firstChar == '"' || firstChar == '\'') - ) { + if (firstChar == lastChar && (firstChar == '"' || firstChar == '\'')) { if (value.length() <= 2) { return ""; } else { @@ -249,5 +237,4 @@ private String cleanString(String value) { public Boolean isNull() { return this.match == null; } - } diff --git a/common/src/main/java/org/opensearch/sql/common/grok/exception/GrokException.java b/common/src/main/java/org/opensearch/sql/common/grok/exception/GrokException.java index 54ca8aada3..0e9d6d2ddf 100644 --- a/common/src/main/java/org/opensearch/sql/common/grok/exception/GrokException.java +++ b/common/src/main/java/org/opensearch/sql/common/grok/exception/GrokException.java @@ -6,9 +6,8 @@ package org.opensearch.sql.common.grok.exception; /** - * Signals that an {@code Grok} exception of some sort has occurred. - * This class is the general class of - * exceptions produced by failed or interrupted Grok operations. + * Signals that an {@code Grok} exception of some sort has occurred. This class is the general class + * of exceptions produced by failed or interrupted Grok operations. * * @since 0.0.4 */ @@ -16,9 +15,7 @@ public class GrokException extends RuntimeException { private static final long serialVersionUID = 1L; - /** - * Creates a new GrokException. - */ + /** Creates a new GrokException. */ public GrokException() { super(); } @@ -27,7 +24,7 @@ public GrokException() { * Constructs a new GrokException. * * @param message the reason for the exception - * @param cause the underlying Throwable that caused this exception to be thrown. + * @param cause the underlying Throwable that caused this exception to be thrown. 
*/ public GrokException(String message, Throwable cause) { super(message, cause); @@ -50,5 +47,4 @@ public GrokException(String message) { public GrokException(Throwable cause) { super(cause); } - } diff --git a/common/src/main/java/org/opensearch/sql/common/response/ResponseListener.java b/common/src/main/java/org/opensearch/sql/common/response/ResponseListener.java index 3d5eadc692..bac79ddbbd 100644 --- a/common/src/main/java/org/opensearch/sql/common/response/ResponseListener.java +++ b/common/src/main/java/org/opensearch/sql/common/response/ResponseListener.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.common.response; /** diff --git a/common/src/main/java/org/opensearch/sql/common/setting/LegacySettings.java b/common/src/main/java/org/opensearch/sql/common/setting/LegacySettings.java index 172a0d8023..e8dc76645a 100644 --- a/common/src/main/java/org/opensearch/sql/common/setting/LegacySettings.java +++ b/common/src/main/java/org/opensearch/sql/common/setting/LegacySettings.java @@ -3,42 +3,31 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.common.setting; import lombok.Getter; import lombok.RequiredArgsConstructor; -/** - * Legacy Open Distro Settings. - */ +/** Legacy Open Distro Settings. */ public abstract class LegacySettings { @RequiredArgsConstructor public enum Key { - /** - * Legacy SQL Settings. - */ + /** Legacy SQL Settings. */ SQL_ENABLED("opendistro.sql.enabled"), SQL_QUERY_SLOWLOG("opendistro.sql.query.slowlog"), SQL_CURSOR_KEEPALIVE("opendistro.sql.cursor.keep_alive"), METRICS_ROLLING_WINDOW("opendistro.sql.metrics.rollingwindow"), METRICS_ROLLING_INTERVAL("opendistro.sql.metrics.rollinginterval"), - /** - * Legacy PPL Settings. - */ + /** Legacy PPL Settings. */ PPL_ENABLED("opendistro.ppl.enabled"), PPL_QUERY_MEMORY_LIMIT("opendistro.ppl.query.memory_limit"), - /** - * Legacy Common Settings. - */ + /** Legacy Common Settings. 
*/ QUERY_SIZE_LIMIT("opendistro.query.size_limit"), - /** - * Deprecated Settings. - */ + /** Deprecated Settings. */ SQL_NEW_ENGINE_ENABLED("opendistro.sql.engine.new.enabled"), QUERY_ANALYSIS_ENABLED("opendistro.sql.query.analysis.enabled"), QUERY_ANALYSIS_SEMANTIC_SUGGESTION("opendistro.sql.query.analysis.semantic.suggestion"), @@ -47,8 +36,7 @@ public enum Key { SQL_CURSOR_ENABLED("opendistro.sql.cursor.enabled"), SQL_CURSOR_FETCH_SIZE("opendistro.sql.cursor.fetch_size"); - @Getter - private final String keyValue; + @Getter private final String keyValue; } public abstract T getSettingValue(Key key); diff --git a/common/src/main/java/org/opensearch/sql/common/setting/Settings.java b/common/src/main/java/org/opensearch/sql/common/setting/Settings.java index 3b0eba157d..1e5243f91f 100644 --- a/common/src/main/java/org/opensearch/sql/common/setting/Settings.java +++ b/common/src/main/java/org/opensearch/sql/common/setting/Settings.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.common.setting; import com.google.common.base.Strings; @@ -14,29 +13,21 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; -/** - * Setting. - */ +/** Setting. */ public abstract class Settings { @RequiredArgsConstructor public enum Key { - /** - * SQL Settings. - */ + /** SQL Settings. */ SQL_ENABLED("plugins.sql.enabled"), SQL_SLOWLOG("plugins.sql.slowlog"), SQL_CURSOR_KEEP_ALIVE("plugins.sql.cursor.keep_alive"), SQL_DELETE_ENABLED("plugins.sql.delete.enabled"), - /** - * PPL Settings. - */ + /** PPL Settings. */ PPL_ENABLED("plugins.ppl.enabled"), - /** - * Common Settings for SQL and PPL. - */ + /** Common Settings for SQL and PPL. 
*/ QUERY_MEMORY_LIMIT("plugins.query.memory_limit"), QUERY_SIZE_LIMIT("plugins.query.size_limit"), ENCYRPTION_MASTER_KEY("plugins.query.datasources.encryption.masterkey"), @@ -47,8 +38,7 @@ public enum Key { CLUSTER_NAME("cluster.name"); - @Getter - private final String keyValue; + @Getter private final String keyValue; private static final Map ALL_KEYS; @@ -66,9 +56,7 @@ public static Optional of(String keyValue) { } } - /** - * Get Setting Value. - */ + /** Get Setting Value. */ public abstract T getSettingValue(Key key); public abstract List getSettings(); diff --git a/common/src/main/java/org/opensearch/sql/common/utils/QueryContext.java b/common/src/main/java/org/opensearch/sql/common/utils/QueryContext.java index ab11029d73..686263238a 100644 --- a/common/src/main/java/org/opensearch/sql/common/utils/QueryContext.java +++ b/common/src/main/java/org/opensearch/sql/common/utils/QueryContext.java @@ -3,35 +3,30 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.common.utils; -import java.time.LocalDateTime; import java.util.Map; -import java.util.Optional; import java.util.UUID; import org.apache.logging.log4j.ThreadContext; /** - * Utility class for recording and accessing context for the query being executed. - * Implementation Details: context variables is being persisted statically in the thread context + * Utility class for recording and accessing context for the query being executed. Implementation + * Details: context variables is being persisted statically in the thread context + * * @see: @ThreadContext */ public class QueryContext { - /** - * The key of the request id in the context map. - */ + /** The key of the request id in the context map. */ private static final String REQUEST_ID_KEY = "request_id"; /** * Generates a random UUID and adds to the {@link ThreadContext} as the request id. - *

- * Note: If a request id already present, this method will overwrite it with a new - * one. This is to pre-vent re-using the same request id for different requests in - * case the same thread handles both of them. But this also means one should not - * call this method twice on the same thread within the lifetime of the request. - *

+ * + *

Note: If a request id already present, this method will overwrite it with a new one. This is + * to pre-vent re-using the same request id for different requests in case the same thread handles + * both of them. But this also means one should not call this method twice on the same thread + * within the lifetime of the request. */ public static String addRequestId() { var id = UUID.randomUUID().toString(); @@ -41,6 +36,7 @@ public static String addRequestId() { /** * Get RequestID. + * * @return the current request id from {@link ThreadContext}. */ public static String getRequestId() { @@ -52,8 +48,8 @@ public static String getRequestId() { } /** - * Wraps a given instance of {@link Runnable} into a new one which gets all the - * entries from current ThreadContext map. + * Wraps a given instance of {@link Runnable} into a new one which gets all the entries from + * current ThreadContext map. * * @param task the instance of Runnable to wrap * @return the new task diff --git a/common/src/main/java/org/opensearch/sql/common/utils/StringUtils.java b/common/src/main/java/org/opensearch/sql/common/utils/StringUtils.java index 0699245338..c81f56ef63 100644 --- a/common/src/main/java/org/opensearch/sql/common/utils/StringUtils.java +++ b/common/src/main/java/org/opensearch/sql/common/utils/StringUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.common.utils; import com.google.common.base.Strings; @@ -13,6 +12,7 @@ public class StringUtils { /** * Unquote any string with mark specified. + * * @param text string * @param mark quotation mark * @return An unquoted string whose outer pair of (single/double/back-tick) quotes have been @@ -26,11 +26,10 @@ public static String unquote(String text, String mark) { } /** - * Unquote Identifier which has " or ' or ` as mark. 
- * Strings quoted by ' or " with two of these quotes appearing next to each other in the quote - * acts as an escape - * Example: 'Test''s' will result in 'Test's', similar with those single quotes being replaced - * with double. + * Unquote Identifier which has " or ' or ` as mark. Strings quoted by ' or " with two of these + * quotes appearing next to each other in the quote acts as an escape Example: 'Test''s' will + * result in 'Test's', similar with those single quotes being replaced with double. + * * @param text string * @return An unquoted string whose outer pair of (single/double/back-tick) quotes have been * removed @@ -45,10 +44,7 @@ public static String unquoteText(String text) { char firstChar = text.charAt(0); char lastChar = text.charAt(text.length() - 1); - if (firstChar == lastChar - && (firstChar == '\'' - || firstChar == '"' - || firstChar == '`')) { + if (firstChar == lastChar && (firstChar == '\'' || firstChar == '"' || firstChar == '`')) { enclosingQuote = firstChar; } else { return text; @@ -67,8 +63,7 @@ public static String unquoteText(String text) { for (int chIndex = 1; chIndex < text.length() - 1; chIndex++) { currentChar = text.charAt(chIndex); nextChar = text.charAt(chIndex + 1); - if (currentChar == enclosingQuote - && nextChar == currentChar) { + if (currentChar == enclosingQuote && nextChar == currentChar) { chIndex++; } textSB.append(currentChar); @@ -79,9 +74,9 @@ public static String unquoteText(String text) { /** * Unquote Identifier which has ` as mark. 
+ * * @param identifier identifier that possibly enclosed by double quotes or back ticks - * @return An unquoted string whose outer pair of (double/back-tick) quotes have been - * removed + * @return An unquoted string whose outer pair of (double/back-tick) quotes have been removed */ public static String unquoteIdentifier(String identifier) { if (isQuoted(identifier, "`")) { @@ -92,16 +87,15 @@ public static String unquoteIdentifier(String identifier) { } /** - * Returns a formatted string using the specified format string and - * arguments, as well as the {@link Locale#ROOT} locale. + * Returns a formatted string using the specified format string and arguments, as well as the + * {@link Locale#ROOT} locale. * * @param format format string - * @param args arguments referenced by the format specifiers in the format string + * @param args arguments referenced by the format specifiers in the format string * @return A formatted string * @throws IllegalFormatException If a format string contains an illegal syntax, a format - * specifier that is incompatible with the given arguments, - * insufficient arguments given the format string, or other - * illegal conditions. + * specifier that is incompatible with the given arguments, insufficient arguments given the + * format string, or other illegal conditions. * @see java.lang.String#format(Locale, String, Object...) */ public static String format(final String format, Object... 
args) { diff --git a/common/src/test/java/org/opensearch/sql/common/authinterceptors/AwsSigningInterceptorTest.java b/common/src/test/java/org/opensearch/sql/common/authinterceptors/AwsSigningInterceptorTest.java index 894f3974ce..435ac9dc93 100644 --- a/common/src/test/java/org/opensearch/sql/common/authinterceptors/AwsSigningInterceptorTest.java +++ b/common/src/test/java/org/opensearch/sql/common/authinterceptors/AwsSigningInterceptorTest.java @@ -7,9 +7,6 @@ package org.opensearch.sql.common.authinterceptors; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.auth.AWSSessionCredentials; import com.amazonaws.auth.AWSStaticCredentialsProvider; @@ -26,42 +23,40 @@ import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; -import org.opensearch.sql.common.authinterceptors.AwsSigningInterceptor; @ExtendWith(MockitoExtension.class) public class AwsSigningInterceptorTest { - @Mock - private Interceptor.Chain chain; + @Mock private Interceptor.Chain chain; - @Captor - ArgumentCaptor requestArgumentCaptor; + @Captor ArgumentCaptor requestArgumentCaptor; - @Mock - private STSAssumeRoleSessionCredentialsProvider stsAssumeRoleSessionCredentialsProvider; + @Mock private STSAssumeRoleSessionCredentialsProvider stsAssumeRoleSessionCredentialsProvider; @Test void testConstructors() { - Assertions.assertThrows(NullPointerException.class, () -> - new AwsSigningInterceptor(null, "us-east-1", "aps")); - Assertions.assertThrows(NullPointerException.class, () -> - new AwsSigningInterceptor(getStaticAWSCredentialsProvider("accessKey", "secretKey"), null, - "aps")); - Assertions.assertThrows(NullPointerException.class, () -> - new AwsSigningInterceptor(getStaticAWSCredentialsProvider("accessKey", "secretKey"), - "us-east-1", null)); + Assertions.assertThrows( + NullPointerException.class, () -> new AwsSigningInterceptor(null, 
"us-east-1", "aps")); + Assertions.assertThrows( + NullPointerException.class, + () -> + new AwsSigningInterceptor( + getStaticAWSCredentialsProvider("accessKey", "secretKey"), null, "aps")); + Assertions.assertThrows( + NullPointerException.class, + () -> + new AwsSigningInterceptor( + getStaticAWSCredentialsProvider("accessKey", "secretKey"), "us-east-1", null)); } @Test @SneakyThrows void testIntercept() { - Mockito.when(chain.request()).thenReturn(new Request.Builder() - .url("http://localhost:9090") - .build()); - AwsSigningInterceptor awsSigningInterceptor - = new AwsSigningInterceptor( - getStaticAWSCredentialsProvider("testAccessKey", "testSecretKey"), - "us-east-1", "aps"); + Mockito.when(chain.request()) + .thenReturn(new Request.Builder().url("http://localhost:9090").build()); + AwsSigningInterceptor awsSigningInterceptor = + new AwsSigningInterceptor( + getStaticAWSCredentialsProvider("testAccessKey", "testSecretKey"), "us-east-1", "aps"); awsSigningInterceptor.intercept(chain); Mockito.verify(chain).proceed(requestArgumentCaptor.capture()); Request request = requestArgumentCaptor.getValue(); @@ -70,31 +65,26 @@ void testIntercept() { Assertions.assertNotNull(request.headers("host")); } - @Test @SneakyThrows void testSTSCredentialsProviderInterceptor() { - Mockito.when(chain.request()).thenReturn(new Request.Builder() - .url("http://localhost:9090") - .build()); + Mockito.when(chain.request()) + .thenReturn(new Request.Builder().url("http://localhost:9090").build()); Mockito.when(stsAssumeRoleSessionCredentialsProvider.getCredentials()) .thenReturn(getAWSSessionCredentials()); - AwsSigningInterceptor awsSigningInterceptor - = new AwsSigningInterceptor(stsAssumeRoleSessionCredentialsProvider, - "us-east-1", "aps"); + AwsSigningInterceptor awsSigningInterceptor = + new AwsSigningInterceptor(stsAssumeRoleSessionCredentialsProvider, "us-east-1", "aps"); awsSigningInterceptor.intercept(chain); Mockito.verify(chain).proceed(requestArgumentCaptor.capture()); 
Request request = requestArgumentCaptor.getValue(); Assertions.assertNotNull(request.headers("Authorization")); Assertions.assertNotNull(request.headers("x-amz-date")); Assertions.assertNotNull(request.headers("host")); - Assertions.assertEquals("session_token", - request.headers("x-amz-security-token").get(0)); + Assertions.assertEquals("session_token", request.headers("x-amz-security-token").get(0)); } - - private AWSCredentialsProvider getStaticAWSCredentialsProvider(String accessKey, - String secretKey) { + private AWSCredentialsProvider getStaticAWSCredentialsProvider( + String accessKey, String secretKey) { return new AWSStaticCredentialsProvider(new BasicAWSCredentials(accessKey, secretKey)); } @@ -116,5 +106,4 @@ public String getAWSSecretKey() { } }; } - } diff --git a/common/src/test/java/org/opensearch/sql/common/authinterceptors/BasicAuthenticationInterceptorTest.java b/common/src/test/java/org/opensearch/sql/common/authinterceptors/BasicAuthenticationInterceptorTest.java index 596894da6d..d59928d2ef 100644 --- a/common/src/test/java/org/opensearch/sql/common/authinterceptors/BasicAuthenticationInterceptorTest.java +++ b/common/src/test/java/org/opensearch/sql/common/authinterceptors/BasicAuthenticationInterceptorTest.java @@ -24,29 +24,25 @@ @ExtendWith(MockitoExtension.class) public class BasicAuthenticationInterceptorTest { - @Mock - private Interceptor.Chain chain; + @Mock private Interceptor.Chain chain; - @Captor - ArgumentCaptor requestArgumentCaptor; + @Captor ArgumentCaptor requestArgumentCaptor; @Test void testConstructors() { - Assertions.assertThrows(NullPointerException.class, () -> - new BasicAuthenticationInterceptor(null, "test")); - Assertions.assertThrows(NullPointerException.class, () -> - new BasicAuthenticationInterceptor("testAdmin", null)); + Assertions.assertThrows( + NullPointerException.class, () -> new BasicAuthenticationInterceptor(null, "test")); + Assertions.assertThrows( + NullPointerException.class, () -> new 
BasicAuthenticationInterceptor("testAdmin", null)); } - @Test @SneakyThrows void testIntercept() { - Mockito.when(chain.request()).thenReturn(new Request.Builder() - .url("http://localhost:9090") - .build()); - BasicAuthenticationInterceptor basicAuthenticationInterceptor - = new BasicAuthenticationInterceptor("testAdmin", "testPassword"); + Mockito.when(chain.request()) + .thenReturn(new Request.Builder().url("http://localhost:9090").build()); + BasicAuthenticationInterceptor basicAuthenticationInterceptor = + new BasicAuthenticationInterceptor("testAdmin", "testPassword"); basicAuthenticationInterceptor.intercept(chain); Mockito.verify(chain).proceed(requestArgumentCaptor.capture()); Request request = requestArgumentCaptor.getValue(); @@ -54,5 +50,4 @@ void testIntercept() { Collections.singletonList(Credentials.basic("testAdmin", "testPassword")), request.headers("Authorization")); } - } diff --git a/common/src/test/java/org/opensearch/sql/common/grok/ApacheDataTypeTest.java b/common/src/test/java/org/opensearch/sql/common/grok/ApacheDataTypeTest.java index 09695c8220..7eb0e964de 100644 --- a/common/src/test/java/org/opensearch/sql/common/grok/ApacheDataTypeTest.java +++ b/common/src/test/java/org/opensearch/sql/common/grok/ApacheDataTypeTest.java @@ -5,7 +5,6 @@ package org.opensearch.sql.common.grok; - import static org.junit.Assert.assertEquals; import com.google.common.io.Resources; @@ -42,12 +41,13 @@ public void setup() throws Exception { @Test public void test002_httpd_access_semi() throws GrokException { - Grok grok = compiler.compile( - "%{IPORHOST:clientip} %{USER:ident;boolean} %{USER:auth} " - + "\\[%{HTTPDATE:timestamp;date;dd/MMM/yyyy:HH:mm:ss Z}\\] \"(?:%{WORD:verb;string} " - + "%{NOTSPACE:request}" - + "(?: HTTP/%{NUMBER:httpversion;float})?|%{DATA:rawrequest})\" %{NUMBER:response;int} " - + "(?:%{NUMBER:bytes;long}|-)"); + Grok grok = + compiler.compile( + "%{IPORHOST:clientip} %{USER:ident;boolean} %{USER:auth}" + + " 
\\[%{HTTPDATE:timestamp;date;dd/MMM/yyyy:HH:mm:ss Z}\\]" + + " \"(?:%{WORD:verb;string} %{NOTSPACE:request}(?:" + + " HTTP/%{NUMBER:httpversion;float})?|%{DATA:rawrequest})\" %{NUMBER:response;int}" + + " (?:%{NUMBER:bytes;long}|-)"); System.out.println(line); Match gm = grok.match(line); @@ -61,17 +61,17 @@ public void test002_httpd_access_semi() throws GrokException { assertEquals(map.get("httpversion"), 1.1f); assertEquals(map.get("bytes"), 12846L); assertEquals("GET", map.get("verb")); - } @Test public void test002_httpd_access_colon() throws GrokException { - Grok grok = compiler.compile( - "%{IPORHOST:clientip} %{USER:ident:boolean} %{USER:auth} " - + "\\[%{HTTPDATE:timestamp:date:dd/MMM/yyyy:HH:mm:ss Z}\\] \"(?:%{WORD:verb:string} " - + "%{NOTSPACE:request}" - + "(?: HTTP/%{NUMBER:httpversion:float})?|%{DATA:rawrequest})\" %{NUMBER:response:int} " - + "(?:%{NUMBER:bytes:long}|-)"); + Grok grok = + compiler.compile( + "%{IPORHOST:clientip} %{USER:ident:boolean} %{USER:auth}" + + " \\[%{HTTPDATE:timestamp:date:dd/MMM/yyyy:HH:mm:ss Z}\\]" + + " \"(?:%{WORD:verb:string} %{NOTSPACE:request}(?:" + + " HTTP/%{NUMBER:httpversion:float})?|%{DATA:rawrequest})\" %{NUMBER:response:int}" + + " (?:%{NUMBER:bytes:long}|-)"); Match gm = grok.match(line); Map map = gm.capture(); @@ -85,6 +85,5 @@ public void test002_httpd_access_colon() throws GrokException { assertEquals(map.get("httpversion"), 1.1f); assertEquals(map.get("bytes"), 12846L); assertEquals("GET", map.get("verb")); - } } diff --git a/common/src/test/java/org/opensearch/sql/common/grok/ApacheTest.java b/common/src/test/java/org/opensearch/sql/common/grok/ApacheTest.java index 33113d1996..db420b16d3 100644 --- a/common/src/test/java/org/opensearch/sql/common/grok/ApacheTest.java +++ b/common/src/test/java/org/opensearch/sql/common/grok/ApacheTest.java @@ -5,7 +5,6 @@ package org.opensearch.sql.common.grok; - import com.google.common.io.Resources; import java.io.BufferedReader; import java.io.File; @@ -65,5 +64,4 
@@ public void test002_nasa_httpd_access() throws GrokException, IOException { br.close(); } } - } diff --git a/common/src/test/java/org/opensearch/sql/common/grok/BasicTest.java b/common/src/test/java/org/opensearch/sql/common/grok/BasicTest.java index 26df7ba57e..c724b58f3e 100644 --- a/common/src/test/java/org/opensearch/sql/common/grok/BasicTest.java +++ b/common/src/test/java/org/opensearch/sql/common/grok/BasicTest.java @@ -33,8 +33,7 @@ @FixMethodOrder(MethodSorters.NAME_ASCENDING) public class BasicTest { - @Rule - public TemporaryFolder tempFolder = new TemporaryFolder(); + @Rule public TemporaryFolder tempFolder = new TemporaryFolder(); private GrokCompiler compiler; @@ -111,8 +110,8 @@ public void test005_testLoadPatternFromFile() throws IOException, GrokException public void test006_testLoadPatternFromFileIso_8859_1() throws IOException, GrokException { File temp = tempFolder.newFile("grok-tmp-pattern"); try (FileOutputStream fis = new FileOutputStream(temp); - BufferedWriter bw = new BufferedWriter( - new OutputStreamWriter(fis, StandardCharsets.ISO_8859_1))) { + BufferedWriter bw = + new BufferedWriter(new OutputStreamWriter(fis, StandardCharsets.ISO_8859_1))) { bw.write("TEST \\u2022"); } @@ -130,5 +129,4 @@ public void test007_testLoadPatternFromReader() throws IOException, GrokExceptio Grok grok = compiler.compile("%{TEST}"); assertEquals("(?\\u20AC)", grok.getNamedRegex()); } - } diff --git a/common/src/test/java/org/opensearch/sql/common/grok/CaptureTest.java b/common/src/test/java/org/opensearch/sql/common/grok/CaptureTest.java index 1173541e16..60e2761c83 100644 --- a/common/src/test/java/org/opensearch/sql/common/grok/CaptureTest.java +++ b/common/src/test/java/org/opensearch/sql/common/grok/CaptureTest.java @@ -98,7 +98,8 @@ public void test005_captureSubName() throws GrokException { Map map = match.capture(); assertEquals(1, map.size()); assertEquals("Hello", map.get(subname).toString()); - 
assertEquals("{abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_abcdef=Hello}", + assertEquals( + "{abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_abcdef=Hello}", map.toString()); } @@ -145,7 +146,8 @@ public void test008_flattenDuplicateKeys() throws GrokException { m3.captureFlattened(); fail("should report error due tu ambiguity"); } catch (RuntimeException e) { - assertThat(e.getMessage(), + assertThat( + e.getMessage(), containsString("has multiple non-null values, this is not allowed in flattened mode")); } } diff --git a/common/src/test/java/org/opensearch/sql/common/grok/GrokDocumentationTest.java b/common/src/test/java/org/opensearch/sql/common/grok/GrokDocumentationTest.java index 22115a825f..15d450e812 100644 --- a/common/src/test/java/org/opensearch/sql/common/grok/GrokDocumentationTest.java +++ b/common/src/test/java/org/opensearch/sql/common/grok/GrokDocumentationTest.java @@ -40,23 +40,43 @@ public void assureCodeInReadmeWorks() { Assertions.assertThat(capture).hasSize(22); final boolean debug = false; - final Object[] keywordArray = new Object[] {"COMBINEDAPACHELOG", - "COMMONAPACHELOG", "clientip", "ident", "auth", "timestamp", "MONTHDAY", - "MONTH", "YEAR", "TIME", "HOUR", "MINUTE", "SECOND", "INT", "verb", - "httpversion", "rawrequest", "request", "response", "bytes", "referrer", - "agent"}; + final Object[] keywordArray = + new Object[] { + "COMBINEDAPACHELOG", + "COMMONAPACHELOG", + "clientip", + "ident", + "auth", + "timestamp", + "MONTHDAY", + "MONTH", + "YEAR", + "TIME", + "HOUR", + "MINUTE", + "SECOND", + "INT", + "verb", + "httpversion", + "rawrequest", + "request", + "response", + "bytes", + "referrer", + "agent" + }; if (debug) { capture.keySet().stream().forEach(System.err::println); } - assertTrue(new HashSet(Arrays.asList(keywordArray)) - .containsAll(new HashSet(capture.keySet()))); + assertTrue( + new HashSet(Arrays.asList(keywordArray)) + .containsAll(new HashSet(capture.keySet()))); 
Arrays.asList(keywordArray).stream() .forEach(o -> assertThat(capture.keySet(), hasItem((String) o))); - assertThat(new HashSet(capture.keySet()), - containsInAnyOrder(keywordArray)); - assertTrue(new HashSet(capture.keySet()) - .containsAll(new HashSet(Arrays.asList(keywordArray)))); - + assertThat(new HashSet(capture.keySet()), containsInAnyOrder(keywordArray)); + assertTrue( + new HashSet(capture.keySet()) + .containsAll(new HashSet(Arrays.asList(keywordArray)))); } } diff --git a/common/src/test/java/org/opensearch/sql/common/grok/GrokTest.java b/common/src/test/java/org/opensearch/sql/common/grok/GrokTest.java index b5e8366807..862f9b8195 100644 --- a/common/src/test/java/org/opensearch/sql/common/grok/GrokTest.java +++ b/common/src/test/java/org/opensearch/sql/common/grok/GrokTest.java @@ -37,7 +37,6 @@ import org.junit.runners.MethodSorters; import org.opensearch.sql.common.grok.exception.GrokException; - @FixMethodOrder(MethodSorters.NAME_ASCENDING) public class GrokTest { @@ -138,7 +137,6 @@ public void test002_numbers() { Match gm = grok.match("-42"); Map map = gm.capture(); assertEquals("{NUMBER=-42}", map.toString()); - } @Test @@ -152,7 +150,6 @@ public void test003_word() { gm = grok.match("abc"); map = gm.capture(); assertEquals("{WORD=abc}", map.toString()); - } @Test @@ -162,7 +159,6 @@ public void test004_space() { Match gm = grok.match("abc dc"); Map map = gm.capture(); assertEquals("{SPACE=}", map.toString()); - } @Test @@ -172,7 +168,6 @@ public void test004_number() { Match gm = grok.match("Something costs $55.4!"); Map map = gm.capture(); assertEquals("{NUMBER=55.4}", map.toString()); - } @Test @@ -182,7 +177,6 @@ public void test005_notSpace() { Match gm = grok.match("abc dc"); Map map = gm.capture(); assertEquals("{NOTSPACE=abc}", map.toString()); - } @Test @@ -209,7 +203,6 @@ public void test007_uuid() { gm = grok.match("03A8413C-F604-4D21-8F4D-24B19D98B5A7"); map = gm.capture(); assertEquals("{UUID=03A8413C-F604-4D21-8F4D-24B19D98B5A7}", 
map.toString()); - } @Test @@ -219,7 +212,6 @@ public void test008_mac() { Match gm = grok.match("5E:FF:56:A2:AF:15"); Map map = gm.capture(); assertEquals("{MAC=5E:FF:56:A2:AF:15}", map.toString()); - } @Test @@ -241,10 +233,12 @@ public void test010_hostPort() { Match gm = grok.match("www.google.fr:80"); Map map = gm.capture(); - assertEquals(ImmutableMap.of( - "HOSTPORT", "www.google.fr:80", - "IPORHOST", "www.google.fr", - "PORT", "80"), map); + assertEquals( + ImmutableMap.of( + "HOSTPORT", "www.google.fr:80", + "IPORHOST", "www.google.fr", + "PORT", "80"), + map); } @Test @@ -267,10 +261,11 @@ public void test011_combineApache() { assertEquals(map.get("TIME").toString(), "01:36:30"); gm = - grok.match("112.169.19.192 - - [06/Mar/2013:01:36:30 +0900] \"GET " - + "/wp-content/plugins/easy-table/themes/default/style.css?ver=1.0 HTTP/1.1\" " - + "304 - \"http://www.nflabs.com/\" \"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2) " - + "AppleWebKit/537.22 (KHTML, like Gecko) Chrome/25.0.1364.152 Safari/537.22\""); + grok.match( + "112.169.19.192 - - [06/Mar/2013:01:36:30 +0900] \"GET" + + " /wp-content/plugins/easy-table/themes/default/style.css?ver=1.0 HTTP/1.1\" 304" + + " - \"http://www.nflabs.com/\" \"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2)" + + " AppleWebKit/537.22 (KHTML, like Gecko) Chrome/25.0.1364.152 Safari/537.22\""); map = gm.capture(); assertEquals( map.get("agent").toString(), @@ -278,7 +273,8 @@ public void test011_combineApache() { + "Chrome/25.0.1364.152 Safari/537.22"); assertEquals(map.get("clientip").toString(), "112.169.19.192"); assertEquals(map.get("httpversion").toString(), "1.1"); - assertEquals(map.get("request").toString(), + assertEquals( + map.get("request").toString(), "/wp-content/plugins/easy-table/themes/default/style.css?ver=1.0"); assertEquals(map.get("TIME").toString(), "01:36:30"); } @@ -319,7 +315,7 @@ public void test013_IpSet() throws Throwable { Grok grok = compiler.compile("%{IP}"); try (FileReader fr = new 
FileReader(Resources.getResource(ResourceManager.IP).getFile()); - BufferedReader br = new BufferedReader(fr)) { + BufferedReader br = new BufferedReader(fr)) { String line; System.out.println("Starting test with ip"); while ((line = br.readLine()) != null) { @@ -336,10 +332,31 @@ public void test014_month() { Grok grok = compiler.compile("%{MONTH}"); - String[] months = - {"Jan", "January", "Feb", "February", "Mar", "March", "Apr", "April", "May", "Jun", "June", - "Jul", "July", "Aug", "August", "Sep", "September", "Oct", "October", "Nov", - "November", "Dec", "December"}; + String[] months = { + "Jan", + "January", + "Feb", + "February", + "Mar", + "March", + "Apr", + "April", + "May", + "Jun", + "June", + "Jul", + "July", + "Aug", + "August", + "Sep", + "September", + "Oct", + "October", + "Nov", + "November", + "Dec", + "December" + }; int counter = 0; for (String month : months) { Match match = grok.match(month); @@ -355,20 +372,21 @@ public void test015_iso8601() throws GrokException { Grok grok = compiler.compile("%{TIMESTAMP_ISO8601}"); String[] times = { - "2001-01-01T00:00:00", - "1974-03-02T04:09:09", - "2010-05-03T08:18:18+00:00", - "2004-07-04T12:27:27-00:00", - "2001-09-05T16:36:36+0000", - "2001-11-06T20:45:45-0000", - "2001-12-07T23:54:54Z", - "2001-01-01T00:00:00.123456", - "1974-03-02T04:09:09.123456", - "2010-05-03T08:18:18.123456+00:00", - "2004-07-04T12:27:27.123456-00:00", - "2001-09-05T16:36:36.123456+0000", - "2001-11-06T20:45:45.123456-0000", - "2001-12-07T23:54:54.123456Z"}; + "2001-01-01T00:00:00", + "1974-03-02T04:09:09", + "2010-05-03T08:18:18+00:00", + "2004-07-04T12:27:27-00:00", + "2001-09-05T16:36:36+0000", + "2001-11-06T20:45:45-0000", + "2001-12-07T23:54:54Z", + "2001-01-01T00:00:00.123456", + "1974-03-02T04:09:09.123456", + "2010-05-03T08:18:18.123456+00:00", + "2004-07-04T12:27:27.123456-00:00", + "2001-09-05T16:36:36.123456+0000", + "2001-11-06T20:45:45.123456-0000", + "2001-12-07T23:54:54.123456Z" + }; int counter = 0; for 
(String time : times) { @@ -385,33 +403,34 @@ public void test016_uri() throws GrokException { Grok grok = compiler.compile("%{URI}"); String[] uris = { - "http://www.google.com", - "telnet://helloworld", - "http://www.example.com/", - "http://www.example.com/test.html", - "http://www.example.com/test.html?foo=bar", - "http://www.example.com/test.html?foo=bar&fizzle=baz", - "http://www.example.com:80/test.html?foo=bar&fizzle=baz", - "https://www.example.com:443/test.html?foo=bar&fizzle=baz", - "https://user@www.example.com:443/test.html?foo=bar&fizzle=baz", - "https://user:pass@somehost/fetch.pl", - "puppet:///", - "http://www.foo.com", - "http://www.foo.com/", - "http://www.foo.com/?testing", - "http://www.foo.com/?one=two", - "http://www.foo.com/?one=two&foo=bar", - "foo://somehost.com:12345", - "foo://user@somehost.com:12345", - "foo://user@somehost.com:12345/", - "foo://user@somehost.com:12345/foo.bar/baz/fizz", - "foo://user@somehost.com:12345/foo.bar/baz/fizz?test", - "foo://user@somehost.com:12345/foo.bar/baz/fizz?test=1&sink&foo=4", - "http://www.google.com/search?hl=en&source=hp&q=hello+world+%5E%40%23%24&btnG=Google+Search", - "http://www.freebsd.org/cgi/url.cgi?ports/sysutils/grok/pkg-descr", - "http://www.google.com/search?q=CAPTCHA+ssh&start=0&ie=utf-8&oe=utf-8&client=firefox-a" - + "&rls=org.mozilla:en-US:official", - "svn+ssh://somehost:12345/testing"}; + "http://www.google.com", + "telnet://helloworld", + "http://www.example.com/", + "http://www.example.com/test.html", + "http://www.example.com/test.html?foo=bar", + "http://www.example.com/test.html?foo=bar&fizzle=baz", + "http://www.example.com:80/test.html?foo=bar&fizzle=baz", + "https://www.example.com:443/test.html?foo=bar&fizzle=baz", + "https://user@www.example.com:443/test.html?foo=bar&fizzle=baz", + "https://user:pass@somehost/fetch.pl", + "puppet:///", + "http://www.foo.com", + "http://www.foo.com/", + "http://www.foo.com/?testing", + "http://www.foo.com/?one=two", + 
"http://www.foo.com/?one=two&foo=bar", + "foo://somehost.com:12345", + "foo://user@somehost.com:12345", + "foo://user@somehost.com:12345/", + "foo://user@somehost.com:12345/foo.bar/baz/fizz", + "foo://user@somehost.com:12345/foo.bar/baz/fizz?test", + "foo://user@somehost.com:12345/foo.bar/baz/fizz?test=1&sink&foo=4", + "http://www.google.com/search?hl=en&source=hp&q=hello+world+%5E%40%23%24&btnG=Google+Search", + "http://www.freebsd.org/cgi/url.cgi?ports/sysutils/grok/pkg-descr", + "http://www.google.com/search?q=CAPTCHA+ssh&start=0&ie=utf-8&oe=utf-8&client=firefox-a" + + "&rls=org.mozilla:en-US:official", + "svn+ssh://somehost:12345/testing" + }; int counter = 0; for (String uri : uris) { @@ -429,10 +448,7 @@ public void test017_nonMachingList() throws GrokException { Grok grok = compiler.compile("%{URI}"); String[] uris = { - "http://www.google.com", - "telnet://helloworld", - "", - "svn+ssh://somehost:12345/testing" + "http://www.google.com", "telnet://helloworld", "", "svn+ssh://somehost:12345/testing" }; int counter = 0; @@ -458,9 +474,7 @@ public void test018_namedOnlySimpleCase() throws GrokException { String text = "<< barfoobarfoo >>"; Match match = grok.match(text); Map map = match.capture(); - assertEquals("unable to parse: " + text, - text, - map.get("text")); + assertEquals("unable to parse: " + text, text, map.get("text")); } @Test @@ -488,9 +502,7 @@ private void testPatternRepetitions(boolean namedOnly, String pattern) throws Gr private void assertMatches(String description, Grok grok, String text) { Match match = grok.match(text); Map map = match.capture(); - assertEquals(format("%s: unable to parse '%s'", description, text), - text, - map.get("text")); + assertEquals(format("%s: unable to parse '%s'", description, text), text, map.get("text")); } @Test @@ -630,8 +642,8 @@ public void createGrokWithDefaultPatterns() throws GrokException { compiler.compile("%{USERNAME}", false); } - private void ensureAbortsWithDefinitionMissing(String pattern, 
String compilePattern, - boolean namedOnly) { + private void ensureAbortsWithDefinitionMissing( + String pattern, String compilePattern, boolean namedOnly) { try { compiler.compile(pattern); compiler.compile(compilePattern, namedOnly); @@ -643,10 +655,11 @@ private void ensureAbortsWithDefinitionMissing(String pattern, String compilePat @Test public void testGroupTypes() { - Grok grok = compiler.compile( - "%{HTTPDATE:timestamp;date;dd/MMM/yyyy:HH:mm:ss Z} %{USERNAME:username:text} " - + "%{IPORHOST:host}:%{POSINT:port:integer}", - true); + Grok grok = + compiler.compile( + "%{HTTPDATE:timestamp;date;dd/MMM/yyyy:HH:mm:ss Z} %{USERNAME:username:text} " + + "%{IPORHOST:host}:%{POSINT:port:integer}", + true); assertEquals(Converter.Type.DATETIME, grok.groupTypes.get("timestamp")); assertEquals(Converter.Type.STRING, grok.groupTypes.get("username")); assertEquals(Converter.Type.INT, grok.groupTypes.get("port")); @@ -667,8 +680,8 @@ public void testTimeZone() { DateTimeFormatter dtf = DateTimeFormatter.ofPattern("MM/dd/yyyy HH:mm:ss"); Grok grok = compiler.compile("%{DATESTAMP:timestamp;date;MM/dd/yyyy HH:mm:ss}", true); Instant instant = (Instant) grok.match(date).capture().get("timestamp"); - assertEquals(ZonedDateTime.parse(date, dtf.withZone(ZoneOffset.systemDefault())).toInstant(), - instant); + assertEquals( + ZonedDateTime.parse(date, dtf.withZone(ZoneOffset.systemDefault())).toInstant(), instant); // set default timezone to PST ZoneId pst = ZoneId.of("PST", ZoneId.SHORT_IDS); diff --git a/common/src/test/java/org/opensearch/sql/common/grok/MessagesTest.java b/common/src/test/java/org/opensearch/sql/common/grok/MessagesTest.java index 98cbb3aaeb..930da8caa8 100644 --- a/common/src/test/java/org/opensearch/sql/common/grok/MessagesTest.java +++ b/common/src/test/java/org/opensearch/sql/common/grok/MessagesTest.java @@ -16,7 +16,6 @@ import org.junit.Test; import org.opensearch.sql.common.grok.exception.GrokException; - public class MessagesTest { @Test @@ -26,8 
+25,9 @@ public void test001_linux_messages() throws GrokException, IOException { Grok grok = compiler.compile("%{MESSAGESLOG}"); - BufferedReader br = new BufferedReader( - new FileReader(Resources.getResource(ResourceManager.MESSAGES).getFile())); + BufferedReader br = + new BufferedReader( + new FileReader(Resources.getResource(ResourceManager.MESSAGES).getFile())); String line; System.out.println("Starting test with linux messages log -- may take a while"); while ((line = br.readLine()) != null) { @@ -38,5 +38,4 @@ public void test001_linux_messages() throws GrokException, IOException { } br.close(); } - } diff --git a/common/src/test/java/org/opensearch/sql/common/grok/ResourceManager.java b/common/src/test/java/org/opensearch/sql/common/grok/ResourceManager.java index a13a72cd00..fba64b59d3 100644 --- a/common/src/test/java/org/opensearch/sql/common/grok/ResourceManager.java +++ b/common/src/test/java/org/opensearch/sql/common/grok/ResourceManager.java @@ -5,9 +5,7 @@ package org.opensearch.sql.common.grok; -/** - * {@code ResourceManager} . - */ +/** {@code ResourceManager} . 
*/ public final class ResourceManager { public static final String PATTERNS = "patterns/patterns"; diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/auth/AuthenticationType.java b/datasources/src/main/java/org/opensearch/sql/datasources/auth/AuthenticationType.java index 715e72c0c3..b6581608bf 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/auth/AuthenticationType.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/auth/AuthenticationType.java @@ -12,8 +12,8 @@ import java.util.Map; public enum AuthenticationType { - - BASICAUTH("basicauth"), AWSSIGV4AUTH("awssigv4"); + BASICAUTH("basicauth"), + AWSSIGV4AUTH("awssigv4"); private String name; diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/auth/DataSourceUserAuthorizationHelper.java b/datasources/src/main/java/org/opensearch/sql/datasources/auth/DataSourceUserAuthorizationHelper.java index adcfb0bdfd..75d0ec8539 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/auth/DataSourceUserAuthorizationHelper.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/auth/DataSourceUserAuthorizationHelper.java @@ -8,9 +8,8 @@ import org.opensearch.sql.datasource.model.DataSourceMetadata; /** - * Interface for datasource authorization helper. - * The implementation of this class helps in determining - * if authorization is required and the roles associated with the user. + * Interface for datasource authorization helper. The implementation of this class helps in + * determining if authorization is required and the roles associated with the user. 
*/ public interface DataSourceUserAuthorizationHelper { diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/auth/DataSourceUserAuthorizationHelperImpl.java b/datasources/src/main/java/org/opensearch/sql/datasources/auth/DataSourceUserAuthorizationHelperImpl.java index cd55991d00..67d747f0bf 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/auth/DataSourceUserAuthorizationHelperImpl.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/auth/DataSourceUserAuthorizationHelperImpl.java @@ -19,36 +19,39 @@ public class DataSourceUserAuthorizationHelperImpl implements DataSourceUserAuth private final Client client; private Boolean isAuthorizationRequired() { - String userString = client.threadPool() - .getThreadContext().getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT); + String userString = + client + .threadPool() + .getThreadContext() + .getTransient(ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT); return userString != null; } private List getUserRoles() { - String userString = client.threadPool() - .getThreadContext().getTransient( - ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT); + String userString = + client + .threadPool() + .getThreadContext() + .getTransient(ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT); User user = User.parse(userString); return user.getRoles(); } - @Override public void authorizeDataSource(DataSourceMetadata dataSourceMetadata) { if (isAuthorizationRequired() && !dataSourceMetadata.getName().equals(DEFAULT_DATASOURCE_NAME)) { boolean isAuthorized = false; for (String role : getUserRoles()) { - if (dataSourceMetadata.getAllowedRoles().contains(role) - || role.equals("all_access")) { + if (dataSourceMetadata.getAllowedRoles().contains(role) || role.equals("all_access")) { isAuthorized = true; break; } } if (!isAuthorized) { throw new SecurityException( - String.format("User is not authorized to access datasource %s. 
" + String.format( + "User is not authorized to access datasource %s. " + "User should be mapped to any of the roles in %s for access.", dataSourceMetadata.getName(), dataSourceMetadata.getAllowedRoles().toString())); } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/encryptor/Encryptor.java b/datasources/src/main/java/org/opensearch/sql/datasources/encryptor/Encryptor.java index 578b66d0ba..4572b45f53 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/encryptor/Encryptor.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/encryptor/Encryptor.java @@ -24,5 +24,4 @@ public interface Encryptor { * @return String plainText. */ String decrypt(String encryptedText); - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/encryptor/EncryptorImpl.java b/datasources/src/main/java/org/opensearch/sql/datasources/encryptor/EncryptorImpl.java index 98f693eca1..c6abe78394 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/encryptor/EncryptorImpl.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/encryptor/EncryptorImpl.java @@ -25,32 +25,40 @@ public class EncryptorImpl implements Encryptor { @Override public String encrypt(String plainText) { validate(masterKey); - final AwsCrypto crypto = AwsCrypto.builder() - .withCommitmentPolicy(CommitmentPolicy.RequireEncryptRequireDecrypt) - .build(); + final AwsCrypto crypto = + AwsCrypto.builder() + .withCommitmentPolicy(CommitmentPolicy.RequireEncryptRequireDecrypt) + .build(); - JceMasterKey jceMasterKey - = JceMasterKey.getInstance(new SecretKeySpec(masterKey.getBytes(), "AES"), "Custom", - "opensearch.config.master.key", "AES/GCM/NoPadding"); + JceMasterKey jceMasterKey = + JceMasterKey.getInstance( + new SecretKeySpec(masterKey.getBytes(), "AES"), + "Custom", + "opensearch.config.master.key", + "AES/GCM/NoPadding"); - final CryptoResult encryptResult = crypto.encryptData(jceMasterKey, - 
plainText.getBytes(StandardCharsets.UTF_8)); + final CryptoResult encryptResult = + crypto.encryptData(jceMasterKey, plainText.getBytes(StandardCharsets.UTF_8)); return Base64.getEncoder().encodeToString(encryptResult.getResult()); } @Override public String decrypt(String encryptedText) { validate(masterKey); - final AwsCrypto crypto = AwsCrypto.builder() - .withCommitmentPolicy(CommitmentPolicy.RequireEncryptRequireDecrypt) - .build(); + final AwsCrypto crypto = + AwsCrypto.builder() + .withCommitmentPolicy(CommitmentPolicy.RequireEncryptRequireDecrypt) + .build(); - JceMasterKey jceMasterKey - = JceMasterKey.getInstance(new SecretKeySpec(masterKey.getBytes(), "AES"), "Custom", - "opensearch.config.master.key", "AES/GCM/NoPadding"); + JceMasterKey jceMasterKey = + JceMasterKey.getInstance( + new SecretKeySpec(masterKey.getBytes(), "AES"), + "Custom", + "opensearch.config.master.key", + "AES/GCM/NoPadding"); - final CryptoResult decryptedResult - = crypto.decryptData(jceMasterKey, Base64.getDecoder().decode(encryptedText)); + final CryptoResult decryptedResult = + crypto.decryptData(jceMasterKey, Base64.getDecoder().decode(encryptedText)); return new String(decryptedResult.getResult()); } @@ -65,6 +73,4 @@ private void validate(String masterKey) { + "admin/datasources.rst#master-key-config-for-encrypting-credential-information"); } } - - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/exceptions/DataSourceNotFoundException.java b/datasources/src/main/java/org/opensearch/sql/datasources/exceptions/DataSourceNotFoundException.java index 484b0b92b2..40b601000c 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/exceptions/DataSourceNotFoundException.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/exceptions/DataSourceNotFoundException.java @@ -7,12 +7,9 @@ package org.opensearch.sql.datasources.exceptions; -/** - * DataSourceNotFoundException. - */ +/** DataSourceNotFoundException. 
*/ public class DataSourceNotFoundException extends RuntimeException { public DataSourceNotFoundException(String message) { super(message); } - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/exceptions/ErrorMessage.java b/datasources/src/main/java/org/opensearch/sql/datasources/exceptions/ErrorMessage.java index 265b3ddf31..386eb780cd 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/exceptions/ErrorMessage.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/exceptions/ErrorMessage.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.datasources.exceptions; import com.google.gson.Gson; @@ -12,27 +11,20 @@ import lombok.Getter; import org.opensearch.core.rest.RestStatus; -/** - * Error Message. - */ +/** Error Message. */ public class ErrorMessage { protected Throwable exception; private final int status; - @Getter - private final String type; + @Getter private final String type; - @Getter - private final String reason; + @Getter private final String reason; - @Getter - private final String details; + @Getter private final String details; - /** - * Error Message Constructor. - */ + /** Error Message Constructor. 
*/ public ErrorMessage(Throwable exception, int status) { this.exception = exception; this.status = status; diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/CreateDataSourceActionRequest.java b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/CreateDataSourceActionRequest.java index 0cbb2355ca..b01d5b40dd 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/CreateDataSourceActionRequest.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/CreateDataSourceActionRequest.java @@ -7,7 +7,6 @@ package org.opensearch.sql.datasources.model.transport; - import static org.opensearch.sql.analysis.DataSourceSchemaIdentifierNameResolver.DEFAULT_DATASOURCE_NAME; import java.io.IOException; @@ -17,15 +16,11 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.sql.datasource.model.DataSourceMetadata; -public class CreateDataSourceActionRequest - extends ActionRequest { +public class CreateDataSourceActionRequest extends ActionRequest { - @Getter - private DataSourceMetadata dataSourceMetadata; + @Getter private DataSourceMetadata dataSourceMetadata; - /** - * Constructor of CreateDataSourceActionRequest from StreamInput. - */ + /** Constructor of CreateDataSourceActionRequest from StreamInput. 
*/ public CreateDataSourceActionRequest(StreamInput in) throws IOException { super(in); } @@ -38,9 +33,8 @@ public CreateDataSourceActionRequest(DataSourceMetadata dataSourceMetadata) { public ActionRequestValidationException validate() { if (this.dataSourceMetadata.getName().equals(DEFAULT_DATASOURCE_NAME)) { ActionRequestValidationException exception = new ActionRequestValidationException(); - exception - .addValidationError( - "Not allowed to create datasource with name : " + DEFAULT_DATASOURCE_NAME); + exception.addValidationError( + "Not allowed to create datasource with name : " + DEFAULT_DATASOURCE_NAME); return exception; } else { return null; diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/CreateDataSourceActionResponse.java b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/CreateDataSourceActionResponse.java index 4ed0464a25..aeb1e2d3d9 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/CreateDataSourceActionResponse.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/CreateDataSourceActionResponse.java @@ -15,11 +15,9 @@ import org.opensearch.core.common.io.stream.StreamOutput; @RequiredArgsConstructor -public class CreateDataSourceActionResponse - extends ActionResponse { +public class CreateDataSourceActionResponse extends ActionResponse { - @Getter - private final String result; + @Getter private final String result; public CreateDataSourceActionResponse(StreamInput in) throws IOException { super(in); diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/DeleteDataSourceActionRequest.java b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/DeleteDataSourceActionRequest.java index 1eb2d17bff..d6e3bcb3f9 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/DeleteDataSourceActionRequest.java +++ 
b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/DeleteDataSourceActionRequest.java @@ -18,8 +18,7 @@ public class DeleteDataSourceActionRequest extends ActionRequest { - @Getter - private String dataSourceName; + @Getter private String dataSourceName; /** Constructor of DeleteDataSourceActionRequest from StreamInput. */ public DeleteDataSourceActionRequest(StreamInput in) throws IOException { @@ -34,18 +33,15 @@ public DeleteDataSourceActionRequest(String dataSourceName) { public ActionRequestValidationException validate() { if (StringUtils.isEmpty(this.dataSourceName)) { ActionRequestValidationException exception = new ActionRequestValidationException(); - exception - .addValidationError("Datasource Name cannot be empty or null"); + exception.addValidationError("Datasource Name cannot be empty or null"); return exception; } else if (this.dataSourceName.equals(DEFAULT_DATASOURCE_NAME)) { ActionRequestValidationException exception = new ActionRequestValidationException(); - exception - .addValidationError( - "Not allowed to delete datasource with name : " + DEFAULT_DATASOURCE_NAME); + exception.addValidationError( + "Not allowed to delete datasource with name : " + DEFAULT_DATASOURCE_NAME); return exception; } else { return null; } } - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/DeleteDataSourceActionResponse.java b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/DeleteDataSourceActionResponse.java index ec57c4aee7..d8c29c2a67 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/DeleteDataSourceActionResponse.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/DeleteDataSourceActionResponse.java @@ -17,8 +17,7 @@ @RequiredArgsConstructor public class DeleteDataSourceActionResponse extends ActionResponse { - @Getter - private final String result; + @Getter private final String result; public 
DeleteDataSourceActionResponse(StreamInput in) throws IOException { super(in); @@ -29,5 +28,4 @@ public DeleteDataSourceActionResponse(StreamInput in) throws IOException { public void writeTo(StreamOutput streamOutput) throws IOException { streamOutput.writeString(result); } - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/GetDataSourceActionRequest.java b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/GetDataSourceActionRequest.java index 23f4898543..2d9a4de35a 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/GetDataSourceActionRequest.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/GetDataSourceActionRequest.java @@ -19,12 +19,9 @@ @NoArgsConstructor public class GetDataSourceActionRequest extends ActionRequest { - @Getter - private String dataSourceName; + @Getter private String dataSourceName; - /** - * Constructor of GetDataSourceActionRequest from StreamInput. - */ + /** Constructor of GetDataSourceActionRequest from StreamInput. 
*/ public GetDataSourceActionRequest(StreamInput in) throws IOException { super(in); } @@ -37,13 +34,11 @@ public GetDataSourceActionRequest(String dataSourceName) { public ActionRequestValidationException validate() { if (this.dataSourceName != null && this.dataSourceName.equals(DEFAULT_DATASOURCE_NAME)) { ActionRequestValidationException exception = new ActionRequestValidationException(); - exception - .addValidationError( - "Not allowed to fetch datasource with name : " + DEFAULT_DATASOURCE_NAME); + exception.addValidationError( + "Not allowed to fetch datasource with name : " + DEFAULT_DATASOURCE_NAME); return exception; } else { return null; } } - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/GetDataSourceActionResponse.java b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/GetDataSourceActionResponse.java index dccb3e9b52..ac8d5d4c62 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/GetDataSourceActionResponse.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/GetDataSourceActionResponse.java @@ -17,8 +17,7 @@ @RequiredArgsConstructor public class GetDataSourceActionResponse extends ActionResponse { - @Getter - private final String result; + @Getter private final String result; public GetDataSourceActionResponse(StreamInput in) throws IOException { super(in); @@ -29,5 +28,4 @@ public GetDataSourceActionResponse(StreamInput in) throws IOException { public void writeTo(StreamOutput streamOutput) throws IOException { streamOutput.writeString(result); } - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/UpdateDataSourceActionRequest.java b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/UpdateDataSourceActionRequest.java index 11bc2d1e20..b502f348e2 100644 --- 
a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/UpdateDataSourceActionRequest.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/UpdateDataSourceActionRequest.java @@ -7,7 +7,6 @@ package org.opensearch.sql.datasources.model.transport; - import static org.opensearch.sql.analysis.DataSourceSchemaIdentifierNameResolver.DEFAULT_DATASOURCE_NAME; import java.io.IOException; @@ -17,11 +16,9 @@ import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.sql.datasource.model.DataSourceMetadata; -public class UpdateDataSourceActionRequest - extends ActionRequest { +public class UpdateDataSourceActionRequest extends ActionRequest { - @Getter - private DataSourceMetadata dataSourceMetadata; + @Getter private DataSourceMetadata dataSourceMetadata; /** Constructor of UpdateDataSourceActionRequest from StreamInput. */ public UpdateDataSourceActionRequest(StreamInput in) throws IOException { @@ -36,9 +33,8 @@ public UpdateDataSourceActionRequest(DataSourceMetadata dataSourceMetadata) { public ActionRequestValidationException validate() { if (this.dataSourceMetadata.getName().equals(DEFAULT_DATASOURCE_NAME)) { ActionRequestValidationException exception = new ActionRequestValidationException(); - exception - .addValidationError( - "Not allowed to update datasource with name : " + DEFAULT_DATASOURCE_NAME); + exception.addValidationError( + "Not allowed to update datasource with name : " + DEFAULT_DATASOURCE_NAME); return exception; } else { return null; diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/UpdateDataSourceActionResponse.java b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/UpdateDataSourceActionResponse.java index 88e8c41ea9..0be992d067 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/UpdateDataSourceActionResponse.java +++ 
b/datasources/src/main/java/org/opensearch/sql/datasources/model/transport/UpdateDataSourceActionResponse.java @@ -15,11 +15,9 @@ import org.opensearch.core.common.io.stream.StreamOutput; @RequiredArgsConstructor -public class UpdateDataSourceActionResponse - extends ActionResponse { +public class UpdateDataSourceActionResponse extends ActionResponse { - @Getter - private final String result; + @Getter private final String result; public UpdateDataSourceActionResponse(StreamInput in) throws IOException { super(in); diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/rest/RestDataSourceQueryAction.java b/datasources/src/main/java/org/opensearch/sql/datasources/rest/RestDataSourceQueryAction.java index 15735b945a..b5929d0f20 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/rest/RestDataSourceQueryAction.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/rest/RestDataSourceQueryAction.java @@ -47,7 +47,6 @@ import org.opensearch.sql.datasources.utils.Scheduler; import org.opensearch.sql.datasources.utils.XContentParserUtils; - public class RestDataSourceQueryAction extends BaseRestHandler { public static final String DATASOURCE_ACTIONS = "datasource_actions"; @@ -83,8 +82,9 @@ public List routes() { * Response body: * Ref [org.opensearch.sql.plugin.transport.datasource.model.GetDataSourceActionResponse] */ - new Route(GET, String.format(Locale.ROOT, "%s/{%s}", - BASE_DATASOURCE_ACTION_URL, "dataSourceName")), + new Route( + GET, + String.format(Locale.ROOT, "%s/{%s}", BASE_DATASOURCE_ACTION_URL, "dataSourceName")), new Route(GET, BASE_DATASOURCE_ACTION_URL), /* @@ -107,9 +107,9 @@ public List routes() { * Response body: Ref * [org.opensearch.sql.plugin.transport.datasource.model.DeleteDataSourceActionResponse] */ - new Route(DELETE, String.format(Locale.ROOT, "%s/{%s}", - BASE_DATASOURCE_ACTION_URL, "dataSourceName")) - ); + new Route( + DELETE, + String.format(Locale.ROOT, "%s/{%s}", BASE_DATASOURCE_ACTION_URL, 
"dataSourceName"))); } @Override @@ -125,101 +125,125 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient case DELETE: return executeDeleteRequest(restRequest, nodeClient); default: - return restChannel - -> restChannel.sendResponse(new BytesRestResponse(RestStatus.METHOD_NOT_ALLOWED, - String.valueOf(restRequest.method()))); + return restChannel -> + restChannel.sendResponse( + new BytesRestResponse( + RestStatus.METHOD_NOT_ALLOWED, String.valueOf(restRequest.method()))); } } - private RestChannelConsumer executePostRequest(RestRequest restRequest, - NodeClient nodeClient) throws IOException { - - DataSourceMetadata dataSourceMetadata - = XContentParserUtils.toDataSourceMetadata(restRequest.contentParser()); - return restChannel -> Scheduler.schedule(nodeClient, - () -> nodeClient.execute(TransportCreateDataSourceAction.ACTION_TYPE, - new CreateDataSourceActionRequest(dataSourceMetadata), - new ActionListener<>() { - @Override - public void onResponse( - CreateDataSourceActionResponse createDataSourceActionResponse) { - restChannel.sendResponse( - new BytesRestResponse(RestStatus.CREATED, "application/json; charset=UTF-8", - createDataSourceActionResponse.getResult())); - } - - @Override - public void onFailure(Exception e) { - handleException(e, restChannel); - } - })); + private RestChannelConsumer executePostRequest(RestRequest restRequest, NodeClient nodeClient) + throws IOException { + + DataSourceMetadata dataSourceMetadata = + XContentParserUtils.toDataSourceMetadata(restRequest.contentParser()); + return restChannel -> + Scheduler.schedule( + nodeClient, + () -> + nodeClient.execute( + TransportCreateDataSourceAction.ACTION_TYPE, + new CreateDataSourceActionRequest(dataSourceMetadata), + new ActionListener<>() { + @Override + public void onResponse( + CreateDataSourceActionResponse createDataSourceActionResponse) { + restChannel.sendResponse( + new BytesRestResponse( + RestStatus.CREATED, + "application/json; charset=UTF-8", 
+ createDataSourceActionResponse.getResult())); + } + + @Override + public void onFailure(Exception e) { + handleException(e, restChannel); + } + })); } - private RestChannelConsumer executeGetRequest(RestRequest restRequest, - NodeClient nodeClient) { + private RestChannelConsumer executeGetRequest(RestRequest restRequest, NodeClient nodeClient) { String dataSourceName = restRequest.param("dataSourceName"); - return restChannel -> Scheduler.schedule(nodeClient, - () -> nodeClient.execute(TransportGetDataSourceAction.ACTION_TYPE, - new GetDataSourceActionRequest(dataSourceName), - new ActionListener<>() { - @Override - public void onResponse(GetDataSourceActionResponse getDataSourceActionResponse) { - restChannel.sendResponse( - new BytesRestResponse(RestStatus.OK, "application/json; charset=UTF-8", - getDataSourceActionResponse.getResult())); - } - - @Override - public void onFailure(Exception e) { - handleException(e, restChannel); - } - })); + return restChannel -> + Scheduler.schedule( + nodeClient, + () -> + nodeClient.execute( + TransportGetDataSourceAction.ACTION_TYPE, + new GetDataSourceActionRequest(dataSourceName), + new ActionListener<>() { + @Override + public void onResponse( + GetDataSourceActionResponse getDataSourceActionResponse) { + restChannel.sendResponse( + new BytesRestResponse( + RestStatus.OK, + "application/json; charset=UTF-8", + getDataSourceActionResponse.getResult())); + } + + @Override + public void onFailure(Exception e) { + handleException(e, restChannel); + } + })); } - private RestChannelConsumer executeUpdateRequest(RestRequest restRequest, - NodeClient nodeClient) throws IOException { - DataSourceMetadata dataSourceMetadata - = XContentParserUtils.toDataSourceMetadata(restRequest.contentParser()); - return restChannel -> Scheduler.schedule(nodeClient, - () -> nodeClient.execute(TransportUpdateDataSourceAction.ACTION_TYPE, - new UpdateDataSourceActionRequest(dataSourceMetadata), - new ActionListener<>() { - @Override - public void 
onResponse( - UpdateDataSourceActionResponse updateDataSourceActionResponse) { - restChannel.sendResponse( - new BytesRestResponse(RestStatus.OK, "application/json; charset=UTF-8", - updateDataSourceActionResponse.getResult())); - } - - @Override - public void onFailure(Exception e) { - handleException(e, restChannel); - } - })); + private RestChannelConsumer executeUpdateRequest(RestRequest restRequest, NodeClient nodeClient) + throws IOException { + DataSourceMetadata dataSourceMetadata = + XContentParserUtils.toDataSourceMetadata(restRequest.contentParser()); + return restChannel -> + Scheduler.schedule( + nodeClient, + () -> + nodeClient.execute( + TransportUpdateDataSourceAction.ACTION_TYPE, + new UpdateDataSourceActionRequest(dataSourceMetadata), + new ActionListener<>() { + @Override + public void onResponse( + UpdateDataSourceActionResponse updateDataSourceActionResponse) { + restChannel.sendResponse( + new BytesRestResponse( + RestStatus.OK, + "application/json; charset=UTF-8", + updateDataSourceActionResponse.getResult())); + } + + @Override + public void onFailure(Exception e) { + handleException(e, restChannel); + } + })); } - private RestChannelConsumer executeDeleteRequest(RestRequest restRequest, - NodeClient nodeClient) { + private RestChannelConsumer executeDeleteRequest(RestRequest restRequest, NodeClient nodeClient) { String dataSourceName = restRequest.param("dataSourceName"); - return restChannel -> Scheduler.schedule(nodeClient, - () -> nodeClient.execute(TransportDeleteDataSourceAction.ACTION_TYPE, - new DeleteDataSourceActionRequest(dataSourceName), - new ActionListener<>() { - @Override - public void onResponse( - DeleteDataSourceActionResponse deleteDataSourceActionResponse) { - restChannel.sendResponse( - new BytesRestResponse(RestStatus.NO_CONTENT, "application/json; charset=UTF-8", - deleteDataSourceActionResponse.getResult())); - } - - @Override - public void onFailure(Exception e) { - handleException(e, restChannel); - } - })); + 
return restChannel -> + Scheduler.schedule( + nodeClient, + () -> + nodeClient.execute( + TransportDeleteDataSourceAction.ACTION_TYPE, + new DeleteDataSourceActionRequest(dataSourceName), + new ActionListener<>() { + @Override + public void onResponse( + DeleteDataSourceActionResponse deleteDataSourceActionResponse) { + restChannel.sendResponse( + new BytesRestResponse( + RestStatus.NO_CONTENT, + "application/json; charset=UTF-8", + deleteDataSourceActionResponse.getResult())); + } + + @Override + public void onFailure(Exception e) { + handleException(e, restChannel); + } + })); } private void handleException(Exception e, RestChannel restChannel) { @@ -240,8 +264,7 @@ private void handleException(Exception e, RestChannel restChannel) { private void reportError(final RestChannel channel, final Exception e, final RestStatus status) { channel.sendResponse( - new BytesRestResponse( - status, new ErrorMessage(e, status.getStatus()).toString())); + new BytesRestResponse(status, new ErrorMessage(e, status.getStatus()).toString())); } private static boolean isClientError(Exception e) { @@ -250,5 +273,4 @@ private static boolean isClientError(Exception e) { || e instanceof IllegalArgumentException || e instanceof IllegalStateException; } - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceLoaderCache.java b/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceLoaderCache.java index 3fe2954c12..dbcc321b3f 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceLoaderCache.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceLoaderCache.java @@ -4,8 +4,8 @@ import org.opensearch.sql.datasource.model.DataSourceMetadata; /** - * Interface for DataSourceLoaderCache which provides methods for - * fetch, loading and invalidating DataSource cache. 
+ * Interface for DataSourceLoaderCache which provides methods for fetch, loading and invalidating + * DataSource cache. */ public interface DataSourceLoaderCache { @@ -16,5 +16,4 @@ public interface DataSourceLoaderCache { * @return {@link DataSource} */ DataSource getOrLoadDataSource(DataSourceMetadata dataSourceMetadata); - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceLoaderCacheImpl.java b/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceLoaderCacheImpl.java index ba9520fc0c..44454dbd38 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceLoaderCacheImpl.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceLoaderCacheImpl.java @@ -12,10 +12,9 @@ import org.opensearch.sql.storage.DataSourceFactory; /** - * Default implementation of DataSourceLoaderCache. This implementation - * utilizes Google Guava Cache {@link Cache} for caching DataSource objects - * against {@link DataSourceMetadata}. Expires the cache objects every 24 hrs after - * the last access. + * Default implementation of DataSourceLoaderCache. This implementation utilizes Google Guava Cache + * {@link Cache} for caching DataSource objects against {@link DataSourceMetadata}. Expires the + * cache objects every 24 hrs after the last access. */ public class DataSourceLoaderCacheImpl implements DataSourceLoaderCache { private final Map dataSourceFactoryMap; @@ -27,24 +26,24 @@ public class DataSourceLoaderCacheImpl implements DataSourceLoaderCache { * @param dataSourceFactorySet set of {@link DataSourceFactory}. 
*/ public DataSourceLoaderCacheImpl(Set dataSourceFactorySet) { - this.dataSourceFactoryMap = dataSourceFactorySet.stream() - .collect(Collectors.toMap(DataSourceFactory::getDataSourceType, f -> f)); - this.dataSourceCache = CacheBuilder.newBuilder() - .maximumSize(1000) - .expireAfterAccess(24, TimeUnit.HOURS) - .build(); + this.dataSourceFactoryMap = + dataSourceFactorySet.stream() + .collect(Collectors.toMap(DataSourceFactory::getDataSourceType, f -> f)); + this.dataSourceCache = + CacheBuilder.newBuilder().maximumSize(1000).expireAfterAccess(24, TimeUnit.HOURS).build(); } @Override public DataSource getOrLoadDataSource(DataSourceMetadata dataSourceMetadata) { DataSource dataSource = this.dataSourceCache.getIfPresent(dataSourceMetadata); if (dataSource == null) { - dataSource = this.dataSourceFactoryMap.get(dataSourceMetadata.getConnector()) - .createDataSource(dataSourceMetadata); + dataSource = + this.dataSourceFactoryMap + .get(dataSourceMetadata.getConnector()) + .createDataSource(dataSourceMetadata); this.dataSourceCache.put(dataSourceMetadata, dataSource); return dataSource; } return dataSource; } - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceMetadataStorage.java b/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceMetadataStorage.java index e6483900c6..4d59c68fa0 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceMetadataStorage.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceMetadataStorage.java @@ -13,29 +13,26 @@ import org.opensearch.sql.datasource.model.DataSourceMetadata; /** - * Interface for DataSourceMetadata Storage - * which will be only used by DataSourceService for Storage. + * Interface for DataSourceMetadata Storage which will be only used by DataSourceService for + * Storage. */ public interface DataSourceMetadataStorage { /** - * Returns all dataSource Metadata objects. 
The returned objects won't contain - * any of the credential info. + * Returns all dataSource Metadata objects. The returned objects won't contain any of the + * credential info. * * @return list of {@link DataSourceMetadata}. */ List getDataSourceMetadata(); - /** - * Gets {@link DataSourceMetadata} corresponding to the - * datasourceName from underlying storage. + * Gets {@link DataSourceMetadata} corresponding to the datasourceName from underlying storage. * * @param datasourceName name of the {@link DataSource}. */ Optional getDataSourceMetadata(String datasourceName); - /** * Stores {@link DataSourceMetadata} in underlying storage. * @@ -43,7 +40,6 @@ public interface DataSourceMetadataStorage { */ void createDataSourceMetadata(DataSourceMetadata dataSourceMetadata); - /** * Updates {@link DataSourceMetadata} in underlying storage. * @@ -51,13 +47,10 @@ public interface DataSourceMetadataStorage { */ void updateDataSourceMetadata(DataSourceMetadata dataSourceMetadata); - /** - * Deletes {@link DataSourceMetadata} corresponding to the - * datasourceName from underlying storage. + * Deletes {@link DataSourceMetadata} corresponding to the datasourceName from underlying storage. * * @param datasourceName name of the {@link DataSource}. */ void deleteDataSourceMetadata(String datasourceName); - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceServiceImpl.java b/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceServiceImpl.java index 86afa90c2b..2ac480bbf2 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceServiceImpl.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/service/DataSourceServiceImpl.java @@ -41,13 +41,11 @@ public class DataSourceServiceImpl implements DataSourceService { private final DataSourceUserAuthorizationHelper dataSourceUserAuthorizationHelper; - /** - * Construct from the set of {@link DataSourceFactory} at bootstrap time. 
- */ - public DataSourceServiceImpl(Set dataSourceFactories, - DataSourceMetadataStorage dataSourceMetadataStorage, - DataSourceUserAuthorizationHelper - dataSourceUserAuthorizationHelper) { + /** Construct from the set of {@link DataSourceFactory} at bootstrap time. */ + public DataSourceServiceImpl( + Set dataSourceFactories, + DataSourceMetadataStorage dataSourceMetadataStorage, + DataSourceUserAuthorizationHelper dataSourceUserAuthorizationHelper) { this.dataSourceMetadataStorage = dataSourceMetadataStorage; this.dataSourceUserAuthorizationHelper = dataSourceUserAuthorizationHelper; this.dataSourceLoaderCache = new DataSourceLoaderCacheImpl(dataSourceFactories); @@ -55,8 +53,8 @@ public DataSourceServiceImpl(Set dataSourceFactories, @Override public Set getDataSourceMetadata(boolean isDefaultDataSourceRequired) { - List dataSourceMetadataList - = this.dataSourceMetadataStorage.getDataSourceMetadata(); + List dataSourceMetadataList = + this.dataSourceMetadataStorage.getDataSourceMetadata(); Set dataSourceMetadataSet = new HashSet<>(dataSourceMetadataList); if (isDefaultDataSourceRequired) { dataSourceMetadataSet.add(DataSourceMetadata.defaultOpenSearchDataSourceMetadata()); @@ -67,28 +65,26 @@ public Set getDataSourceMetadata(boolean isDefaultDataSource @Override public DataSourceMetadata getDataSourceMetadata(String datasourceName) { - Optional dataSourceMetadataOptional - = getDataSourceMetadataFromName(datasourceName); + Optional dataSourceMetadataOptional = + getDataSourceMetadataFromName(datasourceName); if (dataSourceMetadataOptional.isEmpty()) { - throw new IllegalArgumentException("DataSource with name: " + datasourceName - + " doesn't exist."); + throw new IllegalArgumentException( + "DataSource with name: " + datasourceName + " doesn't exist."); } removeAuthInfo(dataSourceMetadataOptional.get()); return dataSourceMetadataOptional.get(); } - @Override public DataSource getDataSource(String dataSourceName) { - Optional - dataSourceMetadataOptional = 
getDataSourceMetadataFromName(dataSourceName); + Optional dataSourceMetadataOptional = + getDataSourceMetadataFromName(dataSourceName); if (dataSourceMetadataOptional.isEmpty()) { throw new DataSourceNotFoundException( String.format("DataSource with name %s doesn't exist.", dataSourceName)); } else { DataSourceMetadata dataSourceMetadata = dataSourceMetadataOptional.get(); - this.dataSourceUserAuthorizationHelper - .authorizeDataSource(dataSourceMetadata); + this.dataSourceUserAuthorizationHelper.authorizeDataSource(dataSourceMetadata); return dataSourceLoaderCache.getOrLoadDataSource(dataSourceMetadata); } } @@ -130,7 +126,6 @@ public Boolean dataSourceExists(String dataSourceName) { || this.dataSourceMetadataStorage.getDataSourceMetadata(dataSourceName).isPresent(); } - /** * This can be moved to a different validator class when we introduce more connectors. * @@ -159,7 +154,6 @@ private Optional getDataSourceMetadataFromName(String dataSo } } - // It is advised to avoid sending any kind credential // info in api response from security point of view. 
private void removeAuthInfo(Set dataSourceMetadataSet) { @@ -167,11 +161,8 @@ private void removeAuthInfo(Set dataSourceMetadataSet) { } private void removeAuthInfo(DataSourceMetadata dataSourceMetadata) { - HashMap safeProperties - = new HashMap<>(dataSourceMetadata.getProperties()); - safeProperties - .entrySet() - .removeIf(entry -> entry.getKey().contains("auth")); + HashMap safeProperties = new HashMap<>(dataSourceMetadata.getProperties()); + safeProperties.entrySet().removeIf(entry -> entry.getKey().contains("auth")); dataSourceMetadata.setProperties(safeProperties); } } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorage.java b/datasources/src/main/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorage.java index e2927a4b0c..4eb16924c4 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorage.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorage.java @@ -55,8 +55,8 @@ public class OpenSearchDataSourceMetadataStorage implements DataSourceMetadataSt private static final String DATASOURCE_INDEX_MAPPING_FILE_NAME = "datasources-index-mapping.yml"; private static final Integer DATASOURCE_QUERY_RESULT_SIZE = 10000; - private static final String DATASOURCE_INDEX_SETTINGS_FILE_NAME - = "datasources-index-settings.yml"; + private static final String DATASOURCE_INDEX_SETTINGS_FILE_NAME = + "datasources-index-settings.yml"; private static final Logger LOG = LogManager.getLogger(); private final Client client; private final ClusterService clusterService; @@ -64,15 +64,15 @@ public class OpenSearchDataSourceMetadataStorage implements DataSourceMetadataSt private final Encryptor encryptor; /** - * This class implements DataSourceMetadataStorage interface - * using OpenSearch as underlying storage. 
+ * This class implements DataSourceMetadataStorage interface using OpenSearch as underlying + * storage. * - * @param client opensearch NodeClient. + * @param client opensearch NodeClient. * @param clusterService ClusterService. - * @param encryptor Encryptor. + * @param encryptor Encryptor. */ - public OpenSearchDataSourceMetadataStorage(Client client, ClusterService clusterService, - Encryptor encryptor) { + public OpenSearchDataSourceMetadataStorage( + Client client, ClusterService clusterService, Encryptor encryptor) { this.client = client; this.clusterService = clusterService; this.encryptor = encryptor; @@ -93,8 +93,7 @@ public Optional getDataSourceMetadata(String datasourceName) createDataSourcesIndex(); return Optional.empty(); } - return searchInDataSourcesIndex(QueryBuilders.termQuery("name", datasourceName)) - .stream() + return searchInDataSourcesIndex(QueryBuilders.termQuery("name", datasourceName)).stream() .findFirst() .map(x -> this.encryptDecryptAuthenticationData(x, false)); } @@ -111,14 +110,14 @@ public void createDataSourceMetadata(DataSourceMetadata dataSourceMetadata) { indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); ActionFuture indexResponseActionFuture; IndexResponse indexResponse; - try (ThreadContext.StoredContext storedContext = client.threadPool().getThreadContext() - .stashContext()) { + try (ThreadContext.StoredContext storedContext = + client.threadPool().getThreadContext().stashContext()) { indexRequest.source(XContentParserUtils.convertToXContent(dataSourceMetadata)); indexResponseActionFuture = client.index(indexRequest); indexResponse = indexResponseActionFuture.actionGet(); } catch (VersionConflictEngineException exception) { - throw new IllegalArgumentException("A datasource already exists with name: " - + dataSourceMetadata.getName()); + throw new IllegalArgumentException( + "A datasource already exists with name: " + dataSourceMetadata.getName()); } catch (Exception e) { throw new RuntimeException(e); 
} @@ -126,27 +125,27 @@ public void createDataSourceMetadata(DataSourceMetadata dataSourceMetadata) { if (indexResponse.getResult().equals(DocWriteResponse.Result.CREATED)) { LOG.debug("DatasourceMetadata : {} successfully created", dataSourceMetadata.getName()); } else { - throw new RuntimeException("Saving dataSource metadata information failed with result : " - + indexResponse.getResult().getLowercase()); + throw new RuntimeException( + "Saving dataSource metadata information failed with result : " + + indexResponse.getResult().getLowercase()); } } @Override public void updateDataSourceMetadata(DataSourceMetadata dataSourceMetadata) { encryptDecryptAuthenticationData(dataSourceMetadata, true); - UpdateRequest updateRequest - = new UpdateRequest(DATASOURCE_INDEX_NAME, dataSourceMetadata.getName()); + UpdateRequest updateRequest = + new UpdateRequest(DATASOURCE_INDEX_NAME, dataSourceMetadata.getName()); UpdateResponse updateResponse; - try (ThreadContext.StoredContext storedContext = client.threadPool().getThreadContext() - .stashContext()) { + try (ThreadContext.StoredContext storedContext = + client.threadPool().getThreadContext().stashContext()) { updateRequest.doc(XContentParserUtils.convertToXContent(dataSourceMetadata)); updateRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - ActionFuture updateResponseActionFuture - = client.update(updateRequest); + ActionFuture updateResponseActionFuture = client.update(updateRequest); updateResponse = updateResponseActionFuture.actionGet(); } catch (DocumentMissingException exception) { - throw new DataSourceNotFoundException("Datasource with name: " - + dataSourceMetadata.getName() + " doesn't exist"); + throw new DataSourceNotFoundException( + "Datasource with name: " + dataSourceMetadata.getName() + " doesn't exist"); } catch (Exception e) { throw new RuntimeException(e); } @@ -155,8 +154,9 @@ public void updateDataSourceMetadata(DataSourceMetadata dataSourceMetadata) { || 
updateResponse.getResult().equals(DocWriteResponse.Result.NOOP)) { LOG.debug("DatasourceMetadata : {} successfully updated", dataSourceMetadata.getName()); } else { - throw new RuntimeException("Saving dataSource metadata information failed with result : " - + updateResponse.getResult().getLowercase()); + throw new RuntimeException( + "Saving dataSource metadata information failed with result : " + + updateResponse.getResult().getLowercase()); } } @@ -165,48 +165,54 @@ public void deleteDataSourceMetadata(String datasourceName) { DeleteRequest deleteRequest = new DeleteRequest(DATASOURCE_INDEX_NAME); deleteRequest.id(datasourceName); ActionFuture deleteResponseActionFuture; - try (ThreadContext.StoredContext storedContext = client.threadPool().getThreadContext() - .stashContext()) { + try (ThreadContext.StoredContext storedContext = + client.threadPool().getThreadContext().stashContext()) { deleteResponseActionFuture = client.delete(deleteRequest); } DeleteResponse deleteResponse = deleteResponseActionFuture.actionGet(); if (deleteResponse.getResult().equals(DocWriteResponse.Result.DELETED)) { LOG.debug("DatasourceMetadata : {} successfully deleted", datasourceName); } else if (deleteResponse.getResult().equals(DocWriteResponse.Result.NOT_FOUND)) { - throw new DataSourceNotFoundException("Datasource with name: " - + datasourceName + " doesn't exist"); + throw new DataSourceNotFoundException( + "Datasource with name: " + datasourceName + " doesn't exist"); } else { - throw new RuntimeException("Deleting dataSource metadata information failed with result : " - + deleteResponse.getResult().getLowercase()); + throw new RuntimeException( + "Deleting dataSource metadata information failed with result : " + + deleteResponse.getResult().getLowercase()); } } private void createDataSourcesIndex() { try { - InputStream mappingFileStream = OpenSearchDataSourceMetadataStorage.class.getClassLoader() - .getResourceAsStream(DATASOURCE_INDEX_MAPPING_FILE_NAME); - InputStream 
settingsFileStream = OpenSearchDataSourceMetadataStorage.class.getClassLoader() - .getResourceAsStream(DATASOURCE_INDEX_SETTINGS_FILE_NAME); + InputStream mappingFileStream = + OpenSearchDataSourceMetadataStorage.class + .getClassLoader() + .getResourceAsStream(DATASOURCE_INDEX_MAPPING_FILE_NAME); + InputStream settingsFileStream = + OpenSearchDataSourceMetadataStorage.class + .getClassLoader() + .getResourceAsStream(DATASOURCE_INDEX_SETTINGS_FILE_NAME); CreateIndexRequest createIndexRequest = new CreateIndexRequest(DATASOURCE_INDEX_NAME); - createIndexRequest.mapping(IOUtils.toString(mappingFileStream, StandardCharsets.UTF_8), - XContentType.YAML) - .settings(IOUtils.toString(settingsFileStream, StandardCharsets.UTF_8), - XContentType.YAML); + createIndexRequest + .mapping(IOUtils.toString(mappingFileStream, StandardCharsets.UTF_8), XContentType.YAML) + .settings( + IOUtils.toString(settingsFileStream, StandardCharsets.UTF_8), XContentType.YAML); ActionFuture createIndexResponseActionFuture; - try (ThreadContext.StoredContext ignored = client.threadPool().getThreadContext() - .stashContext()) { + try (ThreadContext.StoredContext ignored = + client.threadPool().getThreadContext().stashContext()) { createIndexResponseActionFuture = client.admin().indices().create(createIndexRequest); } CreateIndexResponse createIndexResponse = createIndexResponseActionFuture.actionGet(); if (createIndexResponse.isAcknowledged()) { LOG.info("Index: {} creation Acknowledged", DATASOURCE_INDEX_NAME); } else { - throw new RuntimeException( - "Index creation is not acknowledged."); + throw new RuntimeException("Index creation is not acknowledged."); } } catch (Throwable e) { throw new RuntimeException( - "Internal server error while creating" + DATASOURCE_INDEX_NAME + " index:: " + "Internal server error while creating" + + DATASOURCE_INDEX_NAME + + " index:: " + e.getMessage()); } } @@ -218,17 +224,19 @@ private List searchInDataSourcesIndex(QueryBuilder query) { 
searchSourceBuilder.query(query); searchSourceBuilder.size(DATASOURCE_QUERY_RESULT_SIZE); searchRequest.source(searchSourceBuilder); - // strongly consistent reads is requred. more info https://github.com/opensearch-project/sql/issues/1801. + // strongly consistent reads is requred. more info + // https://github.com/opensearch-project/sql/issues/1801. searchRequest.preference("_primary"); ActionFuture searchResponseActionFuture; - try (ThreadContext.StoredContext ignored = client.threadPool().getThreadContext() - .stashContext()) { + try (ThreadContext.StoredContext ignored = + client.threadPool().getThreadContext().stashContext()) { searchResponseActionFuture = client.search(searchRequest); } SearchResponse searchResponse = searchResponseActionFuture.actionGet(); if (searchResponse.status().getStatus() != 200) { - throw new RuntimeException("Fetching dataSource metadata information failed with status : " - + searchResponse.status()); + throw new RuntimeException( + "Fetching dataSource metadata information failed with status : " + + searchResponse.status()); } else { List list = new ArrayList<>(); for (SearchHit searchHit : searchResponse.getHits().getHits()) { @@ -246,14 +254,15 @@ private List searchInDataSourcesIndex(QueryBuilder query) { } @SuppressWarnings("missingswitchdefault") - private DataSourceMetadata encryptDecryptAuthenticationData(DataSourceMetadata dataSourceMetadata, - Boolean isEncryption) { + private DataSourceMetadata encryptDecryptAuthenticationData( + DataSourceMetadata dataSourceMetadata, Boolean isEncryption) { Map propertiesMap = dataSourceMetadata.getProperties(); - Optional authTypeOptional - = propertiesMap.keySet().stream().filter(s -> s.endsWith("auth.type")) - .findFirst() - .map(propertiesMap::get) - .map(AuthenticationType::get); + Optional authTypeOptional = + propertiesMap.keySet().stream() + .filter(s -> s.endsWith("auth.type")) + .findFirst() + .map(propertiesMap::get) + .map(AuthenticationType::get); if 
(authTypeOptional.isPresent()) { switch (authTypeOptional.get()) { case BASICAUTH: @@ -267,8 +276,8 @@ private DataSourceMetadata encryptDecryptAuthenticationData(DataSourceMetadata d return dataSourceMetadata; } - private void handleBasicAuthPropertiesEncryptionDecryption(Map propertiesMap, - Boolean isEncryption) { + private void handleBasicAuthPropertiesEncryptionDecryption( + Map propertiesMap, Boolean isEncryption) { ArrayList list = new ArrayList<>(); propertiesMap.keySet().stream() .filter(s -> s.endsWith("auth.username")) @@ -281,21 +290,19 @@ private void handleBasicAuthPropertiesEncryptionDecryption(Map p encryptOrDecrypt(propertiesMap, isEncryption, list); } - private void encryptOrDecrypt(Map propertiesMap, Boolean isEncryption, - List keyIdentifiers) { + private void encryptOrDecrypt( + Map propertiesMap, Boolean isEncryption, List keyIdentifiers) { for (String key : keyIdentifiers) { if (isEncryption) { - propertiesMap.put(key, - this.encryptor.encrypt(propertiesMap.get(key))); + propertiesMap.put(key, this.encryptor.encrypt(propertiesMap.get(key))); } else { - propertiesMap.put(key, - this.encryptor.decrypt(propertiesMap.get(key))); + propertiesMap.put(key, this.encryptor.decrypt(propertiesMap.get(key))); } } } - private void handleSigV4PropertiesEncryptionDecryption(Map propertiesMap, - Boolean isEncryption) { + private void handleSigV4PropertiesEncryptionDecryption( + Map propertiesMap, Boolean isEncryption) { ArrayList list = new ArrayList<>(); propertiesMap.keySet().stream() .filter(s -> s.endsWith("auth.access_key")) @@ -307,5 +314,4 @@ private void handleSigV4PropertiesEncryptionDecryption(Map prope .ifPresent(list::add); encryptOrDecrypt(propertiesMap, isEncryption, list); } - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceAction.java b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceAction.java index fefd0250ce..54ca92b695 100644 --- 
a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceAction.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceAction.java @@ -23,38 +23,44 @@ public class TransportCreateDataSourceAction extends HandledTransportAction { public static final String NAME = "cluster:admin/opensearch/ql/datasources/create"; - public static final ActionType - ACTION_TYPE = new ActionType<>(NAME, CreateDataSourceActionResponse::new); + public static final ActionType ACTION_TYPE = + new ActionType<>(NAME, CreateDataSourceActionResponse::new); private DataSourceService dataSourceService; /** * TransportCreateDataSourceAction action for creating datasource. * - * @param transportService transportService. - * @param actionFilters actionFilters. + * @param transportService transportService. + * @param actionFilters actionFilters. * @param dataSourceService dataSourceService. */ @Inject - public TransportCreateDataSourceAction(TransportService transportService, - ActionFilters actionFilters, - DataSourceServiceImpl dataSourceService) { - super(TransportCreateDataSourceAction.NAME, transportService, actionFilters, + public TransportCreateDataSourceAction( + TransportService transportService, + ActionFilters actionFilters, + DataSourceServiceImpl dataSourceService) { + super( + TransportCreateDataSourceAction.NAME, + transportService, + actionFilters, CreateDataSourceActionRequest::new); this.dataSourceService = dataSourceService; } @Override - protected void doExecute(Task task, CreateDataSourceActionRequest request, - ActionListener actionListener) { + protected void doExecute( + Task task, + CreateDataSourceActionRequest request, + ActionListener actionListener) { try { DataSourceMetadata dataSourceMetadata = request.getDataSourceMetadata(); dataSourceService.createDataSource(dataSourceMetadata); - actionListener.onResponse(new CreateDataSourceActionResponse("Created DataSource with name " - + 
dataSourceMetadata.getName())); + actionListener.onResponse( + new CreateDataSourceActionResponse( + "Created DataSource with name " + dataSourceMetadata.getName())); } catch (Exception e) { actionListener.onFailure(e); } } - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceAction.java b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceAction.java index 39e51aabef..5578d40651 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceAction.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceAction.java @@ -23,37 +23,43 @@ public class TransportDeleteDataSourceAction extends HandledTransportAction { public static final String NAME = "cluster:admin/opensearch/ql/datasources/delete"; - public static final ActionType - ACTION_TYPE = new ActionType<>(NAME, DeleteDataSourceActionResponse::new); + public static final ActionType ACTION_TYPE = + new ActionType<>(NAME, DeleteDataSourceActionResponse::new); private DataSourceService dataSourceService; /** * TransportDeleteDataSourceAction action for deleting datasource. * - * @param transportService transportService. - * @param actionFilters actionFilters. + * @param transportService transportService. + * @param actionFilters actionFilters. * @param dataSourceService dataSourceService. 
*/ @Inject - public TransportDeleteDataSourceAction(TransportService transportService, - ActionFilters actionFilters, - DataSourceServiceImpl dataSourceService) { - super(TransportDeleteDataSourceAction.NAME, transportService, actionFilters, + public TransportDeleteDataSourceAction( + TransportService transportService, + ActionFilters actionFilters, + DataSourceServiceImpl dataSourceService) { + super( + TransportDeleteDataSourceAction.NAME, + transportService, + actionFilters, DeleteDataSourceActionRequest::new); this.dataSourceService = dataSourceService; } @Override - protected void doExecute(Task task, DeleteDataSourceActionRequest request, - ActionListener actionListener) { + protected void doExecute( + Task task, + DeleteDataSourceActionRequest request, + ActionListener actionListener) { try { dataSourceService.deleteDataSource(request.getDataSourceName()); - actionListener.onResponse(new DeleteDataSourceActionResponse("Deleted DataSource with name " - + request.getDataSourceName())); + actionListener.onResponse( + new DeleteDataSourceActionResponse( + "Deleted DataSource with name " + request.getDataSourceName())); } catch (Exception e) { actionListener.onFailure(e); } } - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceAction.java b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceAction.java index 477d10fa0b..34ad59c80f 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceAction.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceAction.java @@ -26,30 +26,36 @@ public class TransportGetDataSourceAction extends HandledTransportAction { public static final String NAME = "cluster:admin/opensearch/ql/datasources/read"; - public static final ActionType - ACTION_TYPE = new ActionType<>(NAME, GetDataSourceActionResponse::new); + public static final ActionType ACTION_TYPE = 
+ new ActionType<>(NAME, GetDataSourceActionResponse::new); private DataSourceService dataSourceService; /** * TransportGetDataSourceAction action for getting datasource. * - * @param transportService transportService. - * @param actionFilters actionFilters. + * @param transportService transportService. + * @param actionFilters actionFilters. * @param dataSourceService dataSourceService. */ @Inject - public TransportGetDataSourceAction(TransportService transportService, - ActionFilters actionFilters, - DataSourceServiceImpl dataSourceService) { - super(TransportGetDataSourceAction.NAME, transportService, actionFilters, + public TransportGetDataSourceAction( + TransportService transportService, + ActionFilters actionFilters, + DataSourceServiceImpl dataSourceService) { + super( + TransportGetDataSourceAction.NAME, + transportService, + actionFilters, GetDataSourceActionRequest::new); this.dataSourceService = dataSourceService; } @Override - protected void doExecute(Task task, GetDataSourceActionRequest request, - ActionListener actionListener) { + protected void doExecute( + Task task, + GetDataSourceActionRequest request, + ActionListener actionListener) { try { String responseContent; if (request.getDataSourceName() == null) { @@ -66,30 +72,27 @@ protected void doExecute(Task task, GetDataSourceActionRequest request, private String handleGetAllDataSourcesRequest() { String responseContent; - Set dataSourceMetadataSet = - dataSourceService.getDataSourceMetadata(false); - responseContent = new JsonResponseFormatter>( - JsonResponseFormatter.Style.PRETTY) { - @Override - protected Object buildJsonObject(Set response) { - return response; - } - }.format(dataSourceMetadataSet); + Set dataSourceMetadataSet = dataSourceService.getDataSourceMetadata(false); + responseContent = + new JsonResponseFormatter>(JsonResponseFormatter.Style.PRETTY) { + @Override + protected Object buildJsonObject(Set response) { + return response; + } + }.format(dataSourceMetadataSet); return 
responseContent; } private String handleSingleDataSourceRequest(String datasourceName) { String responseContent; - DataSourceMetadata dataSourceMetadata - = dataSourceService - .getDataSourceMetadata(datasourceName); - responseContent = new JsonResponseFormatter( - JsonResponseFormatter.Style.PRETTY) { - @Override - protected Object buildJsonObject(DataSourceMetadata response) { - return response; - } - }.format(dataSourceMetadata); + DataSourceMetadata dataSourceMetadata = dataSourceService.getDataSourceMetadata(datasourceName); + responseContent = + new JsonResponseFormatter(JsonResponseFormatter.Style.PRETTY) { + @Override + protected Object buildJsonObject(DataSourceMetadata response) { + return response; + } + }.format(dataSourceMetadata); return responseContent; } } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceAction.java b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceAction.java index e01a8ffea2..4325282f83 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceAction.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceAction.java @@ -23,8 +23,8 @@ public class TransportUpdateDataSourceAction extends HandledTransportAction { public static final String NAME = "cluster:admin/opensearch/ql/datasources/update"; - public static final ActionType - ACTION_TYPE = new ActionType<>(NAME, UpdateDataSourceActionResponse::new); + public static final ActionType ACTION_TYPE = + new ActionType<>(NAME, UpdateDataSourceActionResponse::new); private DataSourceService dataSourceService; @@ -36,24 +36,30 @@ public class TransportUpdateDataSourceAction * @param dataSourceService dataSourceService. 
*/ @Inject - public TransportUpdateDataSourceAction(TransportService transportService, - ActionFilters actionFilters, - DataSourceServiceImpl dataSourceService) { - super(TransportUpdateDataSourceAction.NAME, transportService, actionFilters, + public TransportUpdateDataSourceAction( + TransportService transportService, + ActionFilters actionFilters, + DataSourceServiceImpl dataSourceService) { + super( + TransportUpdateDataSourceAction.NAME, + transportService, + actionFilters, UpdateDataSourceActionRequest::new); this.dataSourceService = dataSourceService; } @Override - protected void doExecute(Task task, UpdateDataSourceActionRequest request, - ActionListener actionListener) { + protected void doExecute( + Task task, + UpdateDataSourceActionRequest request, + ActionListener actionListener) { try { dataSourceService.updateDataSource(request.getDataSourceMetadata()); - actionListener.onResponse(new UpdateDataSourceActionResponse("Updated DataSource with name " - + request.getDataSourceMetadata().getName())); + actionListener.onResponse( + new UpdateDataSourceActionResponse( + "Updated DataSource with name " + request.getDataSourceMetadata().getName())); } catch (Exception e) { actionListener.onFailure(e); } } - } diff --git a/datasources/src/main/java/org/opensearch/sql/datasources/utils/XContentParserUtils.java b/datasources/src/main/java/org/opensearch/sql/datasources/utils/XContentParserUtils.java index 38a500afae..1ad79addac 100644 --- a/datasources/src/main/java/org/opensearch/sql/datasources/utils/XContentParserUtils.java +++ b/datasources/src/main/java/org/opensearch/sql/datasources/utils/XContentParserUtils.java @@ -22,9 +22,7 @@ import org.opensearch.sql.datasource.model.DataSourceMetadata; import org.opensearch.sql.datasource.model.DataSourceType; -/** - * Utitlity class to serialize and deserialize objects in XContent. - */ +/** Utitlity class to serialize and deserialize objects in XContent. 
*/ @UtilityClass public class XContentParserUtils { public static final String NAME_FIELD = "name"; @@ -87,9 +85,13 @@ public static DataSourceMetadata toDataSourceMetadata(XContentParser parser) thr * @throws IOException IOException. */ public static DataSourceMetadata toDataSourceMetadata(String json) throws IOException { - try (XContentParser parser = XContentType.JSON.xContent() - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, - json)) { + try (XContentParser parser = + XContentType.JSON + .xContent() + .createParser( + NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + json)) { return toDataSourceMetadata(parser); } } @@ -116,6 +118,4 @@ public static XContentBuilder convertToXContent(DataSourceMetadata metadata) thr builder.endObject(); return builder; } - - } diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/auth/AuthenticationTypeTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/auth/AuthenticationTypeTest.java index 23bb4688e1..4bc4800093 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/auth/AuthenticationTypeTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/auth/AuthenticationTypeTest.java @@ -5,7 +5,6 @@ package org.opensearch.sql.datasources.auth; - import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNull; diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/auth/DataSourceUserAuthorizationHelperImplTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/auth/DataSourceUserAuthorizationHelperImplTest.java index 552bd0edf9..6ee3c12edd 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/auth/DataSourceUserAuthorizationHelperImplTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/auth/DataSourceUserAuthorizationHelperImplTest.java @@ -27,65 +27,76 @@ public class 
DataSourceUserAuthorizationHelperImplTest { @Mock(answer = Answers.RETURNS_DEEP_STUBS) private Client client; - @InjectMocks - private DataSourceUserAuthorizationHelperImpl dataSourceUserAuthorizationHelper; - + @InjectMocks private DataSourceUserAuthorizationHelperImpl dataSourceUserAuthorizationHelper; @Test public void testAuthorizeDataSourceWithAllowedRoles() { String userString = "myuser|bckrole1,bckrol2|prometheus_access|myTenant"; - Mockito.when(client.threadPool().getThreadContext() - .getTransient(OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT)) + Mockito.when( + client + .threadPool() + .getThreadContext() + .getTransient(OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT)) .thenReturn(userString); DataSourceMetadata dataSourceMetadata = dataSourceMetadata(); - this.dataSourceUserAuthorizationHelper - .authorizeDataSource(dataSourceMetadata); + this.dataSourceUserAuthorizationHelper.authorizeDataSource(dataSourceMetadata); } @Test public void testAuthorizeDataSourceWithAdminRole() { String userString = "myuser|bckrole1,bckrol2|all_access|myTenant"; - Mockito.when(client.threadPool().getThreadContext() - .getTransient(OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT)) + Mockito.when( + client + .threadPool() + .getThreadContext() + .getTransient(OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT)) .thenReturn(userString); DataSourceMetadata dataSourceMetadata = dataSourceMetadata(); - this.dataSourceUserAuthorizationHelper - .authorizeDataSource(dataSourceMetadata); + this.dataSourceUserAuthorizationHelper.authorizeDataSource(dataSourceMetadata); } @Test public void testAuthorizeDataSourceWithNullUserString() { - Mockito.when(client.threadPool().getThreadContext() - .getTransient(OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT)) + Mockito.when( + client + .threadPool() + .getThreadContext() + .getTransient(OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT)) .thenReturn(null); DataSourceMetadata dataSourceMetadata = dataSourceMetadata(); - this.dataSourceUserAuthorizationHelper - 
.authorizeDataSource(dataSourceMetadata); + this.dataSourceUserAuthorizationHelper.authorizeDataSource(dataSourceMetadata); } @Test public void testAuthorizeDataSourceWithDefaultDataSource() { String userString = "myuser|bckrole1,bckrol2|role1|myTenant"; - Mockito.when(client.threadPool().getThreadContext() - .getTransient(OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT)) + Mockito.when( + client + .threadPool() + .getThreadContext() + .getTransient(OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT)) .thenReturn(userString); DataSourceMetadata dataSourceMetadata = DataSourceMetadata.defaultOpenSearchDataSourceMetadata(); - this.dataSourceUserAuthorizationHelper - .authorizeDataSource(dataSourceMetadata); + this.dataSourceUserAuthorizationHelper.authorizeDataSource(dataSourceMetadata); } @Test public void testAuthorizeDataSourceWithException() { String userString = "myuser|bckrole1,bckrol2|role1|myTenant"; - Mockito.when(client.threadPool().getThreadContext() - .getTransient(OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT)) + Mockito.when( + client + .threadPool() + .getThreadContext() + .getTransient(OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT)) .thenReturn(userString); DataSourceMetadata dataSourceMetadata = dataSourceMetadata(); - SecurityException securityException - = Assert.assertThrows(SecurityException.class, + SecurityException securityException = + Assert.assertThrows( + SecurityException.class, () -> this.dataSourceUserAuthorizationHelper.authorizeDataSource(dataSourceMetadata)); - Assert.assertEquals("User is not authorized to access datasource test. " + Assert.assertEquals( + "User is not authorized to access datasource test. 
" + "User should be mapped to any of the roles in [prometheus_access] for access.", securityException.getMessage()); } @@ -98,5 +109,4 @@ private DataSourceMetadata dataSourceMetadata() { dataSourceMetadata.setProperties(new HashMap<>()); return dataSourceMetadata; } - } diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/encryptor/EncryptorImplTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/encryptor/EncryptorImplTest.java index d62a5a957a..26432b139b 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/encryptor/EncryptorImplTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/encryptor/EncryptorImplTest.java @@ -17,7 +17,6 @@ import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.junit.jupiter.MockitoExtension; - @ExtendWith(MockitoExtension.class) public class EncryptorImplTest { @@ -38,9 +37,11 @@ public void testMasterKeySize() { String input = "This is a test input"; String masterKey8 = "12345678"; Encryptor encryptor8 = new EncryptorImpl(masterKey8); - assertThrows(AwsCryptoException.class, () -> { - encryptor8.encrypt(input); - }); + assertThrows( + AwsCryptoException.class, + () -> { + encryptor8.encrypt(input); + }); String masterKey16 = "1234567812345678"; Encryptor encryptor16 = new EncryptorImpl(masterKey16); @@ -54,9 +55,11 @@ public void testMasterKeySize() { String masterKey17 = "12345678123456781"; Encryptor encryptor17 = new EncryptorImpl(masterKey17); - assertThrows(AwsCryptoException.class, () -> { - encryptor17.encrypt(input); - }); + assertThrows( + AwsCryptoException.class, + () -> { + encryptor17.encrypt(input); + }); } @Test @@ -64,9 +67,11 @@ public void testInvalidBase64String() { String encrypted = "invalidBase64String"; Encryptor encryptor = new EncryptorImpl("randomMasterKey"); - assertThrows(BadCiphertextException.class, () -> { - encryptor.decrypt(encrypted); - }); + assertThrows( + BadCiphertextException.class, + () -> { + 
encryptor.decrypt(encrypted); + }); } @Test @@ -80,19 +85,21 @@ public void testDecryptWithDifferentKey() { String encrypted = encryptor1.encrypt(input); - assertThrows(Exception.class, () -> { - encryptor2.decrypt(encrypted); - }); + assertThrows( + Exception.class, + () -> { + encryptor2.decrypt(encrypted); + }); } @Test public void testEncryptionAndDecryptionWithNullMasterKey() { String input = "This is a test input"; Encryptor encryptor = new EncryptorImpl(null); - IllegalStateException illegalStateException - = Assertions.assertThrows(IllegalStateException.class, - () -> encryptor.encrypt(input)); - Assertions.assertEquals("Master key is a required config for using create and" + IllegalStateException illegalStateException = + Assertions.assertThrows(IllegalStateException.class, () -> encryptor.encrypt(input)); + Assertions.assertEquals( + "Master key is a required config for using create and" + " update datasource APIs." + "Please set plugins.query.datasources.encryption.masterkey config " + "in opensearch.yml in all the cluster nodes. " @@ -100,10 +107,10 @@ public void testEncryptionAndDecryptionWithNullMasterKey() { + "https://github.com/opensearch-project/sql/blob/main/docs/user/ppl/" + "admin/datasources.rst#master-key-config-for-encrypting-credential-information", illegalStateException.getMessage()); - illegalStateException - = Assertions.assertThrows(IllegalStateException.class, - () -> encryptor.decrypt(input)); - Assertions.assertEquals("Master key is a required config for using create and" + illegalStateException = + Assertions.assertThrows(IllegalStateException.class, () -> encryptor.decrypt(input)); + Assertions.assertEquals( + "Master key is a required config for using create and" + " update datasource APIs." + "Please set plugins.query.datasources.encryption.masterkey config " + "in opensearch.yml in all the cluster nodes. 
" @@ -118,10 +125,10 @@ public void testEncryptionAndDecryptionWithEmptyMasterKey() { String masterKey = ""; String input = "This is a test input"; Encryptor encryptor = new EncryptorImpl(masterKey); - IllegalStateException illegalStateException - = Assertions.assertThrows(IllegalStateException.class, - () -> encryptor.encrypt(input)); - Assertions.assertEquals("Master key is a required config for using create and" + IllegalStateException illegalStateException = + Assertions.assertThrows(IllegalStateException.class, () -> encryptor.encrypt(input)); + Assertions.assertEquals( + "Master key is a required config for using create and" + " update datasource APIs." + "Please set plugins.query.datasources.encryption.masterkey config " + "in opensearch.yml in all the cluster nodes. " @@ -129,10 +136,10 @@ public void testEncryptionAndDecryptionWithEmptyMasterKey() { + "https://github.com/opensearch-project/sql/blob/main/docs/user/ppl/" + "admin/datasources.rst#master-key-config-for-encrypting-credential-information", illegalStateException.getMessage()); - illegalStateException - = Assertions.assertThrows(IllegalStateException.class, - () -> encryptor.decrypt(input)); - Assertions.assertEquals("Master key is a required config for using create and" + illegalStateException = + Assertions.assertThrows(IllegalStateException.class, () -> encryptor.decrypt(input)); + Assertions.assertEquals( + "Master key is a required config for using create and" + " update datasource APIs." + "Please set plugins.query.datasources.encryption.masterkey config " + "in opensearch.yml in all the cluster nodes. 
" @@ -141,5 +148,4 @@ public void testEncryptionAndDecryptionWithEmptyMasterKey() { + "admin/datasources.rst#master-key-config-for-encrypting-credential-information", illegalStateException.getMessage()); } - } diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/service/DataSourceLoaderCacheImplTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/service/DataSourceLoaderCacheImplTest.java index bf656857b0..b2ea221eb7 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/service/DataSourceLoaderCacheImplTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/service/DataSourceLoaderCacheImplTest.java @@ -25,11 +25,9 @@ @ExtendWith(MockitoExtension.class) class DataSourceLoaderCacheImplTest { - @Mock - private DataSourceFactory dataSourceFactory; + @Mock private DataSourceFactory dataSourceFactory; - @Mock - private StorageEngine storageEngine; + @Mock private StorageEngine storageEngine; @BeforeEach public void setup() { @@ -55,8 +53,8 @@ void testGetOrLoadDataSource() { dataSourceMetadata.setProperties(ImmutableMap.of()); DataSource dataSource = dataSourceLoaderCache.getOrLoadDataSource(dataSourceMetadata); verify(dataSourceFactory, times(1)).createDataSource(dataSourceMetadata); - Assertions.assertEquals(dataSource, - dataSourceLoaderCache.getOrLoadDataSource(dataSourceMetadata)); + Assertions.assertEquals( + dataSource, dataSourceLoaderCache.getOrLoadDataSource(dataSourceMetadata)); verifyNoMoreInteractions(dataSourceFactory); } @@ -81,5 +79,4 @@ private DataSourceMetadata getMetadata() { dataSourceMetadata.setProperties(ImmutableMap.of()); return dataSourceMetadata; } - } diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/service/DataSourceServiceImplTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/service/DataSourceServiceImplTest.java index e1312ec582..56d3586c6e 100644 --- 
a/datasources/src/test/java/org/opensearch/sql/datasources/service/DataSourceServiceImplTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/service/DataSourceServiceImplTest.java @@ -46,15 +46,11 @@ @ExtendWith(MockitoExtension.class) class DataSourceServiceImplTest { - @Mock - private DataSourceFactory dataSourceFactory; - @Mock - private StorageEngine storageEngine; - @Mock - private DataSourceMetadataStorage dataSourceMetadataStorage; + @Mock private DataSourceFactory dataSourceFactory; + @Mock private StorageEngine storageEngine; + @Mock private DataSourceMetadataStorage dataSourceMetadataStorage; - @Mock - private DataSourceUserAuthorizationHelper dataSourceUserAuthorizationHelper; + @Mock private DataSourceUserAuthorizationHelper dataSourceUserAuthorizationHelper; private DataSourceService dataSourceService; @@ -75,7 +71,8 @@ public void setup() { { add(dataSourceFactory); } - }, dataSourceMetadataStorage, + }, + dataSourceMetadataStorage, dataSourceUserAuthorizationHelper); } @@ -91,22 +88,18 @@ void testGetDataSourceForDefaultOpenSearchDataSource() { @Test void testGetDataSourceForNonExistingDataSource() { - when(dataSourceMetadataStorage.getDataSourceMetadata("test")) - .thenReturn(Optional.empty()); + when(dataSourceMetadataStorage.getDataSourceMetadata("test")).thenReturn(Optional.empty()); DataSourceNotFoundException exception = assertThrows( - DataSourceNotFoundException.class, - () -> - dataSourceService.getDataSource("test")); + DataSourceNotFoundException.class, () -> dataSourceService.getDataSource("test")); assertEquals("DataSource with name test doesn't exist.", exception.getMessage()); - verify(dataSourceMetadataStorage, times(1)) - .getDataSourceMetadata("test"); + verify(dataSourceMetadataStorage, times(1)).getDataSourceMetadata("test"); } @Test void testGetDataSourceSuccessCase() { - DataSourceMetadata dataSourceMetadata = metadata("test", DataSourceType.OPENSEARCH, - Collections.emptyList(), ImmutableMap.of()); + 
DataSourceMetadata dataSourceMetadata = + metadata("test", DataSourceType.OPENSEARCH, Collections.emptyList(), ImmutableMap.of()); doNothing().when(dataSourceUserAuthorizationHelper).authorizeDataSource(dataSourceMetadata); when(dataSourceMetadataStorage.getDataSourceMetadata("test")) .thenReturn(Optional.of(dataSourceMetadata)); @@ -114,26 +107,31 @@ void testGetDataSourceSuccessCase() { assertEquals("test", dataSource.getName()); assertEquals(DataSourceType.OPENSEARCH, dataSource.getConnectorType()); verify(dataSourceMetadataStorage, times(1)).getDataSourceMetadata("test"); - verify(dataSourceFactory, times(1)) - .createDataSource(dataSourceMetadata); + verify(dataSourceFactory, times(1)).createDataSource(dataSourceMetadata); } @Test void testGetDataSourceWithAuthorizationFailure() { - DataSourceMetadata dataSourceMetadata = metadata("test", DataSourceType.OPENSEARCH, - Collections.singletonList("prometheus_access"), ImmutableMap.of()); - doThrow(new SecurityException("User is not authorized to access datasource test. " - + "User should be mapped to any of the roles in [prometheus_access] for access.")) + DataSourceMetadata dataSourceMetadata = + metadata( + "test", + DataSourceType.OPENSEARCH, + Collections.singletonList("prometheus_access"), + ImmutableMap.of()); + doThrow( + new SecurityException( + "User is not authorized to access datasource test. User should be mapped to any of" + + " the roles in [prometheus_access] for access.")) .when(dataSourceUserAuthorizationHelper) .authorizeDataSource(dataSourceMetadata); when(dataSourceMetadataStorage.getDataSourceMetadata("test")) .thenReturn(Optional.of(dataSourceMetadata)); - - SecurityException securityException - = Assertions.assertThrows(SecurityException.class, - () -> dataSourceService.getDataSource("test")); - Assertions.assertEquals("User is not authorized to access datasource test. 
" + SecurityException securityException = + Assertions.assertThrows( + SecurityException.class, () -> dataSourceService.getDataSource("test")); + Assertions.assertEquals( + "User is not authorized to access datasource test. " + "User should be mapped to any of the roles in [prometheus_access] for access.", securityException.getMessage()); @@ -141,21 +139,23 @@ void testGetDataSourceWithAuthorizationFailure() { verify(dataSourceFactory, times(0)).createDataSource(dataSourceMetadata); } - @Test void testCreateDataSourceSuccessCase() { - DataSourceMetadata dataSourceMetadata = metadata("testDS", DataSourceType.OPENSEARCH, - Collections.emptyList(), ImmutableMap.of()); + DataSourceMetadata dataSourceMetadata = + metadata("testDS", DataSourceType.OPENSEARCH, Collections.emptyList(), ImmutableMap.of()); dataSourceService.createDataSource(dataSourceMetadata); - verify(dataSourceMetadataStorage, times(1)) - .createDataSourceMetadata(dataSourceMetadata); - verify(dataSourceFactory, times(1)) - .createDataSource(dataSourceMetadata); + verify(dataSourceMetadataStorage, times(1)).createDataSourceMetadata(dataSourceMetadata); + verify(dataSourceFactory, times(1)).createDataSource(dataSourceMetadata); when(dataSourceMetadataStorage.getDataSourceMetadata("testDS")) - .thenReturn(Optional.ofNullable(metadata("testDS", DataSourceType.OPENSEARCH, - Collections.emptyList(), ImmutableMap.of()))); + .thenReturn( + Optional.ofNullable( + metadata( + "testDS", + DataSourceType.OPENSEARCH, + Collections.emptyList(), + ImmutableMap.of()))); DataSource dataSource = dataSourceService.getDataSource("testDS"); assertEquals("testDS", dataSource.getName()); assertEquals(storageEngine, dataSource.getStorageEngine()); @@ -164,14 +164,15 @@ void testCreateDataSourceSuccessCase() { @Test void testCreateDataSourceWithDisallowedDatasourceName() { - DataSourceMetadata dataSourceMetadata = metadata("testDS$$$", DataSourceType.OPENSEARCH, - Collections.emptyList(), ImmutableMap.of()); + 
DataSourceMetadata dataSourceMetadata = + metadata( + "testDS$$$", DataSourceType.OPENSEARCH, Collections.emptyList(), ImmutableMap.of()); IllegalArgumentException exception = assertThrows( IllegalArgumentException.class, - () -> - dataSourceService.createDataSource(dataSourceMetadata)); - assertEquals("DataSource Name: testDS$$$ contains illegal characters." + () -> dataSourceService.createDataSource(dataSourceMetadata)); + assertEquals( + "DataSource Name: testDS$$$ contains illegal characters." + " Allowed characters: a-zA-Z0-9_-*@.", exception.getMessage()); verify(dataSourceFactory, times(1)).getDataSourceType(); @@ -181,14 +182,14 @@ void testCreateDataSourceWithDisallowedDatasourceName() { @Test void testCreateDataSourceWithEmptyDatasourceName() { - DataSourceMetadata dataSourceMetadata = metadata("", DataSourceType.OPENSEARCH, - Collections.emptyList(), ImmutableMap.of()); + DataSourceMetadata dataSourceMetadata = + metadata("", DataSourceType.OPENSEARCH, Collections.emptyList(), ImmutableMap.of()); IllegalArgumentException exception = assertThrows( IllegalArgumentException.class, - () -> - dataSourceService.createDataSource(dataSourceMetadata)); - assertEquals("Missing Name Field from a DataSource. Name is a required parameter.", + () -> dataSourceService.createDataSource(dataSourceMetadata)); + assertEquals( + "Missing Name Field from a DataSource. 
Name is a required parameter.", exception.getMessage()); verify(dataSourceFactory, times(1)).getDataSourceType(); verify(dataSourceFactory, times(0)).createDataSource(dataSourceMetadata); @@ -197,14 +198,14 @@ void testCreateDataSourceWithEmptyDatasourceName() { @Test void testCreateDataSourceWithNullParameters() { - DataSourceMetadata dataSourceMetadata = metadata("testDS", DataSourceType.OPENSEARCH, - Collections.emptyList(), null); + DataSourceMetadata dataSourceMetadata = + metadata("testDS", DataSourceType.OPENSEARCH, Collections.emptyList(), null); IllegalArgumentException exception = assertThrows( IllegalArgumentException.class, - () -> - dataSourceService.createDataSource(dataSourceMetadata)); - assertEquals("Missing properties field in datasource configuration. " + () -> dataSourceService.createDataSource(dataSourceMetadata)); + assertEquals( + "Missing properties field in datasource configuration. " + "Properties are required parameters.", exception.getMessage()); verify(dataSourceFactory, times(1)).getDataSourceType(); @@ -219,88 +220,99 @@ void testGetDataSourceMetadataSet() { properties.put("prometheus.auth.type", "basicauth"); properties.put("prometheus.auth.username", "username"); properties.put("prometheus.auth.password", "password"); - when(dataSourceMetadataStorage.getDataSourceMetadata()).thenReturn(new ArrayList<>() { - { - add(metadata("testDS", DataSourceType.PROMETHEUS, Collections.emptyList(), - properties)); - } - }); - Set dataSourceMetadataSet - = dataSourceService.getDataSourceMetadata(false); + when(dataSourceMetadataStorage.getDataSourceMetadata()) + .thenReturn( + new ArrayList<>() { + { + add( + metadata( + "testDS", DataSourceType.PROMETHEUS, Collections.emptyList(), properties)); + } + }); + Set dataSourceMetadataSet = dataSourceService.getDataSourceMetadata(false); assertEquals(1, dataSourceMetadataSet.size()); DataSourceMetadata dataSourceMetadata = dataSourceMetadataSet.iterator().next(); 
assertTrue(dataSourceMetadata.getProperties().containsKey("prometheus.uri")); assertFalse(dataSourceMetadata.getProperties().containsKey("prometheus.auth.type")); assertFalse(dataSourceMetadata.getProperties().containsKey("prometheus.auth.username")); assertFalse(dataSourceMetadata.getProperties().containsKey("prometheus.auth.password")); - assertFalse(dataSourceMetadataSet - .contains(DataSourceMetadata.defaultOpenSearchDataSourceMetadata())); + assertFalse( + dataSourceMetadataSet.contains(DataSourceMetadata.defaultOpenSearchDataSourceMetadata())); verify(dataSourceMetadataStorage, times(1)).getDataSourceMetadata(); } @Test void testGetDataSourceMetadataSetWithDefaultDatasource() { - when(dataSourceMetadataStorage.getDataSourceMetadata()).thenReturn(new ArrayList<>() { - { - add(metadata("testDS", DataSourceType.PROMETHEUS, Collections.emptyList(), - ImmutableMap.of())); - } - }); - Set dataSourceMetadataSet - = dataSourceService.getDataSourceMetadata(true); + when(dataSourceMetadataStorage.getDataSourceMetadata()) + .thenReturn( + new ArrayList<>() { + { + add( + metadata( + "testDS", + DataSourceType.PROMETHEUS, + Collections.emptyList(), + ImmutableMap.of())); + } + }); + Set dataSourceMetadataSet = dataSourceService.getDataSourceMetadata(true); assertEquals(2, dataSourceMetadataSet.size()); - assertTrue(dataSourceMetadataSet - .contains(DataSourceMetadata.defaultOpenSearchDataSourceMetadata())); + assertTrue( + dataSourceMetadataSet.contains(DataSourceMetadata.defaultOpenSearchDataSourceMetadata())); verify(dataSourceMetadataStorage, times(1)).getDataSourceMetadata(); } @Test void testUpdateDataSourceSuccessCase() { - DataSourceMetadata dataSourceMetadata = metadata("testDS", DataSourceType.OPENSEARCH, - Collections.emptyList(), ImmutableMap.of()); + DataSourceMetadata dataSourceMetadata = + metadata("testDS", DataSourceType.OPENSEARCH, Collections.emptyList(), ImmutableMap.of()); dataSourceService.updateDataSource(dataSourceMetadata); - 
verify(dataSourceMetadataStorage, times(1)) - .updateDataSourceMetadata(dataSourceMetadata); - verify(dataSourceFactory, times(1)) - .createDataSource(dataSourceMetadata); + verify(dataSourceMetadataStorage, times(1)).updateDataSourceMetadata(dataSourceMetadata); + verify(dataSourceFactory, times(1)).createDataSource(dataSourceMetadata); } @Test void testUpdateDefaultDataSource() { - DataSourceMetadata dataSourceMetadata = metadata(DEFAULT_DATASOURCE_NAME, - DataSourceType.OPENSEARCH, Collections.emptyList(), ImmutableMap.of()); - UnsupportedOperationException unsupportedOperationException - = assertThrows(UnsupportedOperationException.class, + DataSourceMetadata dataSourceMetadata = + metadata( + DEFAULT_DATASOURCE_NAME, + DataSourceType.OPENSEARCH, + Collections.emptyList(), + ImmutableMap.of()); + UnsupportedOperationException unsupportedOperationException = + assertThrows( + UnsupportedOperationException.class, () -> dataSourceService.updateDataSource(dataSourceMetadata)); - assertEquals("Not allowed to update default datasource :" + DEFAULT_DATASOURCE_NAME, + assertEquals( + "Not allowed to update default datasource :" + DEFAULT_DATASOURCE_NAME, unsupportedOperationException.getMessage()); } @Test void testDeleteDatasource() { dataSourceService.deleteDataSource("testDS"); - verify(dataSourceMetadataStorage, times(1)) - .deleteDataSourceMetadata("testDS"); + verify(dataSourceMetadataStorage, times(1)).deleteDataSourceMetadata("testDS"); } @Test void testDeleteDefaultDatasource() { - UnsupportedOperationException unsupportedOperationException - = assertThrows(UnsupportedOperationException.class, - () -> dataSourceService.deleteDataSource(DEFAULT_DATASOURCE_NAME)); - assertEquals("Not allowed to delete default datasource :" + DEFAULT_DATASOURCE_NAME, + UnsupportedOperationException unsupportedOperationException = + assertThrows( + UnsupportedOperationException.class, + () -> dataSourceService.deleteDataSource(DEFAULT_DATASOURCE_NAME)); + assertEquals( + "Not 
allowed to delete default datasource :" + DEFAULT_DATASOURCE_NAME, unsupportedOperationException.getMessage()); } @Test void testDataSourceExists() { - when(dataSourceMetadataStorage.getDataSourceMetadata("test")) - .thenReturn(Optional.empty()); + when(dataSourceMetadataStorage.getDataSourceMetadata("test")).thenReturn(Optional.empty()); Assertions.assertFalse(dataSourceService.dataSourceExists("test")); when(dataSourceMetadataStorage.getDataSourceMetadata("test")) - .thenReturn(Optional.of(metadata("test", DataSourceType.PROMETHEUS, - List.of(), ImmutableMap.of()))); + .thenReturn( + Optional.of(metadata("test", DataSourceType.PROMETHEUS, List.of(), ImmutableMap.of()))); Assertions.assertTrue(dataSourceService.dataSourceExists("test")); } @@ -310,9 +322,8 @@ void testDataSourceExistsForDefaultDataSource() { verifyNoInteractions(dataSourceMetadataStorage); } - DataSourceMetadata metadata(String name, DataSourceType type, - List allowedRoles, - Map properties) { + DataSourceMetadata metadata( + String name, DataSourceType type, List allowedRoles, Map properties) { DataSourceMetadata dataSourceMetadata = new DataSourceMetadata(); dataSourceMetadata.setName(name); dataSourceMetadata.setConnector(type); @@ -329,13 +340,15 @@ void testRemovalOfAuthorizationInfo() { properties.put("prometheus.auth.username", "username"); properties.put("prometheus.auth.password", "password"); DataSourceMetadata dataSourceMetadata = - new DataSourceMetadata("testDS", DataSourceType.PROMETHEUS, - Collections.singletonList("prometheus_access"), properties); + new DataSourceMetadata( + "testDS", + DataSourceType.PROMETHEUS, + Collections.singletonList("prometheus_access"), + properties); when(dataSourceMetadataStorage.getDataSourceMetadata("testDS")) .thenReturn(Optional.of(dataSourceMetadata)); - DataSourceMetadata dataSourceMetadata1 - = dataSourceService.getDataSourceMetadata("testDS"); + DataSourceMetadata dataSourceMetadata1 = dataSourceService.getDataSourceMetadata("testDS"); 
assertEquals("testDS", dataSourceMetadata1.getName()); assertEquals(DataSourceType.PROMETHEUS, dataSourceMetadata1.getConnector()); assertFalse(dataSourceMetadata1.getProperties().containsKey("prometheus.auth.type")); @@ -345,10 +358,11 @@ void testRemovalOfAuthorizationInfo() { @Test void testGetDataSourceMetadataForNonExistingDataSource() { - when(dataSourceMetadataStorage.getDataSourceMetadata("testDS")) - .thenReturn(Optional.empty()); - IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, - () -> dataSourceService.getDataSourceMetadata("testDS")); + when(dataSourceMetadataStorage.getDataSourceMetadata("testDS")).thenReturn(Optional.empty()); + IllegalArgumentException exception = + assertThrows( + IllegalArgumentException.class, + () -> dataSourceService.getDataSourceMetadata("testDS")); assertEquals("DataSource with name: testDS doesn't exist.", exception.getMessage()); } @@ -360,16 +374,15 @@ void testGetDataSourceMetadataForSpecificDataSourceName() { properties.put("prometheus.auth.username", "username"); properties.put("prometheus.auth.password", "password"); when(dataSourceMetadataStorage.getDataSourceMetadata("testDS")) - .thenReturn(Optional.ofNullable( - metadata("testDS", DataSourceType.PROMETHEUS, Collections.emptyList(), - properties))); - DataSourceMetadata dataSourceMetadata - = this.dataSourceService.getDataSourceMetadata("testDS"); + .thenReturn( + Optional.ofNullable( + metadata( + "testDS", DataSourceType.PROMETHEUS, Collections.emptyList(), properties))); + DataSourceMetadata dataSourceMetadata = this.dataSourceService.getDataSourceMetadata("testDS"); assertTrue(dataSourceMetadata.getProperties().containsKey("prometheus.uri")); assertFalse(dataSourceMetadata.getProperties().containsKey("prometheus.auth.type")); assertFalse(dataSourceMetadata.getProperties().containsKey("prometheus.auth.username")); assertFalse(dataSourceMetadata.getProperties().containsKey("prometheus.auth.password")); 
verify(dataSourceMetadataStorage, times(1)).getDataSourceMetadata("testDS"); } - } diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorageTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorageTest.java index b58ef3ea1e..7d41737b2d 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorageTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/storage/OpenSearchDataSourceMetadataStorageTest.java @@ -52,33 +52,25 @@ public class OpenSearchDataSourceMetadataStorageTest { @Mock(answer = Answers.RETURNS_DEEP_STUBS) private Client client; + @Mock(answer = Answers.RETURNS_DEEP_STUBS) private ClusterService clusterService; - @Mock - private Encryptor encryptor; + + @Mock private Encryptor encryptor; + @Mock(answer = Answers.RETURNS_DEEP_STUBS) private SearchResponse searchResponse; - @Mock - private ActionFuture searchResponseActionFuture; - @Mock - private ActionFuture createIndexResponseActionFuture; - @Mock - private ActionFuture indexResponseActionFuture; - @Mock - private IndexResponse indexResponse; - @Mock - private ActionFuture updateResponseActionFuture; - @Mock - private UpdateResponse updateResponse; - @Mock - private ActionFuture deleteResponseActionFuture; - @Mock - private DeleteResponse deleteResponse; - @Mock - private SearchHit searchHit; - @InjectMocks - private OpenSearchDataSourceMetadataStorage openSearchDataSourceMetadataStorage; + @Mock private ActionFuture searchResponseActionFuture; + @Mock private ActionFuture createIndexResponseActionFuture; + @Mock private ActionFuture indexResponseActionFuture; + @Mock private IndexResponse indexResponse; + @Mock private ActionFuture updateResponseActionFuture; + @Mock private UpdateResponse updateResponse; + @Mock private ActionFuture deleteResponseActionFuture; + @Mock private DeleteResponse deleteResponse; + @Mock private 
SearchHit searchHit; + @InjectMocks private OpenSearchDataSourceMetadataStorage openSearchDataSourceMetadataStorage; @SneakyThrows @Test @@ -91,28 +83,24 @@ public void testGetDataSourceMetadata() { Mockito.when(searchResponse.getHits()) .thenReturn( new SearchHits( - new SearchHit[] {searchHit}, - new TotalHits(21, TotalHits.Relation.EQUAL_TO), - 1.0F)); - Mockito.when(searchHit.getSourceAsString()) - .thenReturn(getBasicDataSourceMetadataString()); + new SearchHit[] {searchHit}, new TotalHits(21, TotalHits.Relation.EQUAL_TO), 1.0F)); + Mockito.when(searchHit.getSourceAsString()).thenReturn(getBasicDataSourceMetadataString()); Mockito.when(encryptor.decrypt("password")).thenReturn("password"); Mockito.when(encryptor.decrypt("username")).thenReturn("username"); - Optional dataSourceMetadataOptional - = openSearchDataSourceMetadataStorage.getDataSourceMetadata(TEST_DATASOURCE_INDEX_NAME); - + Optional dataSourceMetadataOptional = + openSearchDataSourceMetadataStorage.getDataSourceMetadata(TEST_DATASOURCE_INDEX_NAME); Assertions.assertFalse(dataSourceMetadataOptional.isEmpty()); DataSourceMetadata dataSourceMetadata = dataSourceMetadataOptional.get(); Assertions.assertEquals(TEST_DATASOURCE_INDEX_NAME, dataSourceMetadata.getName()); Assertions.assertEquals(DataSourceType.PROMETHEUS, dataSourceMetadata.getConnector()); - Assertions.assertEquals("password", - dataSourceMetadata.getProperties().get("prometheus.auth.password")); - Assertions.assertEquals("username", - dataSourceMetadata.getProperties().get("prometheus.auth.username")); - Assertions.assertEquals("basicauth", - dataSourceMetadata.getProperties().get("prometheus.auth.type")); + Assertions.assertEquals( + "password", dataSourceMetadata.getProperties().get("prometheus.auth.password")); + Assertions.assertEquals( + "username", dataSourceMetadata.getProperties().get("prometheus.auth.username")); + Assertions.assertEquals( + "basicauth", dataSourceMetadata.getProperties().get("prometheus.auth.type")); } 
@SneakyThrows @@ -124,9 +112,12 @@ public void testGetDataSourceMetadataWith404SearchResponse() { Mockito.when(searchResponseActionFuture.actionGet()).thenReturn(searchResponse); Mockito.when(searchResponse.status()).thenReturn(RestStatus.NOT_FOUND); - RuntimeException runtimeException = Assertions.assertThrows(RuntimeException.class, - () -> openSearchDataSourceMetadataStorage.getDataSourceMetadata( - TEST_DATASOURCE_INDEX_NAME)); + RuntimeException runtimeException = + Assertions.assertThrows( + RuntimeException.class, + () -> + openSearchDataSourceMetadataStorage.getDataSourceMetadata( + TEST_DATASOURCE_INDEX_NAME)); Assertions.assertEquals( "Fetching dataSource metadata information failed with status : NOT_FOUND", runtimeException.getMessage()); @@ -143,15 +134,13 @@ public void testGetDataSourceMetadataWithParsingFailed() { Mockito.when(searchResponse.getHits()) .thenReturn( new SearchHits( - new SearchHit[] {searchHit}, - new TotalHits(21, TotalHits.Relation.EQUAL_TO), - 1.0F)); - Mockito.when(searchHit.getSourceAsString()) - .thenReturn("..testDs"); + new SearchHit[] {searchHit}, new TotalHits(21, TotalHits.Relation.EQUAL_TO), 1.0F)); + Mockito.when(searchHit.getSourceAsString()).thenReturn("..testDs"); - Assertions.assertThrows(RuntimeException.class, - () -> openSearchDataSourceMetadataStorage.getDataSourceMetadata( - TEST_DATASOURCE_INDEX_NAME)); + Assertions.assertThrows( + RuntimeException.class, + () -> + openSearchDataSourceMetadataStorage.getDataSourceMetadata(TEST_DATASOURCE_INDEX_NAME)); } @SneakyThrows @@ -165,28 +154,24 @@ public void testGetDataSourceMetadataWithAWSSigV4() { Mockito.when(searchResponse.getHits()) .thenReturn( new SearchHits( - new SearchHit[] {searchHit}, - new TotalHits(21, TotalHits.Relation.EQUAL_TO), - 1.0F)); - Mockito.when(searchHit.getSourceAsString()) - .thenReturn(getAWSSigv4DataSourceMetadataString()); + new SearchHit[] {searchHit}, new TotalHits(21, TotalHits.Relation.EQUAL_TO), 1.0F)); + 
Mockito.when(searchHit.getSourceAsString()).thenReturn(getAWSSigv4DataSourceMetadataString()); Mockito.when(encryptor.decrypt("secret_key")).thenReturn("secret_key"); Mockito.when(encryptor.decrypt("access_key")).thenReturn("access_key"); - Optional dataSourceMetadataOptional - = openSearchDataSourceMetadataStorage.getDataSourceMetadata(TEST_DATASOURCE_INDEX_NAME); - + Optional dataSourceMetadataOptional = + openSearchDataSourceMetadataStorage.getDataSourceMetadata(TEST_DATASOURCE_INDEX_NAME); Assertions.assertFalse(dataSourceMetadataOptional.isEmpty()); DataSourceMetadata dataSourceMetadata = dataSourceMetadataOptional.get(); Assertions.assertEquals(TEST_DATASOURCE_INDEX_NAME, dataSourceMetadata.getName()); Assertions.assertEquals(DataSourceType.PROMETHEUS, dataSourceMetadata.getConnector()); - Assertions.assertEquals("secret_key", - dataSourceMetadata.getProperties().get("prometheus.auth.secret_key")); - Assertions.assertEquals("access_key", - dataSourceMetadata.getProperties().get("prometheus.auth.access_key")); - Assertions.assertEquals("awssigv4", - dataSourceMetadata.getProperties().get("prometheus.auth.type")); + Assertions.assertEquals( + "secret_key", dataSourceMetadata.getProperties().get("prometheus.auth.secret_key")); + Assertions.assertEquals( + "access_key", dataSourceMetadata.getProperties().get("prometheus.auth.access_key")); + Assertions.assertEquals( + "awssigv4", dataSourceMetadata.getProperties().get("prometheus.auth.type")); } @SneakyThrows @@ -200,31 +185,27 @@ public void testGetDataSourceMetadataWithBasicAuth() { Mockito.when(searchResponse.getHits()) .thenReturn( new SearchHits( - new SearchHit[] {searchHit}, - new TotalHits(21, TotalHits.Relation.EQUAL_TO), - 1.0F)); + new SearchHit[] {searchHit}, new TotalHits(21, TotalHits.Relation.EQUAL_TO), 1.0F)); Mockito.when(searchHit.getSourceAsString()) .thenReturn(getDataSourceMetadataStringWithBasicAuthentication()); Mockito.when(encryptor.decrypt("username")).thenReturn("username"); 
Mockito.when(encryptor.decrypt("password")).thenReturn("password"); - Optional dataSourceMetadataOptional - = openSearchDataSourceMetadataStorage.getDataSourceMetadata(TEST_DATASOURCE_INDEX_NAME); - + Optional dataSourceMetadataOptional = + openSearchDataSourceMetadataStorage.getDataSourceMetadata(TEST_DATASOURCE_INDEX_NAME); Assertions.assertFalse(dataSourceMetadataOptional.isEmpty()); DataSourceMetadata dataSourceMetadata = dataSourceMetadataOptional.get(); Assertions.assertEquals(TEST_DATASOURCE_INDEX_NAME, dataSourceMetadata.getName()); Assertions.assertEquals(DataSourceType.PROMETHEUS, dataSourceMetadata.getConnector()); - Assertions.assertEquals("username", - dataSourceMetadata.getProperties().get("prometheus.auth.username")); - Assertions.assertEquals("password", - dataSourceMetadata.getProperties().get("prometheus.auth.password")); - Assertions.assertEquals("basicauth", - dataSourceMetadata.getProperties().get("prometheus.auth.type")); + Assertions.assertEquals( + "username", dataSourceMetadata.getProperties().get("prometheus.auth.username")); + Assertions.assertEquals( + "password", dataSourceMetadata.getProperties().get("prometheus.auth.password")); + Assertions.assertEquals( + "basicauth", dataSourceMetadata.getProperties().get("prometheus.auth.type")); } - @SneakyThrows @Test public void testGetDataSourceMetadataList() { @@ -236,15 +217,12 @@ public void testGetDataSourceMetadataList() { Mockito.when(searchResponse.getHits()) .thenReturn( new SearchHits( - new SearchHit[] {searchHit}, - new TotalHits(21, TotalHits.Relation.EQUAL_TO), - 1.0F)); + new SearchHit[] {searchHit}, new TotalHits(21, TotalHits.Relation.EQUAL_TO), 1.0F)); Mockito.when(searchHit.getSourceAsString()) .thenReturn(getDataSourceMetadataStringWithNoAuthentication()); - List dataSourceMetadataList - = openSearchDataSourceMetadataStorage.getDataSourceMetadata(); - + List dataSourceMetadataList = + openSearchDataSourceMetadataStorage.getDataSourceMetadata(); Assertions.assertEquals(1, 
dataSourceMetadataList.size()); DataSourceMetadata dataSourceMetadata = dataSourceMetadataList.get(0); @@ -252,7 +230,6 @@ public void testGetDataSourceMetadataList() { Assertions.assertEquals(DataSourceType.PROMETHEUS, dataSourceMetadata.getConnector()); } - @SneakyThrows @Test public void testGetDataSourceMetadataListWithNoIndex() { @@ -264,8 +241,8 @@ public void testGetDataSourceMetadataListWithNoIndex() { .thenReturn(new CreateIndexResponse(true, true, DATASOURCE_INDEX_NAME)); Mockito.when(client.index(ArgumentMatchers.any())).thenReturn(indexResponseActionFuture); - List dataSourceMetadataList - = openSearchDataSourceMetadataStorage.getDataSourceMetadata(); + List dataSourceMetadataList = + openSearchDataSourceMetadataStorage.getDataSourceMetadata(); Assertions.assertEquals(0, dataSourceMetadataList.size()); } @@ -281,8 +258,8 @@ public void testGetDataSourceMetadataWithNoIndex() { .thenReturn(new CreateIndexResponse(true, true, DATASOURCE_INDEX_NAME)); Mockito.when(client.index(ArgumentMatchers.any())).thenReturn(indexResponseActionFuture); - Optional dataSourceMetadataOptional - = openSearchDataSourceMetadataStorage.getDataSourceMetadata(TEST_DATASOURCE_INDEX_NAME); + Optional dataSourceMetadataOptional = + openSearchDataSourceMetadataStorage.getDataSourceMetadata(TEST_DATASOURCE_INDEX_NAME); Assertions.assertFalse(dataSourceMetadataOptional.isPresent()); } @@ -310,8 +287,6 @@ public void testCreateDataSourceMetadata() { Mockito.verify(client.admin().indices(), Mockito.times(1)).create(ArgumentMatchers.any()); Mockito.verify(client, Mockito.times(1)).index(ArgumentMatchers.any()); Mockito.verify(client.threadPool().getThreadContext(), Mockito.times(2)).stashContext(); - - } @Test @@ -334,7 +309,6 @@ public void testCreateDataSourceMetadataWithOutCreatingIndex() { Mockito.verify(client.threadPool().getThreadContext(), Mockito.times(1)).stashContext(); } - @Test public void testCreateDataSourceMetadataFailedWithNotFoundResponse() { @@ -351,10 +325,14 @@ 
public void testCreateDataSourceMetadataFailedWithNotFoundResponse() { Mockito.when(indexResponse.getResult()).thenReturn(DocWriteResponse.Result.NOT_FOUND); DataSourceMetadata dataSourceMetadata = getDataSourceMetadata(); - RuntimeException runtimeException = Assertions.assertThrows(RuntimeException.class, - () -> this.openSearchDataSourceMetadataStorage.createDataSourceMetadata( - dataSourceMetadata)); - Assertions.assertEquals("Saving dataSource metadata information failed with result : not_found", + RuntimeException runtimeException = + Assertions.assertThrows( + RuntimeException.class, + () -> + this.openSearchDataSourceMetadataStorage.createDataSourceMetadata( + dataSourceMetadata)); + Assertions.assertEquals( + "Saving dataSource metadata information failed with result : not_found", runtimeException.getMessage()); Mockito.verify(encryptor, Mockito.times(1)).encrypt("secret_key"); @@ -362,8 +340,6 @@ public void testCreateDataSourceMetadataFailedWithNotFoundResponse() { Mockito.verify(client.admin().indices(), Mockito.times(1)).create(ArgumentMatchers.any()); Mockito.verify(client, Mockito.times(1)).index(ArgumentMatchers.any()); Mockito.verify(client.threadPool().getThreadContext(), Mockito.times(2)).stashContext(); - - } @Test @@ -381,20 +357,19 @@ public void testCreateDataSourceMetadataWithVersionConflict() { .thenThrow(VersionConflictEngineException.class); DataSourceMetadata dataSourceMetadata = getDataSourceMetadata(); IllegalArgumentException illegalArgumentException = - Assertions.assertThrows(IllegalArgumentException.class, - () -> this.openSearchDataSourceMetadataStorage.createDataSourceMetadata( - dataSourceMetadata)); - Assertions.assertEquals("A datasource already exists with name: testDS", - illegalArgumentException.getMessage()); - + Assertions.assertThrows( + IllegalArgumentException.class, + () -> + this.openSearchDataSourceMetadataStorage.createDataSourceMetadata( + dataSourceMetadata)); + Assertions.assertEquals( + "A datasource already 
exists with name: testDS", illegalArgumentException.getMessage()); Mockito.verify(encryptor, Mockito.times(1)).encrypt("secret_key"); Mockito.verify(encryptor, Mockito.times(1)).encrypt("access_key"); Mockito.verify(client.admin().indices(), Mockito.times(1)).create(ArgumentMatchers.any()); Mockito.verify(client, Mockito.times(1)).index(ArgumentMatchers.any()); Mockito.verify(client.threadPool().getThreadContext(), Mockito.times(2)).stashContext(); - - } @Test @@ -412,19 +387,20 @@ public void testCreateDataSourceMetadataWithException() { .thenThrow(new RuntimeException("error while indexing")); DataSourceMetadata dataSourceMetadata = getDataSourceMetadata(); - RuntimeException runtimeException = Assertions.assertThrows(RuntimeException.class, - () -> this.openSearchDataSourceMetadataStorage.createDataSourceMetadata( - dataSourceMetadata)); - Assertions.assertEquals("java.lang.RuntimeException: error while indexing", - runtimeException.getMessage()); + RuntimeException runtimeException = + Assertions.assertThrows( + RuntimeException.class, + () -> + this.openSearchDataSourceMetadataStorage.createDataSourceMetadata( + dataSourceMetadata)); + Assertions.assertEquals( + "java.lang.RuntimeException: error while indexing", runtimeException.getMessage()); Mockito.verify(encryptor, Mockito.times(1)).encrypt("secret_key"); Mockito.verify(encryptor, Mockito.times(1)).encrypt("access_key"); Mockito.verify(client.admin().indices(), Mockito.times(1)).create(ArgumentMatchers.any()); Mockito.verify(client, Mockito.times(1)).index(ArgumentMatchers.any()); Mockito.verify(client.threadPool().getThreadContext(), Mockito.times(2)).stashContext(); - - } @Test @@ -440,9 +416,12 @@ public void testCreateDataSourceMetadataWithIndexCreationFailed() { .thenReturn(new CreateIndexResponse(false, false, DATASOURCE_INDEX_NAME)); DataSourceMetadata dataSourceMetadata = getDataSourceMetadata(); - RuntimeException runtimeException = Assertions.assertThrows(RuntimeException.class, - () -> 
this.openSearchDataSourceMetadataStorage.createDataSourceMetadata( - dataSourceMetadata)); + RuntimeException runtimeException = + Assertions.assertThrows( + RuntimeException.class, + () -> + this.openSearchDataSourceMetadataStorage.createDataSourceMetadata( + dataSourceMetadata)); Assertions.assertEquals( "Internal server error while creating.ql-datasources index:: " + "Index creation is not acknowledged.", @@ -470,7 +449,6 @@ public void testUpdateDataSourceMetadata() { Mockito.verify(client.admin().indices(), Mockito.times(0)).create(ArgumentMatchers.any()); Mockito.verify(client, Mockito.times(1)).update(ArgumentMatchers.any()); Mockito.verify(client.threadPool().getThreadContext(), Mockito.times(1)).stashContext(); - } @Test @@ -500,10 +478,14 @@ public void testUpdateDataSourceMetadataWithNotFoundResult() { Mockito.when(updateResponse.getResult()).thenReturn(DocWriteResponse.Result.NOT_FOUND); DataSourceMetadata dataSourceMetadata = getDataSourceMetadata(); - RuntimeException runtimeException = Assertions.assertThrows(RuntimeException.class, - () -> this.openSearchDataSourceMetadataStorage.updateDataSourceMetadata( - dataSourceMetadata)); - Assertions.assertEquals("Saving dataSource metadata information failed with result : not_found", + RuntimeException runtimeException = + Assertions.assertThrows( + RuntimeException.class, + () -> + this.openSearchDataSourceMetadataStorage.updateDataSourceMetadata( + dataSourceMetadata)); + Assertions.assertEquals( + "Saving dataSource metadata information failed with result : not_found", runtimeException.getMessage()); Mockito.verify(encryptor, Mockito.times(1)).encrypt("secret_key"); @@ -511,32 +493,31 @@ public void testUpdateDataSourceMetadataWithNotFoundResult() { Mockito.verify(client.admin().indices(), Mockito.times(0)).create(ArgumentMatchers.any()); Mockito.verify(client, Mockito.times(1)).update(ArgumentMatchers.any()); Mockito.verify(client.threadPool().getThreadContext(), Mockito.times(1)).stashContext(); - } 
@Test public void testUpdateDataSourceMetadataWithDocumentMissingException() { Mockito.when(encryptor.encrypt("secret_key")).thenReturn("secret_key"); Mockito.when(encryptor.encrypt("access_key")).thenReturn("access_key"); - Mockito.when(client.update(ArgumentMatchers.any())).thenThrow(new DocumentMissingException( - ShardId.fromString("[2][2]"), "testDS")); + Mockito.when(client.update(ArgumentMatchers.any())) + .thenThrow(new DocumentMissingException(ShardId.fromString("[2][2]"), "testDS")); DataSourceMetadata dataSourceMetadata = getDataSourceMetadata(); dataSourceMetadata.setName("testDS"); - DataSourceNotFoundException dataSourceNotFoundException = - Assertions.assertThrows(DataSourceNotFoundException.class, - () -> this.openSearchDataSourceMetadataStorage.updateDataSourceMetadata( - dataSourceMetadata)); - Assertions.assertEquals("Datasource with name: testDS doesn't exist", - dataSourceNotFoundException.getMessage()); + Assertions.assertThrows( + DataSourceNotFoundException.class, + () -> + this.openSearchDataSourceMetadataStorage.updateDataSourceMetadata( + dataSourceMetadata)); + Assertions.assertEquals( + "Datasource with name: testDS doesn't exist", dataSourceNotFoundException.getMessage()); Mockito.verify(encryptor, Mockito.times(1)).encrypt("secret_key"); Mockito.verify(encryptor, Mockito.times(1)).encrypt("access_key"); Mockito.verify(client.admin().indices(), Mockito.times(0)).create(ArgumentMatchers.any()); Mockito.verify(client, Mockito.times(1)).update(ArgumentMatchers.any()); Mockito.verify(client.threadPool().getThreadContext(), Mockito.times(1)).stashContext(); - } @Test @@ -548,19 +529,20 @@ public void testUpdateDataSourceMetadataWithRuntimeException() { DataSourceMetadata dataSourceMetadata = getDataSourceMetadata(); dataSourceMetadata.setName("testDS"); - - RuntimeException runtimeException = Assertions.assertThrows(RuntimeException.class, - () -> this.openSearchDataSourceMetadataStorage.updateDataSourceMetadata( - dataSourceMetadata)); - 
Assertions.assertEquals("java.lang.RuntimeException: error message", - runtimeException.getMessage()); + RuntimeException runtimeException = + Assertions.assertThrows( + RuntimeException.class, + () -> + this.openSearchDataSourceMetadataStorage.updateDataSourceMetadata( + dataSourceMetadata)); + Assertions.assertEquals( + "java.lang.RuntimeException: error message", runtimeException.getMessage()); Mockito.verify(encryptor, Mockito.times(1)).encrypt("secret_key"); Mockito.verify(encryptor, Mockito.times(1)).encrypt("access_key"); Mockito.verify(client.admin().indices(), Mockito.times(0)).create(ArgumentMatchers.any()); Mockito.verify(client, Mockito.times(1)).update(ArgumentMatchers.any()); Mockito.verify(client.threadPool().getThreadContext(), Mockito.times(1)).stashContext(); - } @Test @@ -584,11 +566,11 @@ public void testDeleteDataSourceMetadataWhichisAlreadyDeleted() { Mockito.when(deleteResponse.getResult()).thenReturn(DocWriteResponse.Result.NOT_FOUND); DataSourceNotFoundException dataSourceNotFoundException = - Assertions.assertThrows(DataSourceNotFoundException.class, + Assertions.assertThrows( + DataSourceNotFoundException.class, () -> this.openSearchDataSourceMetadataStorage.deleteDataSourceMetadata("testDS")); - Assertions.assertEquals("Datasource with name: testDS doesn't exist", - dataSourceNotFoundException.getMessage()); - + Assertions.assertEquals( + "Datasource with name: testDS doesn't exist", dataSourceNotFoundException.getMessage()); Mockito.verifyNoInteractions(encryptor); Mockito.verify(client.admin().indices(), Mockito.times(0)).create(ArgumentMatchers.any()); @@ -602,9 +584,12 @@ public void testDeleteDataSourceMetadataWithUnexpectedResult() { Mockito.when(deleteResponseActionFuture.actionGet()).thenReturn(deleteResponse); Mockito.when(deleteResponse.getResult()).thenReturn(DocWriteResponse.Result.NOOP); - RuntimeException runtimeException = Assertions.assertThrows(RuntimeException.class, - () -> 
this.openSearchDataSourceMetadataStorage.deleteDataSourceMetadata("testDS")); - Assertions.assertEquals("Deleting dataSource metadata information failed with result : noop", + RuntimeException runtimeException = + Assertions.assertThrows( + RuntimeException.class, + () -> this.openSearchDataSourceMetadataStorage.deleteDataSourceMetadata("testDS")); + Assertions.assertEquals( + "Deleting dataSource metadata information failed with result : noop", runtimeException.getMessage()); Mockito.verifyNoInteractions(encryptor); @@ -684,5 +669,4 @@ private DataSourceMetadata getDataSourceMetadata() { dataSourceMetadata.setProperties(properties); return dataSourceMetadata; } - } diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceActionTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceActionTest.java index ccae5de2a9..f1a3a2875e 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceActionTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportCreateDataSourceActionTest.java @@ -27,27 +27,23 @@ @ExtendWith(MockitoExtension.class) public class TransportCreateDataSourceActionTest { - @Mock - private TransportService transportService; - @Mock - private TransportCreateDataSourceAction action; - @Mock - private DataSourceServiceImpl dataSourceService; - @Mock - private Task task; - @Mock - private ActionListener actionListener; + @Mock private TransportService transportService; + @Mock private TransportCreateDataSourceAction action; + @Mock private DataSourceServiceImpl dataSourceService; + @Mock private Task task; + @Mock private ActionListener actionListener; + @Captor private ArgumentCaptor createDataSourceActionResponseArgumentCaptor; - @Captor - private ArgumentCaptor exceptionArgumentCaptor; + @Captor private ArgumentCaptor exceptionArgumentCaptor; @BeforeEach public void setUp() { - 
action = new TransportCreateDataSourceAction(transportService, - new ActionFilters(new HashSet<>()), dataSourceService); + action = + new TransportCreateDataSourceAction( + transportService, new ActionFilters(new HashSet<>()), dataSourceService); } @Test @@ -61,10 +57,10 @@ public void testDoExecute() { verify(dataSourceService, times(1)).createDataSource(dataSourceMetadata); Mockito.verify(actionListener) .onResponse(createDataSourceActionResponseArgumentCaptor.capture()); - CreateDataSourceActionResponse createDataSourceActionResponse - = createDataSourceActionResponseArgumentCaptor.getValue(); - Assertions.assertEquals("Created DataSource with name test_datasource", - createDataSourceActionResponse.getResult()); + CreateDataSourceActionResponse createDataSourceActionResponse = + createDataSourceActionResponseArgumentCaptor.getValue(); + Assertions.assertEquals( + "Created DataSource with name test_datasource", createDataSourceActionResponse.getResult()); } @Test @@ -72,7 +68,8 @@ public void testDoExecuteWithException() { DataSourceMetadata dataSourceMetadata = new DataSourceMetadata(); dataSourceMetadata.setName("test_datasource"); dataSourceMetadata.setConnector(DataSourceType.PROMETHEUS); - doThrow(new RuntimeException("Error")).when(dataSourceService) + doThrow(new RuntimeException("Error")) + .when(dataSourceService) .createDataSource(dataSourceMetadata); CreateDataSourceActionRequest request = new CreateDataSourceActionRequest(dataSourceMetadata); action.doExecute(task, request, actionListener); @@ -80,7 +77,6 @@ public void testDoExecuteWithException() { Mockito.verify(actionListener).onFailure(exceptionArgumentCaptor.capture()); Exception exception = exceptionArgumentCaptor.getValue(); Assertions.assertTrue(exception instanceof RuntimeException); - Assertions.assertEquals("Error", - exception.getMessage()); + Assertions.assertEquals("Error", exception.getMessage()); } } diff --git 
a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceActionTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceActionTest.java index e97e7d1a65..ea581de20c 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceActionTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportDeleteDataSourceActionTest.java @@ -25,28 +25,23 @@ @ExtendWith(MockitoExtension.class) public class TransportDeleteDataSourceActionTest { - @Mock - private TransportService transportService; - @Mock - private TransportDeleteDataSourceAction action; - @Mock - private DataSourceServiceImpl dataSourceService; - @Mock - private Task task; - @Mock - private ActionListener actionListener; + @Mock private TransportService transportService; + @Mock private TransportDeleteDataSourceAction action; + @Mock private DataSourceServiceImpl dataSourceService; + @Mock private Task task; + @Mock private ActionListener actionListener; @Captor private ArgumentCaptor deleteDataSourceActionResponseArgumentCaptor; - @Captor - private ArgumentCaptor exceptionArgumentCaptor; + @Captor private ArgumentCaptor exceptionArgumentCaptor; @BeforeEach public void setUp() { - action = new TransportDeleteDataSourceAction(transportService, - new ActionFilters(new HashSet<>()), dataSourceService); + action = + new TransportDeleteDataSourceAction( + transportService, new ActionFilters(new HashSet<>()), dataSourceService); } @Test @@ -57,10 +52,10 @@ public void testDoExecute() { verify(dataSourceService, times(1)).deleteDataSource("test_datasource"); Mockito.verify(actionListener) .onResponse(deleteDataSourceActionResponseArgumentCaptor.capture()); - DeleteDataSourceActionResponse deleteDataSourceActionResponse - = deleteDataSourceActionResponseArgumentCaptor.getValue(); - Assertions.assertEquals("Deleted DataSource with name test_datasource", - 
deleteDataSourceActionResponse.getResult()); + DeleteDataSourceActionResponse deleteDataSourceActionResponse = + deleteDataSourceActionResponseArgumentCaptor.getValue(); + Assertions.assertEquals( + "Deleted DataSource with name test_datasource", deleteDataSourceActionResponse.getResult()); } @Test @@ -72,7 +67,6 @@ public void testDoExecuteWithException() { Mockito.verify(actionListener).onFailure(exceptionArgumentCaptor.capture()); Exception exception = exceptionArgumentCaptor.getValue(); Assertions.assertTrue(exception instanceof RuntimeException); - Assertions.assertEquals("Error", - exception.getMessage()); + Assertions.assertEquals("Error", exception.getMessage()); } } diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceActionTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceActionTest.java index fc4439470e..4f04afd667 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceActionTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportGetDataSourceActionTest.java @@ -34,27 +34,22 @@ @ExtendWith(MockitoExtension.class) public class TransportGetDataSourceActionTest { - @Mock - private TransportService transportService; - @Mock - private TransportGetDataSourceAction action; - @Mock - private DataSourceServiceImpl dataSourceService; - @Mock - private Task task; - @Mock - private ActionListener actionListener; + @Mock private TransportService transportService; + @Mock private TransportGetDataSourceAction action; + @Mock private DataSourceServiceImpl dataSourceService; + @Mock private Task task; + @Mock private ActionListener actionListener; @Captor private ArgumentCaptor getDataSourceActionResponseArgumentCaptor; - @Captor - private ArgumentCaptor exceptionArgumentCaptor; + @Captor private ArgumentCaptor exceptionArgumentCaptor; @BeforeEach public void setUp() { - action = new 
TransportGetDataSourceAction(transportService, - new ActionFilters(new HashSet<>()), dataSourceService); + action = + new TransportGetDataSourceAction( + transportService, new ActionFilters(new HashSet<>()), dataSourceService); } @Test @@ -63,23 +58,22 @@ public void testDoExecute() { dataSourceMetadata.setName("test_datasource"); dataSourceMetadata.setConnector(DataSourceType.PROMETHEUS); GetDataSourceActionRequest request = new GetDataSourceActionRequest("test_datasource"); - when(dataSourceService.getDataSourceMetadata("test_datasource")) - .thenReturn(dataSourceMetadata); + when(dataSourceService.getDataSourceMetadata("test_datasource")).thenReturn(dataSourceMetadata); action.doExecute(task, request, actionListener); verify(dataSourceService, times(1)).getDataSourceMetadata("test_datasource"); Mockito.verify(actionListener).onResponse(getDataSourceActionResponseArgumentCaptor.capture()); - GetDataSourceActionResponse getDataSourceActionResponse - = getDataSourceActionResponseArgumentCaptor.getValue(); + GetDataSourceActionResponse getDataSourceActionResponse = + getDataSourceActionResponseArgumentCaptor.getValue(); JsonResponseFormatter dataSourceMetadataJsonResponseFormatter = - new JsonResponseFormatter<>( - JsonResponseFormatter.Style.PRETTY) { + new JsonResponseFormatter<>(JsonResponseFormatter.Style.PRETTY) { @Override protected Object buildJsonObject(DataSourceMetadata response) { return response; } }; - Assertions.assertEquals(dataSourceMetadataJsonResponseFormatter.format(dataSourceMetadata), + Assertions.assertEquals( + dataSourceMetadataJsonResponseFormatter.format(dataSourceMetadata), getDataSourceActionResponse.getResult()); DataSourceMetadata result = new Gson().fromJson(getDataSourceActionResponse.getResult(), DataSourceMetadata.class); @@ -100,18 +94,16 @@ public void testDoExecuteForGetAllDataSources() { action.doExecute(task, request, actionListener); verify(dataSourceService, times(1)).getDataSourceMetadata(false); 
Mockito.verify(actionListener).onResponse(getDataSourceActionResponseArgumentCaptor.capture()); - GetDataSourceActionResponse getDataSourceActionResponse - = getDataSourceActionResponseArgumentCaptor.getValue(); + GetDataSourceActionResponse getDataSourceActionResponse = + getDataSourceActionResponseArgumentCaptor.getValue(); JsonResponseFormatter> dataSourceMetadataJsonResponseFormatter = - new JsonResponseFormatter<>( - JsonResponseFormatter.Style.PRETTY) { + new JsonResponseFormatter<>(JsonResponseFormatter.Style.PRETTY) { @Override protected Object buildJsonObject(Set response) { return response; } }; - Type setType = new TypeToken>() { - }.getType(); + Type setType = new TypeToken>() {}.getType(); Assertions.assertEquals( dataSourceMetadataJsonResponseFormatter.format(Collections.singleton(dataSourceMetadata)), getDataSourceActionResponse.getResult()); @@ -131,7 +123,6 @@ public void testDoExecuteWithException() { Mockito.verify(actionListener).onFailure(exceptionArgumentCaptor.capture()); Exception exception = exceptionArgumentCaptor.getValue(); Assertions.assertTrue(exception instanceof RuntimeException); - Assertions.assertEquals("Error", - exception.getMessage()); + Assertions.assertEquals("Error", exception.getMessage()); } } diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceActionTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceActionTest.java index 4b5a6e0f57..998a1aa7b2 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceActionTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/transport/TransportUpdateDataSourceActionTest.java @@ -27,28 +27,23 @@ @ExtendWith(MockitoExtension.class) public class TransportUpdateDataSourceActionTest { - @Mock - private TransportService transportService; - @Mock - private TransportUpdateDataSourceAction action; - @Mock - private 
DataSourceServiceImpl dataSourceService; - @Mock - private Task task; - @Mock - private ActionListener actionListener; + @Mock private TransportService transportService; + @Mock private TransportUpdateDataSourceAction action; + @Mock private DataSourceServiceImpl dataSourceService; + @Mock private Task task; + @Mock private ActionListener actionListener; @Captor private ArgumentCaptor updateDataSourceActionResponseArgumentCaptor; - @Captor - private ArgumentCaptor exceptionArgumentCaptor; + @Captor private ArgumentCaptor exceptionArgumentCaptor; @BeforeEach public void setUp() { - action = new TransportUpdateDataSourceAction(transportService, - new ActionFilters(new HashSet<>()), dataSourceService); + action = + new TransportUpdateDataSourceAction( + transportService, new ActionFilters(new HashSet<>()), dataSourceService); } @Test @@ -62,10 +57,10 @@ public void testDoExecute() { verify(dataSourceService, times(1)).updateDataSource(dataSourceMetadata); Mockito.verify(actionListener) .onResponse(updateDataSourceActionResponseArgumentCaptor.capture()); - UpdateDataSourceActionResponse updateDataSourceActionResponse - = updateDataSourceActionResponseArgumentCaptor.getValue(); - Assertions.assertEquals("Updated DataSource with name test_datasource", - updateDataSourceActionResponse.getResult()); + UpdateDataSourceActionResponse updateDataSourceActionResponse = + updateDataSourceActionResponseArgumentCaptor.getValue(); + Assertions.assertEquals( + "Updated DataSource with name test_datasource", updateDataSourceActionResponse.getResult()); } @Test @@ -73,7 +68,8 @@ public void testDoExecuteWithException() { DataSourceMetadata dataSourceMetadata = new DataSourceMetadata(); dataSourceMetadata.setName("test_datasource"); dataSourceMetadata.setConnector(DataSourceType.PROMETHEUS); - doThrow(new RuntimeException("Error")).when(dataSourceService) + doThrow(new RuntimeException("Error")) + .when(dataSourceService) .updateDataSource(dataSourceMetadata); 
UpdateDataSourceActionRequest request = new UpdateDataSourceActionRequest(dataSourceMetadata); action.doExecute(task, request, actionListener); @@ -81,7 +77,6 @@ public void testDoExecuteWithException() { Mockito.verify(actionListener).onFailure(exceptionArgumentCaptor.capture()); Exception exception = exceptionArgumentCaptor.getValue(); Assertions.assertTrue(exception instanceof RuntimeException); - Assertions.assertEquals("Error", - exception.getMessage()); + Assertions.assertEquals("Error", exception.getMessage()); } } diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/utils/SchedulerTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/utils/SchedulerTest.java index e3dac306cd..ff23cdcabb 100644 --- a/datasources/src/test/java/org/opensearch/sql/datasources/utils/SchedulerTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/utils/SchedulerTest.java @@ -19,27 +19,24 @@ @ExtendWith(MockitoExtension.class) public class SchedulerTest { - @Mock - private NodeClient nodeClient; + @Mock private NodeClient nodeClient; - @Mock - private ThreadPool threadPool; + @Mock private ThreadPool threadPool; @Test public void testSchedule() { Mockito.when(nodeClient.threadPool()).thenReturn(threadPool); Mockito.doAnswer( - invocation -> { - Runnable task = invocation.getArgument(0); - task.run(); - return null; - }) + invocation -> { + Runnable task = invocation.getArgument(0); + task.run(); + return null; + }) .when(threadPool) .schedule(ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any()); AtomicBoolean isRun = new AtomicBoolean(false); Scheduler.schedule(nodeClient, () -> isRun.set(true)); Assert.assertTrue(isRun.get()); } - } diff --git a/datasources/src/test/java/org/opensearch/sql/datasources/utils/XContentParserUtilsTest.java b/datasources/src/test/java/org/opensearch/sql/datasources/utils/XContentParserUtilsTest.java index f47d0503e7..c0c05c0282 100644 --- 
a/datasources/src/test/java/org/opensearch/sql/datasources/utils/XContentParserUtilsTest.java +++ b/datasources/src/test/java/org/opensearch/sql/datasources/utils/XContentParserUtilsTest.java @@ -30,7 +30,8 @@ public void testConvertToXContent() { XContentBuilder contentBuilder = XContentParserUtils.convertToXContent(dataSourceMetadata); String contentString = BytesReference.bytes(contentBuilder).utf8ToString(); - Assertions.assertEquals("{\"name\":\"testDS\",\"connector\":\"PROMETHEUS\",\"allowedRoles\":[\"prometheus_access\"],\"properties\":{\"prometheus.uri\":\"https://localhost:9090\"}}", + Assertions.assertEquals( + "{\"name\":\"testDS\",\"connector\":\"PROMETHEUS\",\"allowedRoles\":[\"prometheus_access\"],\"properties\":{\"prometheus.uri\":\"https://localhost:9090\"}}", contentString); } @@ -49,7 +50,6 @@ public void testToDataSourceMetadataFromJson() { Assertions.assertEquals(retrievedMetadata, dataSourceMetadata); Assertions.assertEquals("prometheus_access", retrievedMetadata.getAllowedRoles().get(0)); - } @SneakyThrows @@ -62,9 +62,12 @@ public void testToDataSourceMetadataFromJsonWithoutName() { Gson gson = new Gson(); String json = gson.toJson(dataSourceMetadata); - IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> { - XContentParserUtils.toDataSourceMetadata(json); - }); + IllegalArgumentException exception = + assertThrows( + IllegalArgumentException.class, + () -> { + XContentParserUtils.toDataSourceMetadata(json); + }); Assertions.assertEquals("name and connector are required fields.", exception.getMessage()); } @@ -78,9 +81,12 @@ public void testToDataSourceMetadataFromJsonWithoutConnector() { Gson gson = new Gson(); String json = gson.toJson(dataSourceMetadata); - IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> { - XContentParserUtils.toDataSourceMetadata(json); - }); + IllegalArgumentException exception = + assertThrows( + IllegalArgumentException.class, + () -> { + 
XContentParserUtils.toDataSourceMetadata(json); + }); Assertions.assertEquals("name and connector are required fields.", exception.getMessage()); } @@ -92,10 +98,12 @@ public void testToDataSourceMetadataFromJsonUsingUnknownObject() { Gson gson = new Gson(); String json = gson.toJson(hashMap); - IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> { - XContentParserUtils.toDataSourceMetadata(json); - }); + IllegalArgumentException exception = + assertThrows( + IllegalArgumentException.class, + () -> { + XContentParserUtils.toDataSourceMetadata(json); + }); Assertions.assertEquals("Unknown field: test", exception.getMessage()); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/bwc/SQLBackwardsCompatibilityIT.java b/integ-test/src/test/java/org/opensearch/sql/bwc/SQLBackwardsCompatibilityIT.java index c32a3336c0..dff9aa262e 100644 --- a/integ-test/src/test/java/org/opensearch/sql/bwc/SQLBackwardsCompatibilityIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/bwc/SQLBackwardsCompatibilityIT.java @@ -3,27 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.bwc; - -import org.json.JSONObject; -import org.junit.Assert; -import org.opensearch.client.Request; -import org.opensearch.client.RequestOptions; -import org.opensearch.client.Response; -import org.opensearch.common.settings.Settings; -import org.opensearch.sql.legacy.SQLIntegTestCase; -import org.opensearch.sql.legacy.TestsConstants; -import org.opensearch.test.rest.OpenSearchRestTestCase; - -import java.io.IOException; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Set; -import java.util.stream.Collectors; - import static org.opensearch.sql.legacy.TestUtils.createIndexByRestClient; import static org.opensearch.sql.legacy.TestUtils.isIndexExist; import static org.opensearch.sql.legacy.TestUtils.loadDataByRestClient; @@ -36,179 +17,207 @@ import static 
org.opensearch.sql.util.MatcherUtils.verifySchema; import static org.opensearch.sql.util.TestUtils.getResponseBody; -public class SQLBackwardsCompatibilityIT extends SQLIntegTestCase { - - private static final ClusterType CLUSTER_TYPE = ClusterType.parse(System.getProperty("tests.rest.bwcsuite")); - private static final String CLUSTER_NAME = System.getProperty("tests.clustername"); - - @Override - protected final boolean preserveIndicesUponCompletion() { - return true; - } - - @Override - protected final boolean preserveReposUponCompletion() { - return true; - } - - @Override - protected boolean preserveTemplatesUponCompletion() { - return true; - } +import java.io.IOException; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import org.json.JSONObject; +import org.junit.Assert; +import org.opensearch.client.Request; +import org.opensearch.client.RequestOptions; +import org.opensearch.client.Response; +import org.opensearch.common.settings.Settings; +import org.opensearch.sql.legacy.SQLIntegTestCase; +import org.opensearch.sql.legacy.TestsConstants; +import org.opensearch.test.rest.OpenSearchRestTestCase; - @Override - protected final Settings restClientSettings() { - return Settings - .builder() - .put(super.restClientSettings()) - // increase the timeout here to 90 seconds to handle long waits for a green - // cluster health. 
the waits for green need to be longer than a minute to - // account for delayed shards - .put(OpenSearchRestTestCase.CLIENT_SOCKET_TIMEOUT, "90s") - .build(); - } +public class SQLBackwardsCompatibilityIT extends SQLIntegTestCase { - private enum ClusterType { - OLD, - MIXED, - UPGRADED; - - public static ClusterType parse(String value) { - switch (value) { - case "old_cluster": - return OLD; - case "mixed_cluster": - return MIXED; - case "upgraded_cluster": - return UPGRADED; - default: - throw new AssertionError("unknown cluster type: " + value); - } - } + private static final ClusterType CLUSTER_TYPE = + ClusterType.parse(System.getProperty("tests.rest.bwcsuite")); + private static final String CLUSTER_NAME = System.getProperty("tests.clustername"); + + @Override + protected final boolean preserveIndicesUponCompletion() { + return true; + } + + @Override + protected final boolean preserveReposUponCompletion() { + return true; + } + + @Override + protected boolean preserveTemplatesUponCompletion() { + return true; + } + + @Override + protected final Settings restClientSettings() { + return Settings.builder() + .put(super.restClientSettings()) + // increase the timeout here to 90 seconds to handle long waits for a green + // cluster health. 
the waits for green need to be longer than a minute to + // account for delayed shards + .put(OpenSearchRestTestCase.CLIENT_SOCKET_TIMEOUT, "90s") + .build(); + } + + private enum ClusterType { + OLD, + MIXED, + UPGRADED; + + public static ClusterType parse(String value) { + switch (value) { + case "old_cluster": + return OLD; + case "mixed_cluster": + return MIXED; + case "upgraded_cluster": + return UPGRADED; + default: + throw new AssertionError("unknown cluster type: " + value); + } } - - @SuppressWarnings("unchecked") - public void testBackwardsCompatibility() throws Exception { - String uri = getUri(); - Map> responseMap = (Map>) getAsMap(uri).get("nodes"); - for (Map response : responseMap.values()) { - List> plugins = (List>) response.get("plugins"); - Set pluginNames = plugins.stream().map(map -> map.get("name")).collect(Collectors.toSet()); - switch (CLUSTER_TYPE) { - case OLD: - Assert.assertTrue(pluginNames.contains("opensearch-sql")); - updateLegacySQLSettings(); - loadIndex(Index.ACCOUNT); - verifySQLQueries(LEGACY_QUERY_API_ENDPOINT); - break; - case MIXED: - Assert.assertTrue(pluginNames.contains("opensearch-sql")); - verifySQLSettings(); - verifySQLQueries(LEGACY_QUERY_API_ENDPOINT); - break; - case UPGRADED: - Assert.assertTrue(pluginNames.contains("opensearch-sql")); - verifySQLSettings(); - verifySQLQueries(QUERY_API_ENDPOINT); - break; - } - break; - } + } + + @SuppressWarnings("unchecked") + public void testBackwardsCompatibility() throws Exception { + String uri = getUri(); + Map> responseMap = + (Map>) getAsMap(uri).get("nodes"); + for (Map response : responseMap.values()) { + List> plugins = (List>) response.get("plugins"); + Set pluginNames = + plugins.stream().map(map -> map.get("name")).collect(Collectors.toSet()); + switch (CLUSTER_TYPE) { + case OLD: + Assert.assertTrue(pluginNames.contains("opensearch-sql")); + updateLegacySQLSettings(); + loadIndex(Index.ACCOUNT); + verifySQLQueries(LEGACY_QUERY_API_ENDPOINT); + break; + case MIXED: 
+ Assert.assertTrue(pluginNames.contains("opensearch-sql")); + verifySQLSettings(); + verifySQLQueries(LEGACY_QUERY_API_ENDPOINT); + break; + case UPGRADED: + Assert.assertTrue(pluginNames.contains("opensearch-sql")); + verifySQLSettings(); + verifySQLQueries(QUERY_API_ENDPOINT); + break; + } + break; } - - private String getUri() { - switch (CLUSTER_TYPE) { - case OLD: - return "_nodes/" + CLUSTER_NAME + "-0/plugins"; - case MIXED: - String round = System.getProperty("tests.rest.bwcsuite_round"); - if (round.equals("second")) { - return "_nodes/" + CLUSTER_NAME + "-1/plugins"; - } else if (round.equals("third")) { - return "_nodes/" + CLUSTER_NAME + "-2/plugins"; - } else { - return "_nodes/" + CLUSTER_NAME + "-0/plugins"; - } - case UPGRADED: - return "_nodes/plugins"; - default: - throw new AssertionError("unknown cluster type: " + CLUSTER_TYPE); + } + + private String getUri() { + switch (CLUSTER_TYPE) { + case OLD: + return "_nodes/" + CLUSTER_NAME + "-0/plugins"; + case MIXED: + String round = System.getProperty("tests.rest.bwcsuite_round"); + if (round.equals("second")) { + return "_nodes/" + CLUSTER_NAME + "-1/plugins"; + } else if (round.equals("third")) { + return "_nodes/" + CLUSTER_NAME + "-2/plugins"; + } else { + return "_nodes/" + CLUSTER_NAME + "-0/plugins"; } + case UPGRADED: + return "_nodes/plugins"; + default: + throw new AssertionError("unknown cluster type: " + CLUSTER_TYPE); } - - private void updateLegacySQLSettings() throws IOException { - Request request = new Request("PUT", LEGACY_SQL_SETTINGS_API_ENDPOINT); - request.setJsonEntity(String.format(Locale.ROOT, "{\n" + - " \"persistent\" : {\n" + - " \"%s\" : \"%s\"\n" + - " }\n" + - "}", "opendistro.sql.cursor.keep_alive", "7m")); - - RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); - restOptionsBuilder.addHeader("Content-Type", "application/json"); - request.setOptions(restOptionsBuilder); - - Response response = client().performRequest(request); - JSONObject 
jsonObject = new JSONObject(getResponseBody(response)); - Assert.assertTrue((boolean) jsonObject.get("acknowledged")); - } - - private void verifySQLSettings() throws IOException { - Request request = new Request("GET", "_cluster/settings?flat_settings"); - - RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); - restOptionsBuilder.addHeader("Content-Type", "application/json"); - request.setOptions(restOptionsBuilder); - - Response response = client().performRequest(request); - JSONObject jsonObject = new JSONObject(getResponseBody(response)); - Assert.assertEquals("{\"transient\":{},\"persistent\":{\"opendistro.sql.cursor.keep_alive\":\"7m\"}}", jsonObject.toString()); - } - - private void verifySQLQueries(String endpoint) throws IOException { - JSONObject filterResponse = executeSQLQuery(endpoint, "SELECT COUNT(*) FILTER(WHERE age > 35) FROM " + TestsConstants.TEST_INDEX_ACCOUNT); - verifySchema(filterResponse, schema("COUNT(*) FILTER(WHERE age > 35)", null, "integer")); - verifyDataRows(filterResponse, rows(238)); - - JSONObject aggResponse = executeSQLQuery(endpoint, "SELECT COUNT(DISTINCT age) FROM " + TestsConstants.TEST_INDEX_ACCOUNT); - verifySchema(aggResponse, schema("COUNT(DISTINCT age)", null, "integer")); - verifyDataRows(aggResponse, rows(21)); - - JSONObject groupByResponse = executeSQLQuery(endpoint, "select a.gender from " + TestsConstants.TEST_INDEX_ACCOUNT + " a group by a.gender having count(*) > 0"); - verifySchema(groupByResponse, schema("gender", null, "text")); - Assert.assertEquals("[[\"F\"],[\"M\"]]", groupByResponse.getJSONArray("datarows").toString()); + } + + private void updateLegacySQLSettings() throws IOException { + Request request = new Request("PUT", LEGACY_SQL_SETTINGS_API_ENDPOINT); + request.setJsonEntity( + String.format( + Locale.ROOT, + "{\n" + " \"persistent\" : {\n" + " \"%s\" : \"%s\"\n" + " }\n" + "}", + "opendistro.sql.cursor.keep_alive", + "7m")); + + RequestOptions.Builder restOptionsBuilder 
= RequestOptions.DEFAULT.toBuilder(); + restOptionsBuilder.addHeader("Content-Type", "application/json"); + request.setOptions(restOptionsBuilder); + + Response response = client().performRequest(request); + JSONObject jsonObject = new JSONObject(getResponseBody(response)); + Assert.assertTrue((boolean) jsonObject.get("acknowledged")); + } + + private void verifySQLSettings() throws IOException { + Request request = new Request("GET", "_cluster/settings?flat_settings"); + + RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); + restOptionsBuilder.addHeader("Content-Type", "application/json"); + request.setOptions(restOptionsBuilder); + + Response response = client().performRequest(request); + JSONObject jsonObject = new JSONObject(getResponseBody(response)); + Assert.assertEquals( + "{\"transient\":{},\"persistent\":{\"opendistro.sql.cursor.keep_alive\":\"7m\"}}", + jsonObject.toString()); + } + + private void verifySQLQueries(String endpoint) throws IOException { + JSONObject filterResponse = + executeSQLQuery( + endpoint, + "SELECT COUNT(*) FILTER(WHERE age > 35) FROM " + TestsConstants.TEST_INDEX_ACCOUNT); + verifySchema(filterResponse, schema("COUNT(*) FILTER(WHERE age > 35)", null, "integer")); + verifyDataRows(filterResponse, rows(238)); + + JSONObject aggResponse = + executeSQLQuery( + endpoint, "SELECT COUNT(DISTINCT age) FROM " + TestsConstants.TEST_INDEX_ACCOUNT); + verifySchema(aggResponse, schema("COUNT(DISTINCT age)", null, "integer")); + verifyDataRows(aggResponse, rows(21)); + + JSONObject groupByResponse = + executeSQLQuery( + endpoint, + "select a.gender from " + + TestsConstants.TEST_INDEX_ACCOUNT + + " a group by a.gender having count(*) > 0"); + verifySchema(groupByResponse, schema("gender", null, "text")); + Assert.assertEquals("[[\"F\"],[\"M\"]]", groupByResponse.getJSONArray("datarows").toString()); + } + + private JSONObject executeSQLQuery(String endpoint, String query) throws IOException { + Request request = new 
Request("POST", endpoint); + request.setJsonEntity(String.format(Locale.ROOT, "{" + " \"query\" : \"%s\"" + "}", query)); + + RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); + restOptionsBuilder.addHeader("Content-Type", "application/json"); + request.setOptions(restOptionsBuilder); + + Response response = client().performRequest(request); + return new JSONObject(getResponseBody(response)); + } + + @Override + public boolean shouldResetQuerySizeLimit() { + return false; + } + + @Override + protected synchronized void loadIndex(Index index) throws IOException { + String indexName = index.getName(); + String mapping = index.getMapping(); + // current directory becomes 'integ-test/build/testrun/sqlBwcCluster#' during bwc + String dataSet = "../../../" + index.getDataSet(); + + if (!isIndexExist(client(), indexName)) { + createIndexByRestClient(client(), indexName, mapping); + loadDataByRestClient(client(), indexName, dataSet); } - - private JSONObject executeSQLQuery(String endpoint, String query) throws IOException { - Request request = new Request("POST", endpoint); - request.setJsonEntity(String.format(Locale.ROOT, "{" + - " \"query\" : \"%s\"" + - "}", query)); - - RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); - restOptionsBuilder.addHeader("Content-Type", "application/json"); - request.setOptions(restOptionsBuilder); - - Response response = client().performRequest(request); - return new JSONObject(getResponseBody(response)); - } - - @Override - public boolean shouldResetQuerySizeLimit() { - return false; - } - - @Override - protected synchronized void loadIndex(Index index) throws IOException { - String indexName = index.getName(); - String mapping = index.getMapping(); - // current directory becomes 'integ-test/build/testrun/sqlBwcCluster#' during bwc - String dataSet = "../../../" + index.getDataSet(); - - if (!isIndexExist(client(), indexName)) { - createIndexByRestClient(client(), indexName, 
mapping); - loadDataByRestClient(client(), indexName, dataSet); - } - } - + } } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/CorrectnessIT.java b/integ-test/src/test/java/org/opensearch/sql/correctness/CorrectnessIT.java index a6f3e561b3..bd4c603362 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/CorrectnessIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/CorrectnessIT.java @@ -3,14 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness; import static org.opensearch.sql.util.TestUtils.getResourceFilePath; import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope; import com.google.common.collect.Maps; - import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; @@ -32,11 +30,12 @@ import org.opensearch.sql.correctness.testset.TestDataSet; import org.opensearch.test.OpenSearchIntegTestCase; -/** - * Correctness integration test by performing comparison test with other databases. - */ +/** Correctness integration test by performing comparison test with other databases. 
*/ @OpenSearchIntegTestCase.SuiteScopeTestCase -@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE, numDataNodes = 3, supportsDedicatedMasters = false) +@OpenSearchIntegTestCase.ClusterScope( + scope = OpenSearchIntegTestCase.Scope.SUITE, + numDataNodes = 3, + supportsDedicatedMasters = false) @ThreadLeakScope(ThreadLeakScope.Scope.NONE) public class CorrectnessIT extends OpenSearchIntegTestCase { @@ -47,8 +46,8 @@ public void performComparisonTest() { TestConfig config = new TestConfig(getCmdLineArgs()); LOG.info("Starting comparison test {}", config); - try (ComparisonTest test = new ComparisonTest(getThisDBConnection(config), - getOtherDBConnections(config))) { + try (ComparisonTest test = + new ComparisonTest(getThisDBConnection(config), getOtherDBConnections(config))) { LOG.info("Loading test data set..."); test.connect(); for (TestDataSet dataSet : config.getTestDataSets()) { @@ -81,9 +80,7 @@ private DBConnection getThisDBConnection(TestConfig config) { return new JDBCConnection("DB Tested", dbUrl); } - /** - * Use OpenSearch cluster given on CLI arg or internal embedded in SQLIntegTestCase - */ + /** Use OpenSearch cluster given on CLI arg or internal embedded in SQLIntegTestCase */ private DBConnection getOpenSearchConnection(TestConfig config) { RestClient client; String openSearchHost = config.getOpenSearchHostUrl(); @@ -96,14 +93,11 @@ private DBConnection getOpenSearchConnection(TestConfig config) { return new OpenSearchConnection("jdbc:opensearch://" + openSearchHost, client); } - /** - * Create database connection with database name and connect URL - */ + /** Create database connection with database name and connect URL */ private DBConnection[] getOtherDBConnections(TestConfig config) { - return config.getOtherDbConnectionNameAndUrls(). - entrySet().stream(). - map(e -> new JDBCConnection(e.getKey(), e.getValue())). 
- toArray(DBConnection[]::new); + return config.getOtherDbConnectionNameAndUrls().entrySet().stream() + .map(e -> new JDBCConnection(e.getKey(), e.getValue())) + .toArray(DBConnection[]::new); } private void store(TestReport report) { diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/TestConfig.java b/integ-test/src/test/java/org/opensearch/sql/correctness/TestConfig.java index d344c29e20..0dd96c8678 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/TestConfig.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/TestConfig.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness; import static java.util.stream.Collectors.joining; @@ -19,11 +18,8 @@ import org.opensearch.sql.legacy.utils.StringUtils; /** - * Test configuration parse the following information from command line arguments: - * 1) Test schema and data - * 2) Test queries - * 3) OpenSearch connection URL - * 4) Other database connection URLs + * Test configuration parse the following information from command line arguments: 1) Test schema + * and data 2) Test queries 3) OpenSearch connection URL 4) Other database connection URLs */ public class TestConfig { @@ -37,9 +33,7 @@ public class TestConfig { private final String openSearchHostUrl; - /** - * Test against some database rather than OpenSearch via our JDBC driver - */ + /** Test against some database rather than OpenSearch via our JDBC driver */ private final String dbConnectionUrl; private final Map otherDbConnectionNameAndUrls = new HashMap<>(); @@ -75,12 +69,14 @@ public Map getOtherDbConnectionNameAndUrls() { private TestDataSet[] buildDefaultTestDataSet() { return new TestDataSet[] { - new TestDataSet("opensearch_dashboards_sample_data_flights", - readFile("opensearch_dashboards_sample_data_flights.json"), - readFile("opensearch_dashboards_sample_data_flights.csv")), - new TestDataSet("opensearch_dashboards_sample_data_ecommerce", - 
readFile("opensearch_dashboards_sample_data_ecommerce.json"), - readFile("opensearch_dashboards_sample_data_ecommerce.csv")), + new TestDataSet( + "opensearch_dashboards_sample_data_flights", + readFile("opensearch_dashboards_sample_data_flights.json"), + readFile("opensearch_dashboards_sample_data_flights.csv")), + new TestDataSet( + "opensearch_dashboards_sample_data_ecommerce", + readFile("opensearch_dashboards_sample_data_ecommerce.json"), + readFile("opensearch_dashboards_sample_data_ecommerce.csv")), }; } @@ -118,31 +114,37 @@ private static String readFile(String relativePath) { @Override public String toString() { return "\n=================================\n" - + "Tested Database : " + openSearchHostUrlToString() + '\n' - + "Other Databases :\n" + otherDbConnectionInfoToString() + '\n' - + "Test data set(s) :\n" + testDataSetsToString() + '\n' - + "Test query set : " + testQuerySet + '\n' + + "Tested Database : " + + openSearchHostUrlToString() + + '\n' + + "Other Databases :\n" + + otherDbConnectionInfoToString() + + '\n' + + "Test data set(s) :\n" + + testDataSetsToString() + + '\n' + + "Test query set : " + + testQuerySet + + '\n' + "=================================\n"; } private String testDataSetsToString() { - return Arrays.stream(testDataSets). - map(TestDataSet::toString). - collect(joining("\n")); + return Arrays.stream(testDataSets).map(TestDataSet::toString).collect(joining("\n")); } private String openSearchHostUrlToString() { if (!dbConnectionUrl.isEmpty()) { return dbConnectionUrl; } - return openSearchHostUrl.isEmpty() ? "(Use internal OpenSearch in workspace)" : - openSearchHostUrl; + return openSearchHostUrl.isEmpty() + ? "(Use internal OpenSearch in workspace)" + : openSearchHostUrl; } private String otherDbConnectionInfoToString() { - return otherDbConnectionNameAndUrls.entrySet().stream(). - map(e -> StringUtils.format(" %s = %s", e.getKey(), e.getValue())). 
- collect(joining("\n")); + return otherDbConnectionNameAndUrls.entrySet().stream() + .map(e -> StringUtils.format(" %s = %s", e.getKey(), e.getValue())) + .collect(joining("\n")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/report/ErrorTestCase.java b/integ-test/src/test/java/org/opensearch/sql/correctness/report/ErrorTestCase.java index cb13a01f98..1d69ff10ee 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/report/ErrorTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/report/ErrorTestCase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.report; import static org.opensearch.sql.correctness.report.TestCaseReport.TestResult.FAILURE; @@ -12,22 +11,17 @@ import lombok.Getter; import lombok.ToString; -/** - * Report for test case that ends with an error. - */ +/** Report for test case that ends with an error. */ @EqualsAndHashCode(callSuper = true) @ToString(callSuper = true) @Getter public class ErrorTestCase extends TestCaseReport { - /** - * Root cause of the error - */ + /** Root cause of the error */ private final String reason; public ErrorTestCase(int id, String sql, String reason) { super(id, sql, FAILURE); this.reason = reason; } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/report/FailedTestCase.java b/integ-test/src/test/java/org/opensearch/sql/correctness/report/FailedTestCase.java index 86693b98e9..2b5ab431e4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/report/FailedTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/report/FailedTestCase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.report; import static org.opensearch.sql.correctness.report.TestCaseReport.TestResult.FAILURE; @@ -16,30 +15,21 @@ import lombok.ToString; import org.opensearch.sql.correctness.runner.resultset.DBResult; -/** - * 
Report for test case that fails due to inconsistent result set. - */ +/** Report for test case that fails due to inconsistent result set. */ @EqualsAndHashCode(callSuper = true) @ToString(callSuper = true) @Getter public class FailedTestCase extends TestCaseReport { - /** - * Inconsistent result sets for reporting - */ + /** Inconsistent result sets for reporting */ private final List resultSets; - /** - * Explain where the difference is caused the test failure. - */ + /** Explain where the difference is caused the test failure. */ private final String explain; - /** - * Errors occurred for partial other databases. - */ + /** Errors occurred for partial other databases. */ private final String errors; - public FailedTestCase(int id, String sql, List resultSets, String errors) { super(id, sql, FAILURE); this.resultSets = resultSets; @@ -47,10 +37,9 @@ public FailedTestCase(int id, String sql, List resultSets, String erro this.errors = errors; // Generate explanation by diff the first result with remaining - this.explain = resultSets.subList(1, resultSets.size()) - .stream() - .map(result -> resultSets.get(0).diff(result)) - .collect(Collectors.joining(", ")); + this.explain = + resultSets.subList(1, resultSets.size()).stream() + .map(result -> resultSets.get(0).diff(result)) + .collect(Collectors.joining(", ")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/report/SuccessTestCase.java b/integ-test/src/test/java/org/opensearch/sql/correctness/report/SuccessTestCase.java index 62cd9b3fbe..8ec996e660 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/report/SuccessTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/report/SuccessTestCase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.report; import static org.opensearch.sql.correctness.report.TestCaseReport.TestResult.SUCCESS; @@ -12,9 +11,7 @@ import lombok.Getter; import lombok.ToString; -/** 
- * Report for successful test case result. - */ +/** Report for successful test case result. */ @EqualsAndHashCode(callSuper = true) @ToString(callSuper = true) @Getter diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestCaseReport.java b/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestCaseReport.java index 1a6285c52e..7567e9cd6a 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestCaseReport.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestCaseReport.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.report; import static org.opensearch.sql.correctness.report.TestCaseReport.TestResult.SUCCESS; @@ -12,22 +11,19 @@ import lombok.Getter; import lombok.ToString; -/** - * Base class for different test result. - */ +/** Base class for different test result. */ @EqualsAndHashCode @ToString public abstract class TestCaseReport { public enum TestResult { - SUCCESS, FAILURE; + SUCCESS, + FAILURE; } - @Getter - private final int id; + @Getter private final int id; - @Getter - private final String sql; + @Getter private final String sql; private final TestResult result; @@ -40,5 +36,4 @@ public TestCaseReport(int id, String sql, TestResult result) { public String getResult() { return result == SUCCESS ? 
"Success" : "Failed"; } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestReport.java b/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestReport.java index 88b23ccd5b..9b9b3b7a23 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestReport.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestReport.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.report; import java.util.ArrayList; @@ -12,9 +11,7 @@ import lombok.Getter; import lombok.ToString; -/** - * Test report class to generate JSON report. - */ +/** Test report class to generate JSON report. */ @EqualsAndHashCode @ToString @Getter @@ -37,5 +34,4 @@ public void addTestCase(TestCaseReport testCase) { summary.addFailure(); } } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestSummary.java b/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestSummary.java index 90767582b5..bbd4385460 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestSummary.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/report/TestSummary.java @@ -3,16 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.report; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.ToString; -/** - * Test summary section. - */ +/** Test summary section. 
*/ @EqualsAndHashCode @ToString @Getter @@ -33,5 +30,4 @@ public void addFailure() { failure++; total++; } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/ComparisonTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/ComparisonTest.java index 129bc70426..1fee41f1fe 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/ComparisonTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/ComparisonTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.runner; import static com.google.common.collect.ObjectArrays.concat; @@ -25,24 +24,16 @@ import org.opensearch.sql.correctness.testset.TestQuerySet; import org.opensearch.sql.legacy.utils.StringUtils; -/** - * Comparison test runner for query result correctness. - */ +/** Comparison test runner for query result correctness. */ public class ComparisonTest implements AutoCloseable { - /** - * Next id for test case - */ + /** Next id for test case */ private int testCaseId = 1; - /** - * Connection for database being tested - */ + /** Connection for database being tested */ private final DBConnection thisConnection; - /** - * Database connections for reference databases - */ + /** Database connections for reference databases */ private final DBConnection[] otherDbConnections; public ComparisonTest(DBConnection thisConnection, DBConnection[] otherDbConnections) { @@ -53,9 +44,7 @@ public ComparisonTest(DBConnection thisConnection, DBConnection[] otherDbConnect Arrays.sort(this.otherDbConnections, Comparator.comparing(DBConnection::getDatabaseName)); } - /** - * Open database connection. - */ + /** Open database connection. 
*/ public void connect() { for (DBConnection conn : concat(thisConnection, otherDbConnections)) { conn.connect(); @@ -87,8 +76,11 @@ public TestReport verify(TestQuerySet querySet) { DBResult openSearchResult = thisConnection.select(sql); report.addTestCase(compareWithOtherDb(sql, openSearchResult)); } catch (Exception e) { - report.addTestCase(new ErrorTestCase(nextId(), sql, - StringUtils.format("%s: %s", e.getClass().getSimpleName(), extractRootCause(e)))); + report.addTestCase( + new ErrorTestCase( + nextId(), + sql, + StringUtils.format("%s: %s", e.getClass().getSimpleName(), extractRootCause(e)))); } } return report; @@ -116,9 +108,7 @@ public void close() { } } - /** - * Execute the query and compare with current result - */ + /** Execute the query and compare with current result */ private TestCaseReport compareWithOtherDb(String sql, DBResult openSearchResult) { List mismatchResults = Lists.newArrayList(openSearchResult); StringBuilder reasons = new StringBuilder(); @@ -137,7 +127,8 @@ private TestCaseReport compareWithOtherDb(String sql, DBResult openSearchResult) } } - if (mismatchResults.size() == 1) { // Only OpenSearch result on list. Cannot find other database support this query + if (mismatchResults.size() + == 1) { // Only OpenSearch result on list. Cannot find other database support this query return new ErrorTestCase(nextId(), sql, "No other databases support this query: " + reasons); } return new FailedTestCase(nextId(), sql, mismatchResults, reasons.toString()); @@ -150,8 +141,8 @@ private int nextId() { private void insertTestDataInBatch(DBConnection conn, String tableName, List testData) { Iterator iterator = testData.iterator(); String[] fieldNames = (String[]) iterator.next(); // first row is header of column names - Iterators.partition(iterator, 100). 
- forEachRemaining(batch -> conn.insert(tableName, fieldNames, batch)); + Iterators.partition(iterator, 100) + .forEachRemaining(batch -> conn.insert(tableName, fieldNames, batch)); } private String extractRootCause(Throwable e) { @@ -167,5 +158,4 @@ private String extractRootCause(Throwable e) { } return e.toString(); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/DBConnection.java b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/DBConnection.java index a475428735..b01762fd21 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/DBConnection.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/DBConnection.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.runner.connection; import java.util.List; import org.opensearch.sql.correctness.runner.resultset.DBResult; -/** - * Abstraction for different databases. - */ +/** Abstraction for different databases. */ public interface DBConnection { /** @@ -19,25 +16,23 @@ public interface DBConnection { */ String getDatabaseName(); - /** - * Connect to database by opening a connection. - */ + /** Connect to database by opening a connection. */ void connect(); /** * Create table with the schema. * * @param tableName table name - * @param schema schema json in OpenSearch mapping format + * @param schema schema json in OpenSearch mapping format */ void create(String tableName, String schema); /** * Insert batch of data to database. * - * @param tableName table name + * @param tableName table name * @param columnNames column names - * @param batch batch of rows + * @param batch batch of rows */ void insert(String tableName, String[] columnNames, List batch); @@ -56,9 +51,6 @@ public interface DBConnection { */ void drop(String tableName); - /** - * Close the database connection. - */ + /** Close the database connection. 
*/ void close(); - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/JDBCConnection.java b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/JDBCConnection.java index d2d7d2aee6..7a67022117 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/JDBCConnection.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/JDBCConnection.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.runner.connection; import static java.util.stream.Collectors.joining; @@ -23,33 +22,23 @@ import org.opensearch.sql.correctness.runner.resultset.Row; import org.opensearch.sql.legacy.utils.StringUtils; -/** - * Database connection by JDBC driver. - */ +/** Database connection by JDBC driver. */ public class JDBCConnection implements DBConnection { private static final String SINGLE_QUOTE = "'"; private static final String DOUBLE_QUOTE = "''"; private static final String BACKTICK = "`"; - /** - * Database name for display - */ + /** Database name for display */ private final String databaseName; - /** - * Database connection URL - */ + /** Database connection URL */ private final String connectionUrl; - /** - * JDBC driver config properties. - */ + /** JDBC driver config properties. */ private final Properties properties; - /** - * Current live connection - */ + /** Current live connection */ private Connection connection; public JDBCConnection(String databaseName, String connectionUrl) { @@ -58,9 +47,10 @@ public JDBCConnection(String databaseName, String connectionUrl) { /** * Create a JDBC connection with parameters given (but not connect to database at the moment). 
- * @param databaseName database name - * @param connectionUrl connection URL - * @param properties config properties + * + * @param databaseName database name + * @param connectionUrl connection URL + * @param properties config properties */ public JDBCConnection(String databaseName, String connectionUrl, Properties properties) { this.databaseName = databaseName; @@ -104,11 +94,11 @@ public void drop(String tableName) { @Override public void insert(String tableName, String[] columnNames, List batch) { try (Statement stmt = connection.createStatement()) { - String names = - Arrays.stream(columnNames).map(this::delimited).collect(joining(",")); + String names = Arrays.stream(columnNames).map(this::delimited).collect(joining(",")); for (Object[] fieldValues : batch) { - stmt.addBatch(StringUtils.format( - "INSERT INTO %s(%s) VALUES (%s)", tableName, names, getValueList(fieldValues))); + stmt.addBatch( + StringUtils.format( + "INSERT INTO %s(%s) VALUES (%s)", tableName, names, getValueList(fieldValues))); } stmt.executeBatch(); } catch (SQLException e) { @@ -120,8 +110,10 @@ public void insert(String tableName, String[] columnNames, List batch) public DBResult select(String query) { try (Statement stmt = connection.createStatement()) { ResultSet resultSet = stmt.executeQuery(query); - DBResult result = isOrderByQuery(query) - ? DBResult.resultInOrder(databaseName) : DBResult.result(databaseName); + DBResult result = + isOrderByQuery(query) + ? DBResult.resultInOrder(databaseName) + : DBResult.result(databaseName); populateMetaData(resultSet, result); populateData(resultSet, result); return result; @@ -140,20 +132,22 @@ public void close() { } /** - * Parse out type in schema json and convert to field name and type pairs for CREATE TABLE statement. + * Parse out type in schema json and convert to field name and type pairs for CREATE TABLE + * statement. 
*/ private String parseColumnNameAndTypesInSchemaJson(String schema) { JSONObject json = (JSONObject) new JSONObject(schema).query("/mappings/properties"); - return json.keySet().stream(). - map(colName -> delimited(colName) + " " + mapToJDBCType(json.getJSONObject(colName) - .getString("type"))) + return json.keySet().stream() + .map( + colName -> + delimited(colName) + + " " + + mapToJDBCType(json.getJSONObject(colName).getString("type"))) .collect(joining(",")); } private String getValueList(Object[] fieldValues) { - return Arrays.stream(fieldValues). - map(this::convertValueObjectToString). - collect(joining(",")); + return Arrays.stream(fieldValues).map(this::convertValueObjectToString).collect(joining(",")); } private String convertValueObjectToString(Object value) { @@ -209,9 +203,7 @@ private boolean isOrderByQuery(String query) { return query.trim().toUpperCase().contains("ORDER BY"); } - /** - * Setter for unit test mock - */ + /** Setter for unit test mock */ public void setConnection(Connection connection) { this.connection = connection; } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/OpenSearchConnection.java b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/OpenSearchConnection.java index 258c031b76..8a2240855f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/OpenSearchConnection.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/connection/OpenSearchConnection.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.runner.connection; import java.io.IOException; @@ -16,18 +15,15 @@ import org.opensearch.sql.correctness.runner.resultset.DBResult; /** - * OpenSearch database connection for insertion. This class wraps JDBCConnection to delegate query method. + * OpenSearch database connection for insertion. This class wraps JDBCConnection to delegate query + * method. 
*/ public class OpenSearchConnection implements DBConnection { - /** - * Connection via our OpenSearch JDBC driver - */ + /** Connection via our OpenSearch JDBC driver */ private final DBConnection connection; - /** - * Native OpenSearch REST client for operation unsupported by driver such as CREATE/INSERT - */ + /** Native OpenSearch REST client for operation unsupported by driver such as CREATE/INSERT */ private final RestClient client; public OpenSearchConnection(String connectionUrl, RestClient client) { @@ -112,10 +108,8 @@ private String buildBulkBody(String[] columnNames, List batch) { } } - body.append("{\"index\":{}}\n"). - append(json).append("\n"); + body.append("{\"index\":{}}\n").append(json).append("\n"); } return body.toString(); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/DBResult.java b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/DBResult.java index eb522b008d..e3940bc539 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/DBResult.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/DBResult.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.runner.resultset; import com.google.common.collect.HashMultiset; @@ -12,7 +11,6 @@ import java.util.Collection; import java.util.Collections; import java.util.List; -import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; import lombok.Getter; @@ -21,53 +19,38 @@ import org.opensearch.sql.legacy.utils.StringUtils; /** - * Query result for equality comparison. Based on different type of query, such as query with/without ORDER BY and - * query with SELECT columns or just *, order of column and row may matter or not. So the internal data structure of this - * class is passed in from outside either list or set, hash map or linked hash map etc. + * Query result for equality comparison. 
Based on different type of query, such as query + * with/without ORDER BY and query with SELECT columns or just *, order of column and row may matter + * or not. So the internal data structure of this class is passed in from outside either list or + * set, hash map or linked hash map etc. */ @ToString public class DBResult { - /** - * Possible types for floating point number - * H2 2.x use DOUBLE PRECISION instead of DOUBLE. - */ + /** Possible types for floating point number H2 2.x use DOUBLE PRECISION instead of DOUBLE. */ private static final Set FLOAT_TYPES = ImmutableSet.of("FLOAT", "DOUBLE", "REAL", "DOUBLE PRECISION", "DECFLOAT"); - /** - * Possible types for varchar. - * H2 2.x use CHARACTER VARYING instead of VARCHAR. - */ + /** Possible types for varchar. H2 2.x use CHARACTER VARYING instead of VARCHAR. */ private static final Set VARCHAR = ImmutableSet.of("CHARACTER VARYING", "VARCHAR"); - /** - * Database name for display - */ + /** Database name for display */ private final String databaseName; - /** - * Column name and types from result set meta data - */ - @Getter - private final Collection schema; + /** Column name and types from result set meta data */ + @Getter private final Collection schema; - /** - * Data rows from result set - */ + /** Data rows from result set */ private final Collection dataRows; - /** - * In theory, a result set is a multi-set (bag) that allows duplicate and doesn't - * have order. - */ + /** In theory, a result set is a multi-set (bag) that allows duplicate and doesn't have order. */ public static DBResult result(String databaseName) { return new DBResult(databaseName, new ArrayList<>(), HashMultiset.create()); } /** - * But for queries with ORDER BY clause, we want to preserve the original order of data rows - * so we can check if the order is correct. + * But for queries with ORDER BY clause, we want to preserve the original order of data rows so we + * can check if the order is correct. 
*/ public static DBResult resultInOrder(String databaseName) { return new DBResult(databaseName, new ArrayList<>(), new ArrayList<>()); @@ -104,21 +87,20 @@ public String getDatabaseName() { return databaseName; } - /** - * Flatten for simplifying json generated. - */ + /** Flatten for simplifying json generated. */ public Collection> getDataRows() { - Collection> values = isDataRowOrdered() - ? new ArrayList<>() : HashMultiset.create(); + Collection> values = + isDataRowOrdered() ? new ArrayList<>() : HashMultiset.create(); dataRows.stream().map(Row::getValues).forEach(values::add); return values; } /** - * Explain the difference between this and other DB result which is helpful for - * troubleshooting in final test report. - * @param other other DB result - * @return explain the difference + * Explain the difference between this and other DB result which is helpful for troubleshooting in + * final test report. + * + * @param other other DB result + * @return explain the difference */ public String diff(DBResult other) { String result = diffSchema(other); @@ -144,26 +126,27 @@ private String diffDataRows(DBResult other) { } /** - * Check if two lists are same otherwise explain if size or any element - * is different at some position. + * Check if two lists are same otherwise explain if size or any element is different at some + * position. 
*/ private String diff(String name, List thisList, List otherList) { if (thisList.size() != otherList.size()) { - return StringUtils.format("%s size is different: this=[%d], other=[%d]", - name, thisList.size(), otherList.size()); + return StringUtils.format( + "%s size is different: this=[%d], other=[%d]", name, thisList.size(), otherList.size()); } int diff = findFirstDifference(thisList, otherList); if (diff >= 0) { - return StringUtils.format("%s at [%d] is different: this=[%s], other=[%s]", + return StringUtils.format( + "%s at [%d] is different: this=[%s], other=[%s]", name, diff, thisList.get(diff), otherList.get(diff)); } return ""; } /** - * Find first different element with assumption that the lists given have same size - * and there is no NULL element inside. + * Find first different element with assumption that the lists given have same size and there is + * no NULL element inside. */ private static int findFirstDifference(List list1, List list2) { for (int i = 0; i < list1.size(); i++) { @@ -175,16 +158,14 @@ private static int findFirstDifference(List list1, List list2) { } /** - * Is data row a list that represent original order of data set - * which doesn't/shouldn't sort again. + * Is data row a list that represent original order of data set which doesn't/shouldn't sort + * again. */ private boolean isDataRowOrdered() { return (dataRows instanceof List); } - /** - * Convert a collection to list and sort and return this new list. - */ + /** Convert a collection to list and sort and return this new list. 
*/ private static > List sort(Collection collection) { ArrayList list = new ArrayList<>(collection); Collections.sort(list); @@ -201,12 +182,16 @@ public boolean equals(final Object o) { final DBResult other = (DBResult) o; // H2 calculates the value before setting column name // for example, for query "select 1 + 1" it returns a column named "2" instead of "1 + 1" - boolean skipColumnNameCheck = databaseName.equalsIgnoreCase("h2") || other.databaseName.equalsIgnoreCase("h2"); + boolean skipColumnNameCheck = + databaseName.equalsIgnoreCase("h2") || other.databaseName.equalsIgnoreCase("h2"); if (!skipColumnNameCheck && !schema.equals(other.schema)) { return false; } - if (skipColumnNameCheck && !schema.stream().map(Type::getType).collect(Collectors.toList()) - .equals(other.schema.stream().map(Type::getType).collect(Collectors.toList()))) { + if (skipColumnNameCheck + && !schema.stream() + .map(Type::getType) + .collect(Collectors.toList()) + .equals(other.schema.stream().map(Type::getType).collect(Collectors.toList()))) { return false; } return dataRows.equals(other.dataRows); diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/Row.java b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/Row.java index da08487a10..973ea76e71 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/Row.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/Row.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.runner.resultset; import java.math.BigDecimal; @@ -15,9 +14,7 @@ import lombok.Getter; import lombok.ToString; -/** - * Row in result set. - */ +/** Row in result set. 
*/ @EqualsAndHashCode @ToString @Getter @@ -77,5 +74,4 @@ public int compareTo(Row other) { } return 0; } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/Type.java b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/Type.java index 23cc0e3347..d626f75ccb 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/Type.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/runner/resultset/Type.java @@ -3,25 +3,17 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.runner.resultset; import lombok.Data; -/** - * Column type in schema - */ +/** Column type in schema */ @Data public class Type { - /** - * Column name - */ + /** Column name */ private final String name; - /** - * Column type - */ + /** Column type */ private final String type; - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/ComparisonTestTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/ComparisonTestTest.java index 03c3967544..5cab5b3175 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/ComparisonTestTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/ComparisonTestTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; import static java.util.Arrays.asList; @@ -29,37 +28,37 @@ import org.opensearch.sql.correctness.runner.resultset.Type; import org.opensearch.sql.correctness.testset.TestQuerySet; -/** - * Tests for {@link ComparisonTest} - */ +/** Tests for {@link ComparisonTest} */ @RunWith(MockitoJUnitRunner.class) public class ComparisonTestTest { - @Mock - private DBConnection openSearchConnection; + @Mock private DBConnection openSearchConnection; - @Mock - private DBConnection otherDbConnection; + @Mock private DBConnection otherDbConnection; private ComparisonTest correctnessTest; @Before public void 
setUp() { when(otherDbConnection.getDatabaseName()).thenReturn("Other"); - correctnessTest = new ComparisonTest( - openSearchConnection, new DBConnection[] {otherDbConnection} - ); + correctnessTest = + new ComparisonTest(openSearchConnection, new DBConnection[] {otherDbConnection}); } @Test public void testSuccess() { - when(openSearchConnection.select(anyString())).thenReturn( - new DBResult("OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))) - ); - when(otherDbConnection.select(anyString())).thenReturn( - new DBResult("Other DB", asList(new Type("firstname", "text")), - asList(new Row(asList("John")))) - ); + when(openSearchConnection.select(anyString())) + .thenReturn( + new DBResult( + "OpenSearch", + asList(new Type("firstname", "text")), + asList(new Row(asList("John"))))); + when(otherDbConnection.select(anyString())) + .thenReturn( + new DBResult( + "Other DB", + asList(new Type("firstname", "text")), + asList(new Row(asList("John"))))); TestReport expected = new TestReport(); expected.addTestCase(new SuccessTestCase(1, "SELECT * FROM accounts")); @@ -70,15 +69,18 @@ public void testSuccess() { @Test public void testFailureDueToInconsistency() { DBResult openSearchResult = - new DBResult("OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))); - DBResult otherDbResult = new DBResult("Other DB", asList(new Type("firstname", "text")), - asList(new Row(asList("JOHN")))); + new DBResult( + "OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))); + DBResult otherDbResult = + new DBResult( + "Other DB", asList(new Type("firstname", "text")), asList(new Row(asList("JOHN")))); when(openSearchConnection.select(anyString())).thenReturn(openSearchResult); when(otherDbConnection.select(anyString())).thenReturn(otherDbResult); TestReport expected = new TestReport(); expected.addTestCase( - new FailedTestCase(1, "SELECT * FROM accounts", asList(openSearchResult, otherDbResult), 
"")); + new FailedTestCase( + 1, "SELECT * FROM accounts", asList(openSearchResult, otherDbResult), "")); TestReport actual = correctnessTest.verify(querySet("SELECT * FROM accounts")); assertEquals(expected, actual); } @@ -87,16 +89,19 @@ public void testFailureDueToInconsistency() { public void testSuccessFinally() { DBConnection anotherDbConnection = mock(DBConnection.class); when(anotherDbConnection.getDatabaseName()).thenReturn("Another"); - correctnessTest = new ComparisonTest( - openSearchConnection, new DBConnection[] {otherDbConnection, anotherDbConnection} - ); + correctnessTest = + new ComparisonTest( + openSearchConnection, new DBConnection[] {otherDbConnection, anotherDbConnection}); DBResult openSearchResult = - new DBResult("OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))); - DBResult otherDbResult = new DBResult("Other DB", asList(new Type("firstname", "text")), - asList(new Row(asList("JOHN")))); - DBResult anotherDbResult = new DBResult("Another DB", asList(new Type("firstname", "text")), - asList(new Row(asList("John")))); + new DBResult( + "OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))); + DBResult otherDbResult = + new DBResult( + "Other DB", asList(new Type("firstname", "text")), asList(new Row(asList("JOHN")))); + DBResult anotherDbResult = + new DBResult( + "Another DB", asList(new Type("firstname", "text")), asList(new Row(asList("John")))); when(openSearchConnection.select(anyString())).thenReturn(openSearchResult); when(anotherDbConnection.select(anyString())).thenReturn(anotherDbResult); @@ -111,30 +116,38 @@ public void testFailureDueToEventualInconsistency() { DBConnection anotherDbConnection = mock(DBConnection.class); when(anotherDbConnection.getDatabaseName()) .thenReturn("ZZZ DB"); // Make sure this will be called after Other DB - correctnessTest = new ComparisonTest( - openSearchConnection, new DBConnection[] {otherDbConnection, anotherDbConnection} - ); + 
correctnessTest = + new ComparisonTest( + openSearchConnection, new DBConnection[] {otherDbConnection, anotherDbConnection}); DBResult openSearchResult = - new DBResult("OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))); - DBResult otherDbResult = new DBResult("Other DB", asList(new Type("firstname", "text")), - asList(new Row(asList("JOHN")))); - DBResult anotherDbResult = new DBResult("ZZZ DB", asList(new Type("firstname", "text")), - asList(new Row(asList("Hank")))); + new DBResult( + "OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))); + DBResult otherDbResult = + new DBResult( + "Other DB", asList(new Type("firstname", "text")), asList(new Row(asList("JOHN")))); + DBResult anotherDbResult = + new DBResult( + "ZZZ DB", asList(new Type("firstname", "text")), asList(new Row(asList("Hank")))); when(openSearchConnection.select(anyString())).thenReturn(openSearchResult); when(otherDbConnection.select(anyString())).thenReturn(otherDbResult); when(anotherDbConnection.select(anyString())).thenReturn(anotherDbResult); TestReport expected = new TestReport(); - expected.addTestCase(new FailedTestCase(1, "SELECT * FROM accounts", - asList(openSearchResult, otherDbResult, anotherDbResult), "")); + expected.addTestCase( + new FailedTestCase( + 1, + "SELECT * FROM accounts", + asList(openSearchResult, otherDbResult, anotherDbResult), + "")); TestReport actual = correctnessTest.verify(querySet("SELECT * FROM accounts")); assertEquals(expected, actual); } @Test public void testErrorDueToESException() { - when(openSearchConnection.select(anyString())).thenThrow(new RuntimeException("All shards failure")); + when(openSearchConnection.select(anyString())) + .thenThrow(new RuntimeException("All shards failure")); TestReport expected = new TestReport(); expected.addTestCase( @@ -145,15 +158,21 @@ public void testErrorDueToESException() { @Test public void testErrorDueToNoOtherDBSupportThisQuery() { - 
when(openSearchConnection.select(anyString())).thenReturn( - new DBResult("OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))) - ); + when(openSearchConnection.select(anyString())) + .thenReturn( + new DBResult( + "OpenSearch", + asList(new Type("firstname", "text")), + asList(new Row(asList("John"))))); when(otherDbConnection.select(anyString())) .thenThrow(new RuntimeException("Unsupported feature")); TestReport expected = new TestReport(); - expected.addTestCase(new ErrorTestCase(1, "SELECT * FROM accounts", - "No other databases support this query: Unsupported feature;")); + expected.addTestCase( + new ErrorTestCase( + 1, + "SELECT * FROM accounts", + "No other databases support this query: Unsupported feature;")); TestReport actual = correctnessTest.verify(querySet("SELECT * FROM accounts")); assertEquals(expected, actual); } @@ -162,17 +181,22 @@ public void testErrorDueToNoOtherDBSupportThisQuery() { public void testSuccessWhenOneDBSupportThisQuery() { DBConnection anotherDbConnection = mock(DBConnection.class); when(anotherDbConnection.getDatabaseName()).thenReturn("Another"); - correctnessTest = new ComparisonTest( - openSearchConnection, new DBConnection[] {otherDbConnection, anotherDbConnection} - ); - - when(openSearchConnection.select(anyString())).thenReturn( - new DBResult("OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))) - ); - when(anotherDbConnection.select(anyString())).thenReturn( - new DBResult("Another DB", asList(new Type("firstname", "text")), - asList(new Row(asList("John")))) - ); + correctnessTest = + new ComparisonTest( + openSearchConnection, new DBConnection[] {otherDbConnection, anotherDbConnection}); + + when(openSearchConnection.select(anyString())) + .thenReturn( + new DBResult( + "OpenSearch", + asList(new Type("firstname", "text")), + asList(new Row(asList("John"))))); + when(anotherDbConnection.select(anyString())) + .thenReturn( + new DBResult( + "Another DB", 
+ asList(new Type("firstname", "text")), + asList(new Row(asList("John"))))); TestReport expected = new TestReport(); expected.addTestCase(new SuccessTestCase(1, "SELECT * FROM accounts")); @@ -184,12 +208,13 @@ public void testSuccessWhenOneDBSupportThisQuery() { public void testFailureDueToInconsistencyAndExceptionMixed() { DBConnection otherDBConnection2 = mock(DBConnection.class); when(otherDBConnection2.getDatabaseName()).thenReturn("ZZZ DB"); - correctnessTest = new ComparisonTest( - openSearchConnection, new DBConnection[] {otherDbConnection, otherDBConnection2} - ); + correctnessTest = + new ComparisonTest( + openSearchConnection, new DBConnection[] {otherDbConnection, otherDBConnection2}); DBResult openSearchResult = - new DBResult("OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))); + new DBResult( + "OpenSearch", asList(new Type("firstname", "text")), asList(new Row(asList("John")))); DBResult otherResult = new DBResult("Other", asList(new Type("firstname", "text")), Collections.emptyList()); @@ -199,8 +224,12 @@ public void testFailureDueToInconsistencyAndExceptionMixed() { .thenThrow(new RuntimeException("Unsupported feature")); TestReport expected = new TestReport(); - expected.addTestCase(new FailedTestCase(1, "SELECT * FROM accounts", - asList(openSearchResult, otherResult), "Unsupported feature;")); + expected.addTestCase( + new FailedTestCase( + 1, + "SELECT * FROM accounts", + asList(openSearchResult, otherResult), + "Unsupported feature;")); TestReport actual = correctnessTest.verify(querySet("SELECT * FROM accounts")); assertEquals(expected, actual); } @@ -208,5 +237,4 @@ public void testFailureDueToInconsistencyAndExceptionMixed() { private TestQuerySet querySet(String query) { return new TestQuerySet(new String[] {query}); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/DBResultTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/DBResultTest.java index 
3f6da0c39d..793728a9e9 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/DBResultTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/DBResultTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; import static java.util.Collections.emptyList; @@ -19,9 +18,7 @@ import org.opensearch.sql.correctness.runner.resultset.Row; import org.opensearch.sql.correctness.runner.resultset.Type; -/** - * Unit tests for {@link DBResult} - */ +/** Unit tests for {@link DBResult} */ public class DBResultTest { @Test @@ -80,35 +77,45 @@ public void dbResultWithDifferentColumnTypeShouldNotEqual() { @Test public void shouldExplainColumnTypeDifference() { - DBResult result1 = new DBResult("DB 1", - Arrays.asList(new Type("name", "VARCHAR"), new Type("age", "FLOAT")), emptyList()); - DBResult result2 = new DBResult("DB 2", - Arrays.asList(new Type("name", "VARCHAR"), new Type("age", "INT")), emptyList()); + DBResult result1 = + new DBResult( + "DB 1", + Arrays.asList(new Type("name", "VARCHAR"), new Type("age", "FLOAT")), + emptyList()); + DBResult result2 = + new DBResult( + "DB 2", + Arrays.asList(new Type("name", "VARCHAR"), new Type("age", "INT")), + emptyList()); assertEquals( "Schema type at [1] is different: " + "this=[Type(name=age, type=FLOAT)], other=[Type(name=age, type=INT)]", - result1.diff(result2) - ); + result1.diff(result2)); } @Test public void shouldExplainDataRowsDifference() { - DBResult result1 = new DBResult("DB 1", Arrays.asList(new Type("name", "VARCHAR")), - Sets.newHashSet( - new Row(Arrays.asList("hello")), - new Row(Arrays.asList("world")), - new Row(Lists.newArrayList((Object) null)))); - DBResult result2 = new DBResult("DB 2",Arrays.asList(new Type("name", "VARCHAR")), - Sets.newHashSet( - new Row(Lists.newArrayList((Object) null)), - new Row(Arrays.asList("hello")), - new Row(Arrays.asList("world123")))); + DBResult result1 = + new DBResult( + "DB 
1", + Arrays.asList(new Type("name", "VARCHAR")), + Sets.newHashSet( + new Row(Arrays.asList("hello")), + new Row(Arrays.asList("world")), + new Row(Lists.newArrayList((Object) null)))); + DBResult result2 = + new DBResult( + "DB 2", + Arrays.asList(new Type("name", "VARCHAR")), + Sets.newHashSet( + new Row(Lists.newArrayList((Object) null)), + new Row(Arrays.asList("hello")), + new Row(Arrays.asList("world123")))); assertEquals( "Data row at [1] is different: this=[Row(values=[world])], other=[Row(values=[world123])]", - result1.diff(result2) - ); + result1.diff(result2)); } @Test @@ -125,8 +132,6 @@ public void shouldExplainDataRowsOrderDifference() { assertEquals( "Data row at [0] is different: this=[Row(values=[hello])], other=[Row(values=[world])]", - result1.diff(result2) - ); + result1.diff(result2)); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/JDBCConnectionTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/JDBCConnectionTest.java index 0e70066136..a8e01145e7 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/JDBCConnectionTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/JDBCConnectionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; import static org.junit.Assert.assertEquals; @@ -36,17 +35,13 @@ import org.opensearch.sql.correctness.runner.resultset.DBResult; import org.opensearch.sql.correctness.runner.resultset.Type; -/** - * Tests for {@link JDBCConnection} - */ +/** Tests for {@link JDBCConnection} */ @RunWith(MockitoJUnitRunner.class) public class JDBCConnectionTest { - @Mock - private Connection connection; + @Mock private Connection connection; - @Mock - private Statement statement; + @Mock private Statement statement; private JDBCConnection conn; @@ -60,7 +55,8 @@ public void setUp() throws SQLException { @Test public void testCreateTable() throws SQLException { - 
conn.create("test", + conn.create( + "test", "{\"mappings\":{\"properties\":{\"name\":{\"type\":\"keyword\"},\"age\":{\"type\":\"INT\"}}}}"); ArgumentCaptor argCap = ArgumentCaptor.forClass(String.class); @@ -83,7 +79,9 @@ public void testDropTable() throws SQLException { @Test public void testInsertData() throws SQLException { - conn.insert("test", new String[] {"name", "age"}, + conn.insert( + "test", + new String[] {"name", "age"}, Arrays.asList(new String[] {"John", "25"}, new String[] {"Hank", "30"})); ArgumentCaptor argCap = ArgumentCaptor.forClass(String.class); @@ -93,18 +91,17 @@ public void testInsertData() throws SQLException { assertEquals( Arrays.asList( "INSERT INTO test(`name`,`age`) VALUES ('John','25')", - "INSERT INTO test(`name`,`age`) VALUES ('Hank','30')" - ), actual - ); + "INSERT INTO test(`name`,`age`) VALUES ('Hank','30')"), + actual); } @Test public void testInsertNullData() throws SQLException { - conn.insert("test", new String[] {"name", "age"}, + conn.insert( + "test", + new String[] {"name", "age"}, Arrays.asList( - new Object[] {"John", null}, - new Object[] {null, 25}, - new Object[] {"Hank", 30})); + new Object[] {"John", null}, new Object[] {null, 25}, new Object[] {"Hank", 30})); ArgumentCaptor argCap = ArgumentCaptor.forClass(String.class); verify(statement, times(3)).addBatch(argCap.capture()); @@ -114,9 +111,8 @@ public void testInsertNullData() throws SQLException { Arrays.asList( "INSERT INTO test(`name`,`age`) VALUES ('John',NULL)", "INSERT INTO test(`name`,`age`) VALUES (NULL,'25')", - "INSERT INTO test(`name`,`age`) VALUES ('Hank','30')" - ), actual - ); + "INSERT INTO test(`name`,`age`) VALUES ('Hank','30')"), + actual); } @Test @@ -129,19 +125,10 @@ public void testSelectQuery() throws SQLException { DBResult result = conn.select("SELECT * FROM test"); assertEquals("Test DB", result.getDatabaseName()); assertEquals( - Arrays.asList( - new Type("NAME", "VARCHAR"), - new Type("AGE", "INT") - ), - result.getSchema() - ); + 
Arrays.asList(new Type("NAME", "VARCHAR"), new Type("AGE", "INT")), result.getSchema()); assertEquals( - HashMultiset.create(ImmutableList.of( - Arrays.asList("John", 25), - Arrays.asList("Hank", 30) - )), - result.getDataRows() - ); + HashMultiset.create(ImmutableList.of(Arrays.asList("John", 25), Arrays.asList("Hank", 30))), + result.getDataRows()); } @Test @@ -153,24 +140,18 @@ public void testSelectQueryWithAlias() throws SQLException { when(resultSet.getMetaData()).thenReturn(metaData); DBResult result = conn.select("SELECT * FROM test"); - assertEquals( - Arrays.asList( - new Type("N", "VARCHAR"), - new Type("A", "INT") - ), - result.getSchema() - ); + assertEquals(Arrays.asList(new Type("N", "VARCHAR"), new Type("A", "INT")), result.getSchema()); } @Test public void testSelectQueryWithFloatInResultSet() throws SQLException { ResultSetMetaData metaData = mockMetaData(ImmutableMap.of("name", "VARCHAR", "balance", "FLOAT")); - ResultSet resultSet = mockResultSet( - new Object[] {"John", 25.123}, - new Object[] {"Hank", 30.456}, - new Object[] {"Allen", 15.1} - ); + ResultSet resultSet = + mockResultSet( + new Object[] {"John", 25.123}, + new Object[] {"Hank", 30.456}, + new Object[] {"Allen", 15.1}); when(statement.executeQuery(anyString())).thenReturn(resultSet); when(resultSet.getMetaData()).thenReturn(metaData); @@ -178,18 +159,15 @@ public void testSelectQueryWithFloatInResultSet() throws SQLException { assertEquals( Arrays.asList( new Type("NAME", "VARCHAR"), - new Type("BALANCE", "[FLOAT, DOUBLE, REAL, DOUBLE PRECISION, DECFLOAT]") - ), - result.getSchema() - ); + new Type("BALANCE", "[FLOAT, DOUBLE, REAL, DOUBLE PRECISION, DECFLOAT]")), + result.getSchema()); assertEquals( - HashMultiset.create(ImmutableList.of( - Arrays.asList("John", 25.13), - Arrays.asList("Hank", 30.46), - Arrays.asList("Allen", 15.1) - )), - result.getDataRows() - ); + HashMultiset.create( + ImmutableList.of( + Arrays.asList("John", 25.13), + Arrays.asList("Hank", 30.46), + 
Arrays.asList("Allen", 15.1))), + result.getDataRows()); } private ResultSet mockResultSet(Object[]... rows) throws SQLException { @@ -233,5 +211,4 @@ private ResultSetMetaData mockMetaData(Map nameAndTypes, String. when(metaData.getColumnCount()).thenReturn(nameAndTypes.size()); return metaData; } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/OpenSearchConnectionTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/OpenSearchConnectionTest.java index 73659525f2..2a7880d0c4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/OpenSearchConnectionTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/OpenSearchConnectionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; import static org.junit.Assert.assertEquals; @@ -30,14 +29,11 @@ import org.opensearch.client.RestClient; import org.opensearch.sql.correctness.runner.connection.OpenSearchConnection; -/** - * Tests for {@link OpenSearchConnection} - */ +/** Tests for {@link OpenSearchConnection} */ @RunWith(MockitoJUnitRunner.class) public class OpenSearchConnectionTest { - @Mock - private RestClient client; + @Mock private RestClient client; private OpenSearchConnection conn; @@ -63,36 +59,30 @@ public void testCreateTable() throws IOException { @Test public void testInsertData() throws IOException { - conn.insert("test", new String[] {"name"}, - Arrays.asList(new String[] {"John"}, new String[] {"Hank"})); + conn.insert( + "test", new String[] {"name"}, Arrays.asList(new String[] {"John"}, new String[] {"Hank"})); Request actual = captureActualArg(); assertEquals("POST", actual.getMethod()); assertEquals("/test/_bulk?refresh=true", actual.getEndpoint()); assertEquals( - "{\"index\":{}}\n" - + "{\"name\":\"John\"}\n" - + "{\"index\":{}}\n" - + "{\"name\":\"Hank\"}\n", - getBody(actual) - ); + "{\"index\":{}}\n" + "{\"name\":\"John\"}\n" + 
"{\"index\":{}}\n" + "{\"name\":\"Hank\"}\n", + getBody(actual)); } @Test public void testInsertNullData() throws IOException { - conn.insert("test", new String[] {"name", "age"}, + conn.insert( + "test", + new String[] {"name", "age"}, Arrays.asList(new Object[] {null, 30}, new Object[] {"Hank", null})); Request actual = captureActualArg(); assertEquals("POST", actual.getMethod()); assertEquals("/test/_bulk?refresh=true", actual.getEndpoint()); assertEquals( - "{\"index\":{}}\n" - + "{\"age\":30}\n" - + "{\"index\":{}}\n" - + "{\"name\":\"Hank\"}\n", - getBody(actual) - ); + "{\"index\":{}}\n" + "{\"age\":30}\n" + "{\"index\":{}}\n" + "{\"name\":\"Hank\"}\n", + getBody(actual)); } @Test @@ -114,5 +104,4 @@ private String getBody(Request request) throws IOException { InputStream inputStream = request.getEntity().getContent(); return CharStreams.toString(new InputStreamReader(inputStream)); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/RowTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/RowTest.java index 66cc1a0500..79e134fe7b 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/RowTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/RowTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; import static org.junit.Assert.assertEquals; @@ -12,9 +11,7 @@ import org.junit.Test; import org.opensearch.sql.correctness.runner.resultset.Row; -/** - * Unit test {@link Row} - */ +/** Unit test {@link Row} */ public class RowTest { @Test @@ -47,5 +44,4 @@ public void shouldConsiderNullGreater() { row2.add("world"); assertEquals(1, row1.compareTo(row2)); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestConfigTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestConfigTest.java index c75bca15b1..42e75a7c36 100644 --- 
a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestConfigTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestConfigTest.java @@ -3,24 +3,21 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; import static java.util.Collections.emptyMap; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.emptyString; import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.is; -import static org.hamcrest.MatcherAssert.assertThat; import com.google.common.collect.ImmutableMap; import java.util.Map; import org.junit.Test; import org.opensearch.sql.correctness.TestConfig; -/** - * Tests for {@link TestConfig} - */ +/** Tests for {@link TestConfig} */ public class TestConfigTest { @Test @@ -31,9 +28,7 @@ public void testDefaultConfig() { config.getOtherDbConnectionNameAndUrls(), allOf( hasEntry("H2", "jdbc:h2:mem:test;DB_CLOSE_DELAY=-1"), - hasEntry("SQLite", "jdbc:sqlite::memory:") - ) - ); + hasEntry("SQLite", "jdbc:sqlite::memory:"))); } @Test @@ -45,18 +40,16 @@ public void testCustomESUrls() { @Test public void testCustomDbUrls() { - Map args = ImmutableMap.of("otherDbUrls", - "H2=jdbc:h2:mem:test;DB_CLOSE_DELAY=-1," - + "Derby=jdbc:derby:memory:myDb;create=true"); + Map args = + ImmutableMap.of( + "otherDbUrls", + "H2=jdbc:h2:mem:test;DB_CLOSE_DELAY=-1," + "Derby=jdbc:derby:memory:myDb;create=true"); TestConfig config = new TestConfig(args); assertThat( config.getOtherDbConnectionNameAndUrls(), allOf( hasEntry("H2", "jdbc:h2:mem:test;DB_CLOSE_DELAY=-1"), - hasEntry("Derby", "jdbc:derby:memory:myDb;create=true") - ) - ); + hasEntry("Derby", "jdbc:derby:memory:myDb;create=true"))); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestDataSetTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestDataSetTest.java index 
284e167d6b..7411df6a54 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestDataSetTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestDataSetTest.java @@ -3,19 +3,16 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; import static org.junit.Assert.assertEquals; -import static org.hamcrest.MatcherAssert.assertThat; import org.junit.Test; import org.opensearch.sql.correctness.testset.TestDataSet; -/** - * Tests for {@link TestDataSet} - */ +/** Tests for {@link TestDataSet} */ public class TestDataSetTest { @Test @@ -40,9 +37,7 @@ public void testDataSetWithSingleColumnData() { new Object[] {"field"}, new Object[] {"hello"}, new Object[] {"world"}, - new Object[] {"123"} - ) - ); + new Object[] {"123"})); } @Test @@ -61,16 +56,13 @@ public void testDataSetWithMultiColumnsData() { + " }\n" + "}"; - TestDataSet dataSet = new TestDataSet("test", mappings, - "field1,field2\nhello,123\nworld,456"); + TestDataSet dataSet = new TestDataSet("test", mappings, "field1,field2\nhello,123\nworld,456"); assertThat( dataSet.getDataRows(), contains( new Object[] {"field1", "field2"}, new Object[] {"hello", 123}, - new Object[] {"world", 456} - ) - ); + new Object[] {"world", 456})); } @Test @@ -86,17 +78,15 @@ public void testDataSetWithEscapedComma() { + " }\n" + "}"; - TestDataSet dataSet = new TestDataSet("test", mappings, - "field\n\"hello,world,123\"\n123\n\"[abc,def,ghi]\""); + TestDataSet dataSet = + new TestDataSet("test", mappings, "field\n\"hello,world,123\"\n123\n\"[abc,def,ghi]\""); assertThat( dataSet.getDataRows(), contains( new Object[] {"field"}, new Object[] {"hello,world,123"}, new Object[] {"123"}, - new Object[] {"[abc,def,ghi]"} - ) - ); + new Object[] {"[abc,def,ghi]"})); } @Test @@ -115,17 +105,13 @@ public void testDataSetWithNullData() { + " }\n" + "}"; - 
TestDataSet dataSet = new TestDataSet("test", mappings, - "field1,field2\n,123\nworld,\n,"); + TestDataSet dataSet = new TestDataSet("test", mappings, "field1,field2\n,123\nworld,\n,"); assertThat( dataSet.getDataRows(), contains( new Object[] {"field1", "field2"}, new Object[] {null, 123}, new Object[] {"world", null}, - new Object[] {null, null} - ) - ); + new Object[] {null, null})); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestQuerySetTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestQuerySetTest.java index 8ad9e6b921..08d360dfc7 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestQuerySetTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestQuerySetTest.java @@ -3,31 +3,21 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; -import static org.hamcrest.Matchers.contains; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; import org.junit.Test; import org.opensearch.sql.correctness.testset.TestQuerySet; -/** - * Tests for {@link TestQuerySet} - */ +/** Tests for {@link TestQuerySet} */ public class TestQuerySetTest { @Test public void testQuerySet() { TestQuerySet querySet = new TestQuerySet("SELECT * FROM accounts\nSELECT * FROM accounts LIMIT 5"); - assertThat( - querySet, - contains( - "SELECT * FROM accounts", - "SELECT * FROM accounts LIMIT 5" - ) - ); + assertThat(querySet, contains("SELECT * FROM accounts", "SELECT * FROM accounts LIMIT 5")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestReportTest.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestReportTest.java index 35b64fd5d6..515394c705 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestReportTest.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/TestReportTest.java @@ -3,7 +3,6 @@ 
* SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; import static java.util.Arrays.asList; @@ -20,9 +19,7 @@ import org.opensearch.sql.correctness.runner.resultset.Row; import org.opensearch.sql.correctness.runner.resultset.Type; -/** - * Test for {@link TestReport} - */ +/** Test for {@link TestReport} */ public class TestReportTest { private TestReport report = new TestReport(); @@ -31,22 +28,22 @@ public class TestReportTest { public void testSuccessReport() { report.addTestCase(new SuccessTestCase(1, "SELECT * FROM accounts")); JSONObject actual = new JSONObject(report); - JSONObject expected = new JSONObject( - "{" + - " \"summary\": {" + - " \"total\": 1," + - " \"success\": 1," + - " \"failure\": 0" + - " }," + - " \"tests\": [" + - " {" + - " \"id\": 1," + - " \"result\": 'Success'," + - " \"sql\": \"SELECT * FROM accounts\"," + - " }" + - " ]" + - "}" - ); + JSONObject expected = + new JSONObject( + "{" + + " \"summary\": {" + + " \"total\": 1," + + " \"success\": 1," + + " \"failure\": 0" + + " }," + + " \"tests\": [" + + " {" + + " \"id\": 1," + + " \"result\": 'Success'," + + " \"sql\": \"SELECT * FROM accounts\"," + + " }" + + " ]" + + "}"); if (!actual.similar(expected)) { fail("Actual JSON is different from expected: " + actual.toString(2)); @@ -55,54 +52,34 @@ public void testSuccessReport() { @Test public void testFailedReport() { - report.addTestCase(new FailedTestCase(1, "SELECT * FROM accounts", asList( - new DBResult("OpenSearch", singleton(new Type("firstName", "text")), - singleton(new Row(asList("hello")))), - new DBResult("H2", singleton(new Type("firstName", "text")), - singleton(new Row(asList("world"))))), - "[SQLITE_ERROR] SQL error or missing database;" - )); + report.addTestCase( + new FailedTestCase( + 1, + "SELECT * FROM accounts", + asList( + new DBResult( + "OpenSearch", + singleton(new Type("firstName", "text")), + singleton(new Row(asList("hello")))), + new DBResult( + "H2", + singleton(new 
Type("firstName", "text")), + singleton(new Row(asList("world"))))), + "[SQLITE_ERROR] SQL error or missing database;")); JSONObject actual = new JSONObject(report); - JSONObject expected = new JSONObject( - "{" + - " \"summary\": {" + - " \"total\": 1," + - " \"success\": 0," + - " \"failure\": 1" + - " }," + - " \"tests\": [" + - " {" + - " \"id\": 1," + - " \"result\": 'Failed'," + - " \"sql\": \"SELECT * FROM accounts\"," + - " \"explain\": \"Data row at [0] is different: this=[Row(values=[world])], other=[Row(values=[hello])]\"," + - " \"errors\": \"[SQLITE_ERROR] SQL error or missing database;\"," + - " \"resultSets\": [" + - " {" + - " \"database\": \"H2\"," + - " \"schema\": [" + - " {" + - " \"name\": \"firstName\"," + - " \"type\": \"text\"" + - " }" + - " ]," + - " \"dataRows\": [[\"world\"]]" + - " }," + - " {" + - " \"database\": \"OpenSearch\"," + - " \"schema\": [" + - " {" + - " \"name\": \"firstName\"," + - " \"type\": \"text\"" + - " }" + - " ]," + - " \"dataRows\": [[\"hello\"]]" + - " }" + - " ]" + - " }" + - " ]" + - "}" - ); + JSONObject expected = + new JSONObject( + "{ \"summary\": { \"total\": 1, \"success\": 0, \"failure\": 1 }, " + + " \"tests\": [ { \"id\": 1, \"result\": 'Failed', \"sql\":" + + " \"SELECT * FROM accounts\", \"explain\": \"Data row at [0] is different:" + + " this=[Row(values=[world])], other=[Row(values=[hello])]\", \"errors\":" + + " \"[SQLITE_ERROR] SQL error or missing database;\", \"resultSets\": [ " + + " { \"database\": \"H2\", \"schema\": [ { " + + " \"name\": \"firstName\", \"type\": \"text\" } " + + " ], \"dataRows\": [[\"world\"]] }, { " + + " \"database\": \"OpenSearch\", \"schema\": [ { " + + " \"name\": \"firstName\", \"type\": \"text\" } " + + " ], \"dataRows\": [[\"hello\"]] } ] } ]}"); if (!actual.similar(expected)) { fail("Actual JSON is different from expected: " + actual.toString(2)); @@ -113,27 +90,26 @@ public void testFailedReport() { public void testErrorReport() { report.addTestCase(new 
ErrorTestCase(1, "SELECT * FROM", "Missing table name in query")); JSONObject actual = new JSONObject(report); - JSONObject expected = new JSONObject( - "{" + - " \"summary\": {" + - " \"total\": 1," + - " \"success\": 0," + - " \"failure\": 1" + - " }," + - " \"tests\": [" + - " {" + - " \"id\": 1," + - " \"result\": 'Failed'," + - " \"sql\": \"SELECT * FROM\"," + - " \"reason\": \"Missing table name in query\"," + - " }" + - " ]" + - "}" - ); + JSONObject expected = + new JSONObject( + "{" + + " \"summary\": {" + + " \"total\": 1," + + " \"success\": 0," + + " \"failure\": 1" + + " }," + + " \"tests\": [" + + " {" + + " \"id\": 1," + + " \"result\": 'Failed'," + + " \"sql\": \"SELECT * FROM\"," + + " \"reason\": \"Missing table name in query\"," + + " }" + + " ]" + + "}"); if (!actual.similar(expected)) { fail("Actual JSON is different from expected: " + actual.toString(2)); } } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/UnitTests.java b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/UnitTests.java index 0bc5456069..367e2e10bf 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/tests/UnitTests.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/tests/UnitTests.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.tests; import org.junit.runner.RunWith; @@ -11,15 +10,14 @@ @RunWith(Suite.class) @Suite.SuiteClasses({ - ComparisonTestTest.class, - TestConfigTest.class, - TestDataSetTest.class, - TestQuerySetTest.class, - TestReportTest.class, - OpenSearchConnectionTest.class, - JDBCConnectionTest.class, - DBResultTest.class, - RowTest.class, + ComparisonTestTest.class, + TestConfigTest.class, + TestDataSetTest.class, + TestQuerySetTest.class, + TestReportTest.class, + OpenSearchConnectionTest.class, + JDBCConnectionTest.class, + DBResultTest.class, + RowTest.class, }) -public class UnitTests { -} +public class UnitTests {} diff --git 
a/integ-test/src/test/java/org/opensearch/sql/correctness/testset/TestDataSet.java b/integ-test/src/test/java/org/opensearch/sql/correctness/testset/TestDataSet.java index 66fc7c88af..3031260d0a 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/testset/TestDataSet.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/testset/TestDataSet.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.testset; import static java.util.stream.Collectors.joining; @@ -15,9 +14,7 @@ import org.json.JSONObject; import org.opensearch.sql.legacy.utils.StringUtils; -/** - * Test data set - */ +/** Test data set */ public class TestDataSet { private final String tableName; @@ -42,9 +39,7 @@ public List getDataRows() { return dataRows; } - /** - * Split columns in each line by separator and ignore escaped separator(s) in quoted string. - */ + /** Split columns in each line by separator and ignore escaped separator(s) in quoted string. */ private List splitColumns(String content, char separator) { List result = new ArrayList<>(); for (String line : content.split("\\r?\\n")) { @@ -76,8 +71,8 @@ private List splitColumns(String content, char separator) { } /** - * Convert column string values (read from CSV file) to objects of its real type - * based on the type information in index mapping file. + * Convert column string values (read from CSV file) to objects of its real type based on the type + * information in index mapping file. 
*/ private List convertStringDataToActualType(List rows) { JSONObject types = new JSONObject(schema); @@ -93,7 +88,8 @@ private List convertStringDataToActualType(List rows) { return result; } - private Object[] convertStringArrayToObjectArray(JSONObject types, String[] columnNames, String[] row) { + private Object[] convertStringArrayToObjectArray( + JSONObject types, String[] columnNames, String[] row) { Object[] result = new Object[row.length]; for (int i = 0; i < row.length; i++) { String colName = columnNames[i]; @@ -126,8 +122,8 @@ private Object convertStringToObject(String type, String str) { case "boolean": return Boolean.valueOf(str); default: - throw new IllegalStateException(StringUtils.format( - "Data type %s is not supported yet for value: %s", type, str)); + throw new IllegalStateException( + StringUtils.format("Data type %s is not supported yet for value: %s", type, str)); } } @@ -135,13 +131,15 @@ private Object convertStringToObject(String type, String str) { public String toString() { int total = dataRows.size(); return "Test data set :\n" - + " Table name: " + tableName + '\n' - + " Schema: " + schema + '\n' - + " Data rows (first 5 in " + total + "):" - + dataRows.stream(). - limit(5). - map(Arrays::toString). 
- collect(joining("\n ", "\n ", "\n")); + + " Table name: " + + tableName + + '\n' + + " Schema: " + + schema + + '\n' + + " Data rows (first 5 in " + + total + + "):" + + dataRows.stream().limit(5).map(Arrays::toString).collect(joining("\n ", "\n ", "\n")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/correctness/testset/TestQuerySet.java b/integ-test/src/test/java/org/opensearch/sql/correctness/testset/TestQuerySet.java index 7eee2cde9f..161d314c1d 100644 --- a/integ-test/src/test/java/org/opensearch/sql/correctness/testset/TestQuerySet.java +++ b/integ-test/src/test/java/org/opensearch/sql/correctness/testset/TestQuerySet.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.correctness.testset; import static java.util.stream.Collectors.joining; @@ -12,9 +11,7 @@ import java.util.Iterator; import java.util.List; -/** - * Test query set including SQL queries for comparison testing. - */ +/** Test query set including SQL queries for comparison testing. */ public class TestQuerySet implements Iterable { private List queries; @@ -49,10 +46,9 @@ private List lines(String content) { @Override public String toString() { int total = queries.size(); - return "SQL queries (first 5 in " + total + "):" - + queries.stream(). - limit(5). 
- collect(joining("\n ", "\n ", "\n")); + return "SQL queries (first 5 in " + + total + + "):" + + queries.stream().limit(5).collect(joining("\n ", "\n ", "\n")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/datasource/DataSourceAPIsIT.java b/integ-test/src/test/java/org/opensearch/sql/datasource/DataSourceAPIsIT.java index 86af85727d..e1d071d522 100644 --- a/integ-test/src/test/java/org/opensearch/sql/datasource/DataSourceAPIsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/datasource/DataSourceAPIsIT.java @@ -47,130 +47,155 @@ protected static void deleteDataSourcesCreated() throws IOException { @SneakyThrows @Test public void createDataSourceAPITest() { - //create datasource + // create datasource DataSourceMetadata createDSM = - new DataSourceMetadata("create_prometheus", DataSourceType.PROMETHEUS, - ImmutableList.of(), ImmutableMap.of("prometheus.uri", "https://localhost:9090", - "prometheus.auth.type","basicauth", - "prometheus.auth.username", "username", - "prometheus.auth.password", "password")); + new DataSourceMetadata( + "create_prometheus", + DataSourceType.PROMETHEUS, + ImmutableList.of(), + ImmutableMap.of( + "prometheus.uri", + "https://localhost:9090", + "prometheus.auth.type", + "basicauth", + "prometheus.auth.username", + "username", + "prometheus.auth.password", + "password")); Request createRequest = getCreateDataSourceRequest(createDSM); Response response = client().performRequest(createRequest); Assert.assertEquals(201, response.getStatusLine().getStatusCode()); String createResponseString = getResponseBody(response); Assert.assertEquals("Created DataSource with name create_prometheus", createResponseString); - //Datasource is not immediately created. so introducing a sleep of 2s. + // Datasource is not immediately created. so introducing a sleep of 2s. Thread.sleep(2000); - //get datasource to validate the creation. + // get datasource to validate the creation. 
Request getRequest = getFetchDataSourceRequest("create_prometheus"); Response getResponse = client().performRequest(getRequest); Assert.assertEquals(200, getResponse.getStatusLine().getStatusCode()); String getResponseString = getResponseBody(getResponse); DataSourceMetadata dataSourceMetadata = new Gson().fromJson(getResponseString, DataSourceMetadata.class); - Assert.assertEquals("https://localhost:9090", - dataSourceMetadata.getProperties().get("prometheus.uri")); + Assert.assertEquals( + "https://localhost:9090", dataSourceMetadata.getProperties().get("prometheus.uri")); } - @SneakyThrows @Test public void updateDataSourceAPITest() { - //create datasource + // create datasource DataSourceMetadata createDSM = - new DataSourceMetadata("update_prometheus", DataSourceType.PROMETHEUS, - ImmutableList.of(), ImmutableMap.of("prometheus.uri", "https://localhost:9090")); + new DataSourceMetadata( + "update_prometheus", + DataSourceType.PROMETHEUS, + ImmutableList.of(), + ImmutableMap.of("prometheus.uri", "https://localhost:9090")); Request createRequest = getCreateDataSourceRequest(createDSM); client().performRequest(createRequest); - //Datasource is not immediately created. so introducing a sleep of 2s. + // Datasource is not immediately created. so introducing a sleep of 2s. 
Thread.sleep(2000); - //update datasource + // update datasource DataSourceMetadata updateDSM = - new DataSourceMetadata("update_prometheus", DataSourceType.PROMETHEUS, - ImmutableList.of(), ImmutableMap.of("prometheus.uri", "https://randomtest.com:9090")); + new DataSourceMetadata( + "update_prometheus", + DataSourceType.PROMETHEUS, + ImmutableList.of(), + ImmutableMap.of("prometheus.uri", "https://randomtest.com:9090")); Request updateRequest = getUpdateDataSourceRequest(updateDSM); Response updateResponse = client().performRequest(updateRequest); Assert.assertEquals(200, updateResponse.getStatusLine().getStatusCode()); String updateResponseString = getResponseBody(updateResponse); Assert.assertEquals("Updated DataSource with name update_prometheus", updateResponseString); - //Datasource is not immediately updated. so introducing a sleep of 2s. + // Datasource is not immediately updated. so introducing a sleep of 2s. Thread.sleep(2000); - //update datasource with invalid URI + // update datasource with invalid URI updateDSM = - new DataSourceMetadata("update_prometheus", DataSourceType.PROMETHEUS, - ImmutableList.of(), ImmutableMap.of("prometheus.uri", "https://randomtest:9090")); - final Request illFormedUpdateRequest - = getUpdateDataSourceRequest(updateDSM); - ResponseException updateResponseException - = Assert.assertThrows(ResponseException.class, () -> client().performRequest(illFormedUpdateRequest)); + new DataSourceMetadata( + "update_prometheus", + DataSourceType.PROMETHEUS, + ImmutableList.of(), + ImmutableMap.of("prometheus.uri", "https://randomtest:9090")); + final Request illFormedUpdateRequest = getUpdateDataSourceRequest(updateDSM); + ResponseException updateResponseException = + Assert.assertThrows( + ResponseException.class, () -> client().performRequest(illFormedUpdateRequest)); Assert.assertEquals(400, updateResponseException.getResponse().getStatusLine().getStatusCode()); updateResponseString = 
getResponseBody(updateResponseException.getResponse()); JsonObject errorMessage = new Gson().fromJson(updateResponseString, JsonObject.class); - Assert.assertEquals("Invalid hostname in the uri: https://randomtest:9090", + Assert.assertEquals( + "Invalid hostname in the uri: https://randomtest:9090", errorMessage.get("error").getAsJsonObject().get("details").getAsString()); Thread.sleep(2000); - //get datasource to validate the modification. - //get datasource + // get datasource to validate the modification. + // get datasource Request getRequest = getFetchDataSourceRequest("update_prometheus"); Response getResponse = client().performRequest(getRequest); Assert.assertEquals(200, getResponse.getStatusLine().getStatusCode()); String getResponseString = getResponseBody(getResponse); DataSourceMetadata dataSourceMetadata = new Gson().fromJson(getResponseString, DataSourceMetadata.class); - Assert.assertEquals("https://randomtest.com:9090", - dataSourceMetadata.getProperties().get("prometheus.uri")); + Assert.assertEquals( + "https://randomtest.com:9090", dataSourceMetadata.getProperties().get("prometheus.uri")); } - @SneakyThrows @Test public void deleteDataSourceTest() { - //create datasource for deletion + // create datasource for deletion DataSourceMetadata createDSM = - new DataSourceMetadata("delete_prometheus", DataSourceType.PROMETHEUS, - ImmutableList.of(), ImmutableMap.of("prometheus.uri", "https://localhost:9090")); + new DataSourceMetadata( + "delete_prometheus", + DataSourceType.PROMETHEUS, + ImmutableList.of(), + ImmutableMap.of("prometheus.uri", "https://localhost:9090")); Request createRequest = getCreateDataSourceRequest(createDSM); client().performRequest(createRequest); - //Datasource is not immediately created. so introducing a sleep of 2s. + // Datasource is not immediately created. so introducing a sleep of 2s. 
Thread.sleep(2000); - //delete datasource + // delete datasource Request deleteRequest = getDeleteDataSourceRequest("delete_prometheus"); Response deleteResponse = client().performRequest(deleteRequest); Assert.assertEquals(204, deleteResponse.getStatusLine().getStatusCode()); - //Datasource is not immediately deleted. so introducing a sleep of 2s. + // Datasource is not immediately deleted. so introducing a sleep of 2s. Thread.sleep(2000); - //get datasources to verify the deletion + // get datasources to verify the deletion final Request prometheusGetRequest = getFetchDataSourceRequest("delete_prometheus"); - ResponseException prometheusGetResponseException - = Assert.assertThrows(ResponseException.class, () -> client().performRequest(prometheusGetRequest)); - Assert.assertEquals( 400, prometheusGetResponseException.getResponse().getStatusLine().getStatusCode()); - String prometheusGetResponseString = getResponseBody(prometheusGetResponseException.getResponse()); + ResponseException prometheusGetResponseException = + Assert.assertThrows( + ResponseException.class, () -> client().performRequest(prometheusGetRequest)); + Assert.assertEquals( + 400, prometheusGetResponseException.getResponse().getStatusLine().getStatusCode()); + String prometheusGetResponseString = + getResponseBody(prometheusGetResponseException.getResponse()); JsonObject errorMessage = new Gson().fromJson(prometheusGetResponseString, JsonObject.class); - Assert.assertEquals("DataSource with name: delete_prometheus doesn't exist.", + Assert.assertEquals( + "DataSource with name: delete_prometheus doesn't exist.", errorMessage.get("error").getAsJsonObject().get("details").getAsString()); - } @SneakyThrows @Test public void getAllDataSourceTest() { -//create datasource for deletion + // create datasource for deletion DataSourceMetadata createDSM = - new DataSourceMetadata("get_all_prometheus", DataSourceType.PROMETHEUS, - ImmutableList.of(), ImmutableMap.of("prometheus.uri", 
"https://localhost:9090")); + new DataSourceMetadata( + "get_all_prometheus", + DataSourceType.PROMETHEUS, + ImmutableList.of(), + ImmutableMap.of("prometheus.uri", "https://localhost:9090")); Request createRequest = getCreateDataSourceRequest(createDSM); client().performRequest(createRequest); - //Datasource is not immediately created. so introducing a sleep of 2s. + // Datasource is not immediately created. so introducing a sleep of 2s. Thread.sleep(2000); Request getRequest = getFetchDataSourceRequest(null); @@ -183,5 +208,4 @@ public void getAllDataSourceTest() { Assert.assertTrue( dataSourceMetadataList.stream().anyMatch(ds -> ds.getName().equals("get_all_prometheus"))); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/datasource/DatasourceClusterSettingsIT.java b/integ-test/src/test/java/org/opensearch/sql/datasource/DatasourceClusterSettingsIT.java index 7d562c1e9b..01e3509e12 100644 --- a/integ-test/src/test/java/org/opensearch/sql/datasource/DatasourceClusterSettingsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/datasource/DatasourceClusterSettingsIT.java @@ -19,25 +19,33 @@ public class DatasourceClusterSettingsIT extends PPLIntegTestCase { private static final Logger LOG = LogManager.getLogger(); + @Test public void testGetDatasourceClusterSettings() throws IOException { JSONObject clusterSettings = getAllClusterSettings(); - assertThat(clusterSettings.query("/defaults/plugins.query.datasources.encryption.masterkey"), + assertThat( + clusterSettings.query("/defaults/plugins.query.datasources.encryption.masterkey"), equalTo(null)); } - @Test public void testPutDatasourceClusterSettings() throws IOException { final ResponseException exception = - expectThrows(ResponseException.class, () -> updateClusterSettings(new ClusterSetting(PERSISTENT, - "plugins.query.datasources.encryption.masterkey", - "masterkey"))); + expectThrows( + ResponseException.class, + () -> + updateClusterSettings( + new ClusterSetting( + PERSISTENT, + 
"plugins.query.datasources.encryption.masterkey", + "masterkey"))); JSONObject resp = new JSONObject(TestUtils.getResponseBody(exception.getResponse())); assertThat(resp.getInt("status"), equalTo(400)); - assertThat(resp.query("/error/root_cause/0/reason"), - equalTo("final persistent setting [plugins.query.datasources.encryption.masterkey], not updateable")); + assertThat( + resp.query("/error/root_cause/0/reason"), + equalTo( + "final persistent setting [plugins.query.datasources.encryption.masterkey], not" + + " updateable")); assertThat(resp.query("/error/type"), equalTo("illegal_argument_exception")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/jdbc/CursorIT.java b/integ-test/src/test/java/org/opensearch/sql/jdbc/CursorIT.java index 7691c00ea5..b3f2c1f0ca 100644 --- a/integ-test/src/test/java/org/opensearch/sql/jdbc/CursorIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/jdbc/CursorIT.java @@ -28,12 +28,12 @@ import org.junit.AfterClass; import org.junit.Assume; import org.junit.BeforeClass; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayNameGeneration; import org.junit.jupiter.api.DisplayNameGenerator; +import org.junit.jupiter.api.Test; import org.opensearch.client.Request; import org.opensearch.client.RequestOptions; import org.opensearch.client.Response; @@ -66,12 +66,14 @@ public void init() { public static void initConnection() { var driverFile = System.getProperty("jdbcFile"); if (driverFile != null) { - URLClassLoader loader = new URLClassLoader( - new URL[]{new File(driverFile).toURI().toURL()}, - ClassLoader.getSystemClassLoader() - ); - Driver driver = (Driver) Class.forName("org.opensearch.jdbc.Driver", true, loader) - .getDeclaredConstructor().newInstance(); + URLClassLoader loader = + new URLClassLoader( + new URL[] {new 
File(driverFile).toURI().toURL()}, ClassLoader.getSystemClassLoader()); + Driver driver = + (Driver) + Class.forName("org.opensearch.jdbc.Driver", true, loader) + .getDeclaredConstructor() + .newInstance(); connection = driver.connect(getConnectionString(), null); } else { connection = DriverManager.getConnection(getConnectionString()); @@ -93,7 +95,8 @@ public static void closeConnection() { @SneakyThrows public void check_driver_version() { var version = System.getProperty("jdbcDriverVersion"); - Assume.assumeTrue("Parameter `jdbcDriverVersion` is not given, test platform uses default driver version", + Assume.assumeTrue( + "Parameter `jdbcDriverVersion` is not given, test platform uses default driver version", version != null); assertEquals(version, connection.getMetaData().getDriverVersion()); } @@ -103,11 +106,13 @@ public void check_driver_version() { public void select_all_no_cursor() { Statement stmt = connection.createStatement(); - for (var table : List.of(TEST_INDEX_CALCS, TEST_INDEX_ONLINE, TEST_INDEX_BANK, TEST_INDEX_ACCOUNT)) { + for (var table : + List.of(TEST_INDEX_CALCS, TEST_INDEX_ONLINE, TEST_INDEX_BANK, TEST_INDEX_ACCOUNT)) { var query = String.format("SELECT * FROM %s", table); ResultSet rs = stmt.executeQuery(query); int rows = 0; - for (; rs.next(); rows++) ; + for (; rs.next(); rows++) + ; var restResponse = executeRestQuery(query, null); assertEquals(rows, restResponse.getInt("total")); @@ -119,11 +124,13 @@ public void select_all_no_cursor() { public void select_count_all_no_cursor() { Statement stmt = connection.createStatement(); - for (var table : List.of(TEST_INDEX_CALCS, TEST_INDEX_ONLINE, TEST_INDEX_BANK, TEST_INDEX_ACCOUNT)) { + for (var table : + List.of(TEST_INDEX_CALCS, TEST_INDEX_ONLINE, TEST_INDEX_BANK, TEST_INDEX_ACCOUNT)) { var query = String.format("SELECT COUNT(*) FROM %s", table); ResultSet rs = stmt.executeQuery(query); int rows = 0; - for (; rs.next(); rows++) ; + for (; rs.next(); rows++) + ; var restResponse = 
executeRestQuery(query, null); assertEquals(rows, restResponse.getInt("total")); @@ -140,7 +147,8 @@ public void select_all_small_table_big_cursor() { stmt.setFetchSize(200); ResultSet rs = stmt.executeQuery(query); int rows = 0; - for (; rs.next(); rows++) ; + for (; rs.next(); rows++) + ; var restResponse = executeRestQuery(query, null); assertEquals(rows, restResponse.getInt("total")); @@ -157,7 +165,8 @@ public void select_all_small_table_small_cursor() { stmt.setFetchSize(3); ResultSet rs = stmt.executeQuery(query); int rows = 0; - for (; rs.next(); rows++) ; + for (; rs.next(); rows++) + ; var restResponse = executeRestQuery(query, null); assertEquals(rows, restResponse.getInt("total")); @@ -174,7 +183,8 @@ public void select_all_big_table_small_cursor() { stmt.setFetchSize(10); ResultSet rs = stmt.executeQuery(query); int rows = 0; - for (; rs.next(); rows++) ; + for (; rs.next(); rows++) + ; var restResponse = executeRestQuery(query, null); assertEquals(rows, restResponse.getInt("total")); @@ -191,16 +201,15 @@ public void select_all_big_table_big_cursor() { stmt.setFetchSize(500); ResultSet rs = stmt.executeQuery(query); int rows = 0; - for (; rs.next(); rows++) ; + for (; rs.next(); rows++) + ; var restResponse = executeRestQuery(query, null); assertEquals(rows, restResponse.getInt("total")); } } - /** - * Use OpenSearch cluster initialized by OpenSearch Gradle task. - */ + /** Use OpenSearch cluster initialized by OpenSearch Gradle task. 
*/ private static String getConnectionString() { // string like "[::1]:46751,127.0.0.1:34403" var clusterUrls = System.getProperty("tests.rest.cluster").split(","); @@ -211,7 +220,8 @@ private static String getConnectionString() { protected JSONObject executeRestQuery(String query, @Nullable Integer fetch_size) { Request request = new Request("POST", QUERY_API_ENDPOINT); if (fetch_size != null) { - request.setJsonEntity(String.format("{ \"query\": \"%s\", \"fetch_size\": %d }", query, fetch_size)); + request.setJsonEntity( + String.format("{ \"query\": \"%s\", \"fetch_size\": %d }", query, fetch_size)); } else { request.setJsonEntity(String.format("{ \"query\": \"%s\" }", query)); } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationExpressionIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationExpressionIT.java index af6e2ad492..37398220ff 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationExpressionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationExpressionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.opensearch.sql.util.MatcherUtils.rows; @@ -12,8 +11,6 @@ import static org.opensearch.sql.util.MatcherUtils.verifySchema; import org.json.JSONObject; -import org.junit.Assume; -import org.junit.Ignore; import org.junit.Test; public class AggregationExpressionIT extends SQLIntegTestCase { @@ -26,10 +23,9 @@ protected void init() throws Exception { @Test public void noGroupKeySingleFuncOverAggWithoutAliasShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT abs(MAX(age)) " + - "FROM %s", - Index.ACCOUNT.getName())); + JSONObject response = + executeJdbcRequest( + String.format("SELECT abs(MAX(age)) " + "FROM %s", Index.ACCOUNT.getName())); verifySchema(response, schema("abs(MAX(age))", null, "long")); verifyDataRows(response, rows(40)); @@ -37,10 +33,10 @@ public void 
noGroupKeySingleFuncOverAggWithoutAliasShouldPass() { @Test public void noGroupKeyMaxAddMinShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT MAX(age) + MIN(age) as addValue " + - "FROM %s", - Index.ACCOUNT.getName())); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT MAX(age) + MIN(age) as addValue " + "FROM %s", Index.ACCOUNT.getName())); verifySchema(response, schema("MAX(age) + MIN(age)", "addValue", "long")); verifyDataRows(response, rows(60)); @@ -48,10 +44,9 @@ public void noGroupKeyMaxAddMinShouldPass() { @Test public void noGroupKeyMaxAddLiteralShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT MAX(age) + 1 as `add` " + - "FROM %s", - Index.ACCOUNT.getName())); + JSONObject response = + executeJdbcRequest( + String.format("SELECT MAX(age) + 1 as `add` " + "FROM %s", Index.ACCOUNT.getName())); verifySchema(response, schema("MAX(age) + 1", "add", "long")); verifyDataRows(response, rows(41)); @@ -59,10 +54,9 @@ public void noGroupKeyMaxAddLiteralShouldPass() { @Test public void noGroupKeyAvgOnIntegerShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT AVG(age) as `avg` " + - "FROM %s", - Index.BANK.getName())); + JSONObject response = + executeJdbcRequest( + String.format("SELECT AVG(age) as `avg` " + "FROM %s", Index.BANK.getName())); verifySchema(response, schema("AVG(age)", "avg", "double")); verifyDataRows(response, rows(34D)); @@ -70,58 +64,49 @@ public void noGroupKeyAvgOnIntegerShouldPass() { @Test public void hasGroupKeyAvgOnIntegerShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT gender, AVG(age) as `avg` " + - "FROM %s " + - "GROUP BY gender", - Index.BANK.getName())); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT gender, AVG(age) as `avg` " + "FROM %s " + "GROUP BY gender", + Index.BANK.getName())); - verifySchema(response, - schema("gender", null, "text"), - 
schema("AVG(age)", "avg", "double")); - verifyDataRows(response, - rows("m", 34.25), - rows("f", 33.666666666666664d)); + verifySchema(response, schema("gender", null, "text"), schema("AVG(age)", "avg", "double")); + verifyDataRows(response, rows("m", 34.25), rows("f", 33.666666666666664d)); } @Test public void hasGroupKeyMaxAddMinShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT gender, MAX(age) + MIN(age) as addValue " + - "FROM %s " + - "GROUP BY gender", - Index.ACCOUNT.getName())); - - verifySchema(response, + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT gender, MAX(age) + MIN(age) as addValue " + "FROM %s " + "GROUP BY gender", + Index.ACCOUNT.getName())); + + verifySchema( + response, schema("gender", null, "text"), schema("MAX(age) + MIN(age)", "addValue", "long")); - verifyDataRows(response, - rows("m", 60), - rows("f", 60)); + verifyDataRows(response, rows("m", 60), rows("f", 60)); } @Test public void hasGroupKeyMaxAddLiteralShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT gender, MAX(age) + 1 as `add` " + - "FROM %s " + - "GROUP BY gender", - Index.ACCOUNT.getName())); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT gender, MAX(age) + 1 as `add` " + "FROM %s " + "GROUP BY gender", + Index.ACCOUNT.getName())); - verifySchema(response, - schema("gender", null, "text"), - schema("MAX(age) + 1", "add", "long")); - verifyDataRows(response, - rows("m", 41), - rows("f", 41)); + verifySchema(response, schema("gender", null, "text"), schema("MAX(age) + 1", "add", "long")); + verifyDataRows(response, rows("m", 41), rows("f", 41)); } @Test public void noGroupKeyLogMaxAddMinShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT Log(MAX(age) + MIN(age)) as `log` " + - "FROM %s", - Index.ACCOUNT.getName())); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT Log(MAX(age) + MIN(age)) as `log` " + "FROM 
%s", Index.ACCOUNT.getName())); verifySchema(response, schema("Log(MAX(age) + MIN(age))", "log", "double")); verifyDataRows(response, rows(4.0943445622221d)); @@ -129,117 +114,124 @@ public void noGroupKeyLogMaxAddMinShouldPass() { @Test public void hasGroupKeyLogMaxAddMinShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT gender, Log(MAX(age) + MIN(age)) as logValue " + - "FROM %s " + - "GROUP BY gender", - Index.ACCOUNT.getName())); - - verifySchema(response, + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT gender, Log(MAX(age) + MIN(age)) as logValue " + + "FROM %s " + + "GROUP BY gender", + Index.ACCOUNT.getName())); + + verifySchema( + response, schema("gender", null, "text"), schema("Log(MAX(age) + MIN(age))", "logValue", "double")); - verifyDataRows(response, - rows("m", 4.0943445622221d), - rows("f", 4.0943445622221d)); + verifyDataRows(response, rows("m", 4.0943445622221d), rows("f", 4.0943445622221d)); } @Test public void AddLiteralOnGroupKeyShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT gender, age+10, max(balance) as `max` " + - "FROM %s " + - "WHERE gender = 'm' and age < 22 " + - "GROUP BY gender, age " + - "ORDER BY age", - Index.ACCOUNT.getName())); - - verifySchema(response, + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT gender, age+10, max(balance) as `max` " + + "FROM %s " + + "WHERE gender = 'm' and age < 22 " + + "GROUP BY gender, age " + + "ORDER BY age", + Index.ACCOUNT.getName())); + + verifySchema( + response, schema("gender", null, "text"), schema("age+10", null, "long"), schema("max(balance)", "max", "long")); - verifyDataRows(response, - rows("m", 30, 49568), - rows("m", 31, 49433)); + verifyDataRows(response, rows("m", 30, 49568), rows("m", 31, 49433)); } @Test public void logWithAddLiteralOnGroupKeyShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT gender, Log(age+10) as logAge, 
max(balance) as max " + - "FROM %s " + - "WHERE gender = 'm' and age < 22 " + - "GROUP BY gender, age " + - "ORDER BY age", - Index.ACCOUNT.getName())); - - verifySchema(response, + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT gender, Log(age+10) as logAge, max(balance) as max " + + "FROM %s " + + "WHERE gender = 'm' and age < 22 " + + "GROUP BY gender, age " + + "ORDER BY age", + Index.ACCOUNT.getName())); + + verifySchema( + response, schema("gender", null, "text"), schema("Log(age+10)", "logAge", "double"), schema("max(balance)", "max", "long")); - verifyDataRows(response, - rows("m", 3.4011973816621555d, 49568), - rows("m", 3.4339872044851463d, 49433)); + verifyDataRows( + response, rows("m", 3.4011973816621555d, 49568), rows("m", 3.4339872044851463d, 49433)); } @Test public void logWithAddLiteralOnGroupKeyAndMaxSubtractLiteralShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT gender, Log(age+10) as logAge, max(balance) - 100 as max " + - "FROM %s " + - "WHERE gender = 'm' and age < 22 " + - "GROUP BY gender, age " + - "ORDER BY age", - Index.ACCOUNT.getName())); - - verifySchema(response, + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT gender, Log(age+10) as logAge, max(balance) - 100 as max " + + "FROM %s " + + "WHERE gender = 'm' and age < 22 " + + "GROUP BY gender, age " + + "ORDER BY age", + Index.ACCOUNT.getName())); + + verifySchema( + response, schema("gender", null, "text"), schema("Log(age+10)", "logAge", "double"), schema("max(balance) - 100", "max", "long")); - verifyDataRows(response, - rows("m", 3.4011973816621555d, 49468), - rows("m", 3.4339872044851463d, 49333)); + verifyDataRows( + response, rows("m", 3.4011973816621555d, 49468), rows("m", 3.4339872044851463d, 49333)); } - /** - * The date is in JDBC format. - */ + /** The date is in JDBC format. 
*/ @Test public void groupByDateShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT birthdate, count(*) as `count` " + - "FROM %s " + - "WHERE age < 30 " + - "GROUP BY birthdate ", - Index.BANK.getName())); - - verifySchema(response, - schema("birthdate", null, "timestamp"), - schema("count(*)", "count", "integer")); - verifyDataRows(response, - rows("2018-06-23 00:00:00", 1)); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT birthdate, count(*) as `count` " + + "FROM %s " + + "WHERE age < 30 " + + "GROUP BY birthdate ", + Index.BANK.getName())); + + verifySchema( + response, schema("birthdate", null, "timestamp"), schema("count(*)", "count", "integer")); + verifyDataRows(response, rows("2018-06-23 00:00:00", 1)); } @Test public void groupByDateWithAliasShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT birthdate as birth, count(*) as `count` " + - "FROM %s " + - "WHERE age < 30 " + - "GROUP BY birthdate ", - Index.BANK.getName())); - - verifySchema(response, + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT birthdate as birth, count(*) as `count` " + + "FROM %s " + + "WHERE age < 30 " + + "GROUP BY birthdate ", + Index.BANK.getName())); + + verifySchema( + response, schema("birthdate", "birth", "timestamp"), schema("count(*)", "count", "integer")); - verifyDataRows(response, - rows("2018-06-23 00:00:00", 1)); + verifyDataRows(response, rows("2018-06-23 00:00:00", 1)); } @Test public void aggregateCastStatementShouldNotReturnZero() { - JSONObject response = executeJdbcRequest(String.format( - "SELECT SUM(CAST(male AS INT)) AS male_sum FROM %s", - Index.BANK.getName())); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT SUM(CAST(male AS INT)) AS male_sum FROM %s", Index.BANK.getName())); verifySchema(response, schema("SUM(CAST(male AS INT))", "male_sum", "integer")); verifyDataRows(response, rows(4)); @@ -247,8 +239,8 @@ public void 
aggregateCastStatementShouldNotReturnZero() { @Test public void groupByConstantShouldPass() { - JSONObject response = executeJdbcRequest(String.format( - "select 1 from %s GROUP BY 1", Index.BANK.getName())); + JSONObject response = + executeJdbcRequest(String.format("select 1 from %s GROUP BY 1", Index.BANK.getName())); verifySchema(response, schema("1", null, "integer")); verifyDataRows(response, rows(1)); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationIT.java index 3abf57ddcb..38fefe0fe1 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/AggregationIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.containsString; @@ -60,8 +59,9 @@ public void countTest() throws IOException { @Ignore("The distinct is not supported in new engine") public void countDistinctTest() { - JSONObject response = executeJdbcRequest( - String.format("SELECT COUNT(distinct gender) FROM %s", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeJdbcRequest( + String.format("SELECT COUNT(distinct gender) FROM %s", TEST_INDEX_ACCOUNT)); verifySchema(response, schema("COUNT(DISTINCT gender)", null, "integer")); verifyDataRows(response, rows(2)); @@ -71,8 +71,9 @@ public void countDistinctTest() { public void countWithDocsHintTest() throws Exception { JSONObject result = - executeQuery(String.format("SELECT /*! DOCS_WITH_AGGREGATION(10) */ count(*) from %s", - TEST_INDEX_ACCOUNT)); + executeQuery( + String.format( + "SELECT /*! 
DOCS_WITH_AGGREGATION(10) */ count(*) from %s", TEST_INDEX_ACCOUNT)); JSONArray hits = (JSONArray) result.query("/hits/hits"); Assert.assertThat(hits.length(), equalTo(10)); } @@ -83,8 +84,8 @@ public void sumTest() throws IOException { JSONObject result = executeQuery(String.format("SELECT SUM(balance) FROM %s", TEST_INDEX_ACCOUNT)); Assert.assertThat(getTotalHits(result), equalTo(1000)); - Assert.assertThat(getDoubleAggregationValue(result, "SUM(balance)", "value"), - equalTo(25714837.0)); + Assert.assertThat( + getDoubleAggregationValue(result, "SUM(balance)", "value"), equalTo(25714837.0)); } @Test @@ -127,23 +128,28 @@ public void statsTest() throws IOException { @Test public void extendedStatsTest() throws IOException { - JSONObject result = executeQuery(String.format("SELECT EXTENDED_STATS(age) FROM %s", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery(String.format("SELECT EXTENDED_STATS(age) FROM %s", TEST_INDEX_ACCOUNT)); Assert.assertThat(getTotalHits(result), equalTo(1000)); - Assert - .assertThat(getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "min"), equalTo(20.0)); - Assert - .assertThat(getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "max"), equalTo(40.0)); - Assert.assertThat(getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "avg"), - equalTo(30.171)); - Assert.assertThat(getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "sum"), - equalTo(30171.0)); - Assert.assertThat(getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "sum_of_squares"), + Assert.assertThat( + getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "min"), equalTo(20.0)); + Assert.assertThat( + getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "max"), equalTo(40.0)); + Assert.assertThat( + getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "avg"), equalTo(30.171)); + Assert.assertThat( + getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "sum"), equalTo(30171.0)); + Assert.assertThat( + 
getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "sum_of_squares"), equalTo(946393.0)); - Assert.assertEquals(6.008640362012022, - getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "std_deviation"), 0.0001); - Assert.assertEquals(36.10375899999996, - getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "variance"), 0.0001); + Assert.assertEquals( + 6.008640362012022, + getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "std_deviation"), + 0.0001); + Assert.assertEquals( + 36.10375899999996, + getDoubleAggregationValue(result, "EXTENDED_STATS(age)", "variance"), + 0.0001); } @Test @@ -152,72 +158,73 @@ public void percentileTest() throws IOException { JSONObject result = executeQuery(String.format("SELECT PERCENTILES(age) FROM %s", TEST_INDEX_ACCOUNT)); Assert.assertThat(getTotalHits(result), equalTo(1000)); - Assert - .assertEquals(20.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "1.0"), - 0.001); - Assert - .assertEquals(21.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "5.0"), - 0.001); - Assert - .assertEquals(25.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "25.0"), - 0.001); - // All percentiles are approximations calculated by t-digest, however, P50 has the widest distribution (not sure why) - Assert - .assertEquals(30.5, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "50.0"), - 0.6); - Assert - .assertEquals(35.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "75.0"), - 0.001); - Assert - .assertEquals(39.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "95.0"), - 0.001); - Assert - .assertEquals(40.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "99.0"), - 0.001); + Assert.assertEquals( + 20.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "1.0"), 0.001); + Assert.assertEquals( + 21.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "5.0"), 0.001); + 
Assert.assertEquals( + 25.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "25.0"), 0.001); + // All percentiles are approximations calculated by t-digest, however, P50 has the widest + // distribution (not sure why) + Assert.assertEquals( + 30.5, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "50.0"), 0.6); + Assert.assertEquals( + 35.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "75.0"), 0.001); + Assert.assertEquals( + 39.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "95.0"), 0.001); + Assert.assertEquals( + 40.0, getDoubleAggregationValue(result, "PERCENTILES(age)", "values", "99.0"), 0.001); } @Test public void percentileTestSpecific() throws IOException { - JSONObject result = executeQuery(String.format("SELECT PERCENTILES(age,25.0,75.0) FROM %s", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format("SELECT PERCENTILES(age,25.0,75.0) FROM %s", TEST_INDEX_ACCOUNT)); Assert.assertThat(getTotalHits(result), equalTo(1000)); - Assert.assertEquals(25.0, - getDoubleAggregationValue(result, "PERCENTILES(age,25.0,75.0)", "values", "25.0"), 0.001); - Assert.assertEquals(35.0, - getDoubleAggregationValue(result, "PERCENTILES(age,25.0,75.0)", "values", "75.0"), 0.001); + Assert.assertEquals( + 25.0, + getDoubleAggregationValue(result, "PERCENTILES(age,25.0,75.0)", "values", "25.0"), + 0.001); + Assert.assertEquals( + 35.0, + getDoubleAggregationValue(result, "PERCENTILES(age,25.0,75.0)", "values", "75.0"), + 0.001); } @Test public void aliasTest() throws IOException { - JSONObject result = executeQuery(String.format("SELECT COUNT(*) AS mycount FROM %s", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery(String.format("SELECT COUNT(*) AS mycount FROM %s", TEST_INDEX_ACCOUNT)); Assert.assertThat(getTotalHits(result), equalTo(1000)); Assert.assertThat(getIntAggregationValue(result, "mycount", "value"), equalTo(1000)); } @Test public void groupByTest() throws 
Exception { - JSONObject result = executeQuery(String.format("SELECT COUNT(*) FROM %s GROUP BY gender", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery(String.format("SELECT COUNT(*) FROM %s GROUP BY gender", TEST_INDEX_ACCOUNT)); assertResultForGroupByTest(result); } @Test public void groupByUsingTableAliasTest() throws Exception { - JSONObject result = executeQuery(String.format("SELECT COUNT(*) FROM %s a GROUP BY a.gender", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format("SELECT COUNT(*) FROM %s a GROUP BY a.gender", TEST_INDEX_ACCOUNT)); assertResultForGroupByTest(result); } @Test public void groupByUsingTableNamePrefixTest() throws Exception { - JSONObject result = executeQuery(String.format( - "SELECT COUNT(*) FROM %s GROUP BY opensearch-sql_test_index_account.gender", - TEST_INDEX_ACCOUNT - )); + JSONObject result = + executeQuery( + String.format( + "SELECT COUNT(*) FROM %s GROUP BY opensearch-sql_test_index_account.gender", + TEST_INDEX_ACCOUNT)); assertResultForGroupByTest(result); } @@ -241,31 +248,34 @@ private void assertResultForGroupByTest(JSONObject result) { @Test public void groupByHavingTest() throws Exception { - JSONObject result = executeQuery(String.format( - "SELECT gender " + - "FROM %s " + - "GROUP BY gender " + - "HAVING COUNT(*) > 0", TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format( + "SELECT gender " + "FROM %s " + "GROUP BY gender " + "HAVING COUNT(*) > 0", + TEST_INDEX_ACCOUNT)); assertResultForGroupByHavingTest(result); } @Test public void groupByHavingUsingTableAliasTest() throws Exception { - JSONObject result = executeQuery(String.format( - "SELECT a.gender " + - "FROM %s a " + - "GROUP BY a.gender " + - "HAVING COUNT(*) > 0", TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format( + "SELECT a.gender " + "FROM %s a " + "GROUP BY a.gender " + "HAVING COUNT(*) > 0", + TEST_INDEX_ACCOUNT)); assertResultForGroupByHavingTest(result); } @Test 
public void groupByHavingUsingTableNamePrefixTest() throws Exception { - JSONObject result = executeQuery(String.format( - "SELECT opensearch-sql_test_index_account.gender " + - "FROM %s " + - "GROUP BY opensearch-sql_test_index_account.gender " + - "HAVING COUNT(*) > 0", TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format( + "SELECT opensearch-sql_test_index_account.gender " + + "FROM %s " + + "GROUP BY opensearch-sql_test_index_account.gender " + + "HAVING COUNT(*) > 0", + TEST_INDEX_ACCOUNT)); assertResultForGroupByHavingTest(result); } @@ -287,15 +297,17 @@ private void assertResultForGroupByHavingTest(JSONObject result) { Assert.assertThat(gender.query(femaleBucketPrefix + "/count_0/value"), equalTo(493)); } - @Ignore //todo VerificationException: table alias or field name missing + @Ignore // todo VerificationException: table alias or field name missing @Test public void groupBySubqueryTest() throws Exception { - JSONObject result = executeQuery(String.format( - "SELECT COUNT(*) FROM %s " + - "WHERE firstname IN (SELECT firstname FROM %s) " + - "GROUP BY gender", - TEST_INDEX_ACCOUNT, TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format( + "SELECT COUNT(*) FROM %s " + + "WHERE firstname IN (SELECT firstname FROM %s) " + + "GROUP BY gender", + TEST_INDEX_ACCOUNT, TEST_INDEX_ACCOUNT)); Assert.assertThat(getTotalHits(result), equalTo(1000)); JSONObject gender = getAggregation(result, "gender"); Assert.assertThat(gender.getJSONArray("buckets").length(), equalTo(2)); @@ -316,9 +328,12 @@ public void groupBySubqueryTest() throws Exception { @Test public void postFilterTest() throws Exception { - JSONObject result = executeQuery(String.format("SELECT /*! POST_FILTER({\\\"term\\\":" + - "{\\\"gender\\\":\\\"m\\\"}}) */ COUNT(*) FROM %s GROUP BY gender", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format( + "SELECT /*! 
POST_FILTER({\\\"term\\\":" + + "{\\\"gender\\\":\\\"m\\\"}}) */ COUNT(*) FROM %s GROUP BY gender", + TEST_INDEX_ACCOUNT)); Assert.assertThat(getTotalHits(result), equalTo(507)); JSONObject gender = getAggregation(result, "gender"); Assert.assertThat(gender.getJSONArray("buckets").length(), equalTo(2)); @@ -339,9 +354,12 @@ public void postFilterTest() throws Exception { @Test public void multipleGroupByTest() throws Exception { - JSONObject result = executeQuery(String.format("SELECT COUNT(*) FROM %s GROUP BY gender," + - " terms('field'='age','size'=200,'alias'='age')", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format( + "SELECT COUNT(*) FROM %s GROUP BY gender," + + " terms('field'='age','size'=200,'alias'='age')", + TEST_INDEX_ACCOUNT)); Assert.assertThat(getTotalHits(result), equalTo(1000)); JSONObject gender = getAggregation(result, "gender"); Assert.assertThat(gender.getJSONArray("buckets").length(), equalTo(2)); @@ -364,9 +382,11 @@ public void multipleGroupByTest() throws Exception { final Set actualAgesM = new HashSet<>(expectedAges.size()); final Set actualAgesF = new HashSet<>(expectedAges.size()); - mAgeBuckets.iterator() + mAgeBuckets + .iterator() .forEachRemaining(json -> actualAgesM.add(((JSONObject) json).getInt("key"))); - fAgeBuckets.iterator() + fAgeBuckets + .iterator() .forEachRemaining(json -> actualAgesF.add(((JSONObject) json).getInt("key"))); Assert.assertThat(actualAgesM, equalTo(expectedAges)); @@ -376,9 +396,12 @@ public void multipleGroupByTest() throws Exception { @Test public void multipleGroupBysWithSize() throws Exception { - JSONObject result = executeQuery(String.format("SELECT COUNT(*) FROM %s GROUP BY gender," + - " terms('alias'='ageAgg','field'='age','size'=3)", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format( + "SELECT COUNT(*) FROM %s GROUP BY gender," + + " terms('alias'='ageAgg','field'='age','size'=3)", + TEST_INDEX_ACCOUNT)); 
Assert.assertThat(getTotalHits(result), equalTo(1000)); JSONObject gender = getAggregation(result, "gender"); Assert.assertThat(gender.getJSONArray("buckets").length(), equalTo(2)); @@ -393,9 +416,12 @@ public void multipleGroupBysWithSize() throws Exception { @Test public void termsWithSize() throws Exception { - JSONObject result = executeQuery(String.format("SELECT COUNT(*) FROM %s GROUP BY terms" + - "('alias'='ageAgg','field'='age','size'=3)", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format( + "SELECT COUNT(*) FROM %s GROUP BY terms" + + "('alias'='ageAgg','field'='age','size'=3)", + TEST_INDEX_ACCOUNT)); Assert.assertThat(getTotalHits(result), equalTo(1000)); JSONObject gender = getAggregation(result, "ageAgg"); Assert.assertThat(gender.getJSONArray("buckets").length(), equalTo(3)); @@ -404,9 +430,12 @@ public void termsWithSize() throws Exception { @Test public void termsWithMissing() throws Exception { - JSONObject result = executeQuery(String.format("SELECT count(*) FROM %s GROUP BY terms" + - "('alias'='nick','field'='nickname','missing'='no_nickname')", - TEST_INDEX_GAME_OF_THRONES)); + JSONObject result = + executeQuery( + String.format( + "SELECT count(*) FROM %s GROUP BY terms" + + "('alias'='nick','field'='nickname','missing'='no_nickname')", + TEST_INDEX_GAME_OF_THRONES)); JSONObject nick = getAggregation(result, "nick"); Optional noNicknameBucket = Optional.empty(); @@ -427,9 +456,12 @@ public void termsWithOrder() throws Exception { final String dog1 = "snoopy"; final String dog2 = "rex"; - JSONObject result = executeQuery(String.format("SELECT count(*) FROM %s GROUP BY terms" + - "('field'='dog_name', 'alias'='dog_name', 'order'='desc')", - TEST_INDEX_DOG)); + JSONObject result = + executeQuery( + String.format( + "SELECT count(*) FROM %s GROUP BY terms" + + "('field'='dog_name', 'alias'='dog_name', 'order'='desc')", + TEST_INDEX_DOG)); JSONObject dogName = getAggregation(result, "dog_name"); String firstDog = 
(String) (dogName.optQuery("/buckets/0/key")); @@ -437,8 +469,12 @@ public void termsWithOrder() throws Exception { Assert.assertThat(firstDog, equalTo(dog1)); Assert.assertThat(secondDog, equalTo(dog2)); - result = executeQuery(String.format("SELECT count(*) FROM %s GROUP BY terms" + - "('field'='dog_name', 'alias'='dog_name', 'order'='asc')", TEST_INDEX_DOG)); + result = + executeQuery( + String.format( + "SELECT count(*) FROM %s GROUP BY terms" + + "('field'='dog_name', 'alias'='dog_name', 'order'='asc')", + TEST_INDEX_DOG)); dogName = getAggregation(result, "dog_name"); @@ -450,92 +486,96 @@ public void termsWithOrder() throws Exception { @Test public void orderByAscTest() { - JSONObject response = executeJdbcRequest(String.format("SELECT COUNT(*) FROM %s " + - "GROUP BY gender ORDER BY COUNT(*)", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT COUNT(*) FROM %s " + "GROUP BY gender ORDER BY COUNT(*)", + TEST_INDEX_ACCOUNT)); verifySchema(response, schema("COUNT(*)", null, "integer")); - verifyDataRows(response, - rows(493), - rows(507)); + verifyDataRows(response, rows(493), rows(507)); } @Test public void orderByAliasAscTest() { - JSONObject response = executeJdbcRequest(String.format("SELECT COUNT(*) as count FROM %s " + - "GROUP BY gender ORDER BY count", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT COUNT(*) as count FROM %s " + "GROUP BY gender ORDER BY count", + TEST_INDEX_ACCOUNT)); verifySchema(response, schema("COUNT(*)", "count", "integer")); - verifyDataRowsInOrder(response, - rows(493), - rows(507)); + verifyDataRowsInOrder(response, rows(493), rows(507)); } @Test public void orderByDescTest() throws IOException { - JSONObject response = executeJdbcRequest(String.format("SELECT COUNT(*) FROM %s " + - "GROUP BY gender ORDER BY COUNT(*) DESC", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT COUNT(*) FROM %s " + 
"GROUP BY gender ORDER BY COUNT(*) DESC", + TEST_INDEX_ACCOUNT)); verifySchema(response, schema("COUNT(*)", null, "integer")); - verifyDataRowsInOrder(response, - rows(507), - rows(493)); + verifyDataRowsInOrder(response, rows(507), rows(493)); } @Test public void orderByAliasDescTest() throws IOException { - JSONObject response = executeJdbcRequest(String.format("SELECT COUNT(*) as count FROM %s " + - "GROUP BY gender ORDER BY count DESC", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT COUNT(*) as count FROM %s " + "GROUP BY gender ORDER BY count DESC", + TEST_INDEX_ACCOUNT)); verifySchema(response, schema("COUNT(*)", "count", "integer")); - verifyDataRowsInOrder(response, - rows(507), - rows(493)); + verifyDataRowsInOrder(response, rows(507), rows(493)); } @Test public void orderByGroupFieldWithAlias() throws IOException { // ORDER BY field name - JSONObject response = executeJdbcRequest(String.format("SELECT gender as g, COUNT(*) as count " - + "FROM %s GROUP BY gender ORDER BY gender", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT gender as g, COUNT(*) as count " + + "FROM %s GROUP BY gender ORDER BY gender", + TEST_INDEX_ACCOUNT)); - verifySchema(response, - schema("gender", "g", "text"), - schema("COUNT(*)", "count", "integer")); - verifyDataRowsInOrder(response, - rows("f", 493), - rows("m", 507)); + verifySchema(response, schema("gender", "g", "text"), schema("COUNT(*)", "count", "integer")); + verifyDataRowsInOrder(response, rows("f", 493), rows("m", 507)); // ORDER BY field alias - response = executeJdbcRequest(String.format("SELECT gender as g, COUNT(*) as count " - + "FROM %s GROUP BY gender ORDER BY g", TEST_INDEX_ACCOUNT)); - - verifySchema(response, - schema("gender", "g", "text"), - schema("COUNT(*)", "count", "integer")); - verifyDataRowsInOrder(response, - rows("f", 493), - rows("m", 507)); + response = + executeJdbcRequest( + String.format( + "SELECT 
gender as g, COUNT(*) as count " + "FROM %s GROUP BY gender ORDER BY g", + TEST_INDEX_ACCOUNT)); + + verifySchema(response, schema("gender", "g", "text"), schema("COUNT(*)", "count", "integer")); + verifyDataRowsInOrder(response, rows("f", 493), rows("m", 507)); } @Test public void limitTest() throws IOException { - JSONObject response = executeJdbcRequest(String.format("SELECT COUNT(*) FROM %s " + - "GROUP BY age ORDER BY COUNT(*) LIMIT 5", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT COUNT(*) FROM %s " + "GROUP BY age ORDER BY COUNT(*) LIMIT 5", + TEST_INDEX_ACCOUNT)); verifySchema(response, schema("COUNT(*)", null, "integer")); - verifyDataRowsInOrder(response, - rows(35), - rows(39), - rows(39), - rows(42), - rows(42)); + verifyDataRowsInOrder(response, rows(35), rows(39), rows(39), rows(42), rows(42)); } @Test public void countGroupByRange() throws IOException { - JSONObject result = executeQuery(String.format("SELECT COUNT(age) FROM %s" + - " GROUP BY range(age, 20,25,30,35,40)", TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format( + "SELECT COUNT(age) FROM %s" + " GROUP BY range(age, 20,25,30,35,40)", + TEST_INDEX_ACCOUNT)); JSONObject ageAgg = getAggregation(result, "range(age,20,25,30,35,40)"); JSONArray buckets = ageAgg.getJSONArray("buckets"); Assert.assertThat(buckets.length(), equalTo(4)); @@ -544,7 +584,8 @@ public void countGroupByRange() throws IOException { for (int i = 0; i < expectedResults.length; ++i) { - Assert.assertThat(buckets.query(String.format(Locale.ROOT, "/%d/COUNT(age)/value", i)), + Assert.assertThat( + buckets.query(String.format(Locale.ROOT, "/%d/COUNT(age)/value", i)), equalTo(expectedResults[i])); } } @@ -556,42 +597,56 @@ public void countGroupByRange() throws IOException { public void countGroupByDateTest() throws IOException { String result = - explainQuery(String.format("select insert_time from %s group by date_histogram" + - 
"('field'='insert_time','fixed_interval'='1h','format'='yyyy-MM','min_doc_count'=5) ", - TEST_INDEX_ONLINE)); - Assert.assertThat(result.replaceAll("\\s+", ""), - containsString("{\"date_histogram\":{\"field\":\"insert_time\",\"format\":\"yyyy-MM\"," + - "\"fixed_interval\":\"1h\",\"offset\":0,\"order\":{\"_key\":\"asc\"},\"keyed\":false," + - "\"min_doc_count\":5}")); + explainQuery( + String.format( + "select insert_time from %s group by" + + " date_histogram('field'='insert_time','fixed_interval'='1h','format'='yyyy-MM','min_doc_count'=5)" + + " ", + TEST_INDEX_ONLINE)); + Assert.assertThat( + result.replaceAll("\\s+", ""), + containsString( + "{\"date_histogram\":{\"field\":\"insert_time\",\"format\":\"yyyy-MM\"," + + "\"fixed_interval\":\"1h\",\"offset\":0,\"order\":{\"_key\":\"asc\"},\"keyed\":false," + + "\"min_doc_count\":5}")); } @Test public void countGroupByDateTestWithAlias() throws IOException { String result = - explainQuery(String.format("select insert_time from %s group by date_histogram" + - "('field'='insert_time','fixed_interval'='1h','format'='yyyy-MM','alias'='myAlias')", - TEST_INDEX_ONLINE)); - Assert.assertThat(result.replaceAll("\\s+", ""), - containsString("myAlias\":{\"date_histogram\":{\"field\":\"insert_time\"," + - "\"format\":\"yyyy-MM\",\"fixed_interval\":\"1h\"")); + explainQuery( + String.format( + "select insert_time from %s group by date_histogram" + + "('field'='insert_time','fixed_interval'='1h','format'='yyyy-MM','alias'='myAlias')", + TEST_INDEX_ONLINE)); + Assert.assertThat( + result.replaceAll("\\s+", ""), + containsString( + "myAlias\":{\"date_histogram\":{\"field\":\"insert_time\"," + + "\"format\":\"yyyy-MM\",\"fixed_interval\":\"1h\"")); } -// /** -// * http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-aggregations-bucket-daterange-aggregation.html -// */ -// @Test -// public void countDateRangeTest() throws IOException, SqlParseException, SQLFeatureNotSupportedException { -// String result = 
explainQuery(String.format("select online from %s group by date_range(field='insert_time'," + -// "'format'='yyyy-MM-dd' ,'2014-08-18','2014-08-17','now-8d','now-7d','now-6d','now')", -// TEST_INDEX_ONLINE)); -// // TODO: fix the query or fix the code for the query to work -// } + // /** + // * + // http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-aggregations-bucket-daterange-aggregation.html + // */ + // @Test + // public void countDateRangeTest() throws IOException, SqlParseException, + // SQLFeatureNotSupportedException { + // String result = explainQuery(String.format("select online from %s group by + // date_range(field='insert_time'," + + // "'format'='yyyy-MM-dd' + // ,'2014-08-18','2014-08-17','now-8d','now-7d','now-6d','now')", + // TEST_INDEX_ONLINE)); + // // TODO: fix the query or fix the code for the query to work + // } @Test public void topHitTest() throws IOException { - String query = String - .format("select topHits('size'=3,age='desc') from %s group by gender", TEST_INDEX_ACCOUNT); + String query = + String.format( + "select topHits('size'=3,age='desc') from %s group by gender", TEST_INDEX_ACCOUNT); JSONObject result = executeQuery(query); JSONObject gender = getAggregation(result, "gender"); Assert.assertThat(gender.getJSONArray("buckets").length(), equalTo(2)); @@ -604,7 +659,8 @@ public void topHitTest() throws IOException { final String femaleBucketPrefix = String.format(Locale.ROOT, "/buckets/%d", femaleBucketId); Assert.assertThat(gender.query(maleBucketPrefix + "/key"), equalTo("m")); - Assert.assertThat(gender.query(maleBucketPrefix + "/topHits(size=3,age=desc)/hits/total/value"), + Assert.assertThat( + gender.query(maleBucketPrefix + "/topHits(size=3,age=desc)/hits/total/value"), equalTo(507)); Assert.assertThat( gender.query(maleBucketPrefix + "/topHits(size=3,age=desc)/hits/total/relation"), @@ -614,9 +670,9 @@ public void topHitTest() throws IOException { .length(), equalTo(3)); 
Assert.assertThat(gender.query(femaleBucketPrefix + "/key"), equalTo("f")); - Assert - .assertThat(gender.query(femaleBucketPrefix + "/topHits(size=3,age=desc)/hits/total/value"), - equalTo(493)); + Assert.assertThat( + gender.query(femaleBucketPrefix + "/topHits(size=3,age=desc)/hits/total/value"), + equalTo(493)); Assert.assertThat( gender.query(femaleBucketPrefix + "/topHits(size=3,age=desc)/hits/total/relation"), equalTo("eq")); @@ -630,7 +686,8 @@ public void topHitTest() throws IOException { public void topHitTest_WithInclude() throws IOException { String query = - String.format("select topHits('size'=3,age='desc','include'=age) from %s group by gender", + String.format( + "select topHits('size'=3,age='desc','include'=age) from %s group by gender", TEST_INDEX_ACCOUNT); JSONObject result = executeQuery(query); JSONObject gender = getAggregation(result, "gender"); @@ -647,28 +704,41 @@ public void topHitTest_WithInclude() throws IOException { Assert.assertThat( gender.query(maleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/total/value"), equalTo(507)); - Assert.assertThat(gender - .query(maleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/total/relation"), + Assert.assertThat( + gender.query( + maleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/total/relation"), equalTo("eq")); - Assert.assertThat(((JSONArray) gender.query( - maleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/hits")).length(), + Assert.assertThat( + ((JSONArray) + gender.query(maleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/hits")) + .length(), equalTo(3)); Assert.assertThat(gender.query(femaleBucketPrefix + "/key"), equalTo("f")); Assert.assertThat( gender.query(femaleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/total/value"), equalTo(493)); - Assert.assertThat(gender - .query(femaleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/total/relation"), + Assert.assertThat( + gender.query( + femaleBucketPrefix + 
"/topHits(size=3,age=desc,include=age)/hits/total/relation"), equalTo("eq")); - Assert.assertThat(((JSONArray) gender.query( - femaleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/hits")).length(), + Assert.assertThat( + ((JSONArray) + gender.query( + femaleBucketPrefix + "/topHits(size=3,age=desc,include=age)/hits/hits")) + .length(), equalTo(3)); for (int i = 0; i < 2; ++i) { for (int j = 0; j < 3; ++j) { - JSONObject source = (JSONObject) gender.query(String.format(Locale.ROOT, - "/buckets/%d/topHits(size=3,age=desc,include=age)/hits/hits/%d/_source", i, j)); + JSONObject source = + (JSONObject) + gender.query( + String.format( + Locale.ROOT, + "/buckets/%d/topHits(size=3,age=desc,include=age)/hits/hits/%d/_source", + i, + j)); Assert.assertThat(source.length(), equalTo(1)); Assert.assertTrue(source.has("age")); Assert.assertThat(source.getInt("age"), equalTo(40)); @@ -680,17 +750,24 @@ public void topHitTest_WithInclude() throws IOException { public void topHitTest_WithIncludeTwoFields() throws IOException { String query = - String.format("select topHits('size'=3,'include'='age,firstname',age='desc') from %s " + - "group by gender", TEST_INDEX_ACCOUNT); + String.format( + "select topHits('size'=3,'include'='age,firstname',age='desc') from %s " + + "group by gender", + TEST_INDEX_ACCOUNT); JSONObject result = executeQuery(query); JSONObject gender = getAggregation(result, "gender"); Assert.assertThat(gender.getJSONArray("buckets").length(), equalTo(2)); for (int i = 0; i < 2; ++i) { for (int j = 0; j < 3; ++j) { - JSONObject source = (JSONObject) gender.query(String.format(Locale.ROOT, - "/buckets/%d/topHits(size=3,include=age,firstname,age=desc)/hits/hits/%d/_source", i, - j)); + JSONObject source = + (JSONObject) + gender.query( + String.format( + Locale.ROOT, + "/buckets/%d/topHits(size=3,include=age,firstname,age=desc)/hits/hits/%d/_source", + i, + j)); Assert.assertThat(source.length(), equalTo(2)); Assert.assertTrue(source.has("age")); 
Assert.assertThat(source.getInt("age"), equalTo(40)); @@ -704,8 +781,10 @@ public void topHitTest_WithIncludeTwoFields() throws IOException { @Test public void topHitTest_WithExclude() throws IOException { - String query = String.format("select topHits('size'=3,'exclude'='lastname',age='desc') from " + - "%s group by gender", TEST_INDEX_ACCOUNT); + String query = + String.format( + "select topHits('size'=3,'exclude'='lastname',age='desc') from " + "%s group by gender", + TEST_INDEX_ACCOUNT); JSONObject result = executeQuery(query); JSONObject gender = getAggregation(result, "gender"); Assert.assertThat(gender.getJSONArray("buckets").length(), equalTo(2)); @@ -718,44 +797,61 @@ public void topHitTest_WithExclude() throws IOException { final String femaleBucketPrefix = String.format(Locale.ROOT, "/buckets/%d", femaleBucketId); Assert.assertThat(gender.query(maleBucketPrefix + "/key"), equalTo("m")); - Assert.assertThat(gender - .query(maleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/total/value"), + Assert.assertThat( + gender.query( + maleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/total/value"), equalTo(507)); - Assert.assertThat(gender - .query(maleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/total/relation"), + Assert.assertThat( + gender.query( + maleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/total/relation"), equalTo("eq")); - Assert.assertThat(((JSONArray) gender.query( - maleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/hits")).length(), + Assert.assertThat( + ((JSONArray) + gender.query( + maleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/hits")) + .length(), equalTo(3)); Assert.assertThat(gender.query(femaleBucketPrefix + "/key"), equalTo("f")); - Assert.assertThat(gender - .query(femaleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/total/value"), + Assert.assertThat( + gender.query( + femaleBucketPrefix + 
"/topHits(size=3,exclude=lastname,age=desc)/hits/total/value"), equalTo(493)); - Assert.assertThat(gender.query( - femaleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/total/relation"), + Assert.assertThat( + gender.query( + femaleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/total/relation"), equalTo("eq")); - Assert.assertThat(((JSONArray) gender.query( - femaleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/hits")).length(), + Assert.assertThat( + ((JSONArray) + gender.query( + femaleBucketPrefix + "/topHits(size=3,exclude=lastname,age=desc)/hits/hits")) + .length(), equalTo(3)); - final Set expectedFields = new HashSet<>(Arrays.asList( - "account_number", - "firstname", - "address", - "balance", - "gender", - "city", - "employer", - "state", - "age", - "email" - )); + final Set expectedFields = + new HashSet<>( + Arrays.asList( + "account_number", + "firstname", + "address", + "balance", + "gender", + "city", + "employer", + "state", + "age", + "email")); for (int i = 0; i < 2; ++i) { for (int j = 0; j < 3; ++j) { - JSONObject source = (JSONObject) gender.query(String.format(Locale.ROOT, - "/buckets/%d/topHits(size=3,exclude=lastname,age=desc)/hits/hits/%d/_source", i, j)); + JSONObject source = + (JSONObject) + gender.query( + String.format( + Locale.ROOT, + "/buckets/%d/topHits(size=3,exclude=lastname,age=desc)/hits/hits/%d/_source", + i, + j)); Assert.assertThat(source.length(), equalTo(expectedFields.size())); Assert.assertFalse(source.has("lastname")); Assert.assertThat(source.keySet().containsAll(expectedFields), equalTo(true)); @@ -763,254 +859,300 @@ public void topHitTest_WithExclude() throws IOException { } } - //region not migrated - - // script on metric aggregation tests. 
uncomment if your elastic has scripts enable (disabled by default) -// @Test -// public void sumWithScriptTest() throws IOException, SqlParseException, SQLFeatureNotSupportedException { -// Aggregations result = query(String.format("SELECT SUM(script('','doc[\\'balance\\'].value + doc[\\'balance\\'].value')) as doubleSum FROM %s", TEST_INDEX)); -// Sum sum = result.get("doubleSum"); -// assertThat(sum.getValue(), equalTo(25714837.0*2)); -// } -// -// @Test -// public void sumWithImplicitScriptTest() throws IOException, SqlParseException, SQLFeatureNotSupportedException { -// Aggregations result = query(String.format("SELECT SUM(balance + balance) as doubleSum FROM %s", TEST_INDEX)); -// Sum sum = result.get("doubleSum"); -// assertThat(sum.getValue(), equalTo(25714837.0*2)); -// } -// -// @Test -// public void sumWithScriptTestNoAlias() throws IOException, SqlParseException, SQLFeatureNotSupportedException { -// Aggregations result = query(String.format("SELECT SUM(balance + balance) FROM %s", TEST_INDEX)); -// Sum sum = result.get("SUM(script=script(balance + balance,doc('balance').value + doc('balance').value))"); -// assertThat(sum.getValue(), equalTo(25714837.0*2)); -// } -// -// @Test -// public void scriptedMetricAggregation() throws SQLFeatureNotSupportedException, SqlParseException { -// Aggregations result = query ("select scripted_metric('map_script'='if(doc[\\'balance\\'].value > 49670){ if(!_agg.containsKey(\\'ages\\')) { _agg.put(\\'ages\\',doc[\\'age\\'].value); } " + -// "else { _agg.put(\\'ages\\',_agg.get(\\'ages\\')+doc[\\'age\\'].value); }}'," + -// "'reduce_script'='sumThem = 0; for (a in _aggs) { if(a.containsKey(\\'ages\\')){ sumThem += a.get(\\'ages\\');} }; return sumThem;') as wierdSum from " + TEST_INDEX + ""); -// ScriptedMetric metric = result.get("wierdSum"); -// Assert.assertEquals(136L,metric.aggregation()); -// } -// -// @Test -// public void scriptedMetricConcatWithStringParamAndReduceParamAggregation() throws 
SQLFeatureNotSupportedException, SqlParseException { -// String query = "select scripted_metric(\n" + -// " 'init_script' = '_agg[\"concat\"]=[] ',\n" + -// " 'map_script'='_agg.concat.add(doc[field].value)' ,\n" + -// " 'combine_script'='return _agg.concat.join(delim);',\t\t\t\t\n" + -// " 'reduce_script'='_aggs.removeAll(\"\"); return _aggs.join(delim)'," + -// "'@field' = 'name.firstname' , '@delim'=';',@reduce_delim =';' ) as all_characters \n" + -// "from "+TEST_INDEX+""; -// Aggregations result = query (query); -// ScriptedMetric metric = result.get("all_characters"); -// List names = Arrays.asList(metric.aggregation().toString().split(";")); -// -// -// Assert.assertEquals(4,names.size()); -// String[] expectedNames = new String[]{"brandon","daenerys","eddard","jaime"}; -// for(String name : expectedNames){ -// Assert.assertTrue("not contains:" + name,names.contains(name)); -// } -// } -// -// @Test -// public void scriptedMetricAggregationWithNumberParams() throws SQLFeatureNotSupportedException, SqlParseException { -// Aggregations result = query ("select scripted_metric('map_script'='if(doc[\\'balance\\'].value > 49670){ if(!_agg.containsKey(\\'ages\\')) { _agg.put(\\'ages\\',doc[\\'age\\'].value+x); } " + -// "else { _agg.put(\\'ages\\',_agg.get(\\'ages\\')+doc[\\'age\\'].value+x); }}'," + -// "'reduce_script'='sumThem = 0; for (a in _aggs) { if(a.containsKey(\\'ages\\')){ sumThem += a.get(\\'ages\\');} }; return sumThem;'" + -// ",'@x'=3) as wierdSum from " + TEST_INDEX + ""); -// ScriptedMetric metric = result.get("wierdSum"); -// Assert.assertEquals(148L,metric.aggregation()); -// } -// - -// @Test -// public void topHitTest_WithIncludeAndExclude() throws IOException, SqlParseException, SQLFeatureNotSupportedException { -// Aggregations result = query(String.format("select topHits('size'=3,'exclude'='lastname','include'='firstname,lastname',age='desc') from %s group by gender ", TEST_INDEX_ACCOUNT)); -// List buckets = ((Terms) 
(result.asList().get(0))).getBuckets(); -// for (Terms.Bucket bucket : buckets) { -// SearchHits hits = ((InternalTopHits) bucket.getAggregations().asList().get(0)).getHits(); -// for (SearchHit hit : hits) { -// Set fields = hit.getSourceAsMap().keySet(); -// Assert.assertEquals(1, fields.size()); -// Assert.assertTrue(fields.contains("firstname")); -// } -// } -// } -// -// private Aggregations query(String query) throws SqlParseException, SQLFeatureNotSupportedException { -// SqlElasticSearchRequestBuilder select = getSearchRequestBuilder(query); -// return ((SearchResponse)select.get()).getAggregations(); -// } -// -// private SqlElasticSearchRequestBuilder getSearchRequestBuilder(String query) throws SqlParseException, SQLFeatureNotSupportedException { -// SearchDao searchDao = MainTestSuite.getSearchDao(); -// return (SqlElasticSearchRequestBuilder) searchDao.explain(query).explain(); -// } -// -// @Test -// public void testFromSizeWithAggregations() throws Exception { -// final String query1 = String.format("SELECT /*! DOCS_WITH_AGGREGATION(0,1) */" + -// " account_number FROM %s GROUP BY gender", TEST_INDEX_ACCOUNT); -// SearchResponse response1 = (SearchResponse) getSearchRequestBuilder(query1).get(); -// -// Assert.assertEquals(1, response1.getHits().getHits().length); -// Terms gender1 = response1.getAggregations().get("gender"); -// Assert.assertEquals(2, gender1.getBuckets().size()); -// Object account1 = response1.getHits().getHits()[0].getSourceAsMap().get("account_number"); -// -// final String query2 = String.format("SELECT /*! 
DOCS_WITH_AGGREGATION(1,1) */" + -// " account_number FROM %s GROUP BY gender", TEST_INDEX_ACCOUNT); -// SearchResponse response2 = (SearchResponse) getSearchRequestBuilder(query2).get(); -// -// Assert.assertEquals(1, response2.getHits().getHits().length); -// Terms gender2 = response2.getAggregations().get("gender"); -// Assert.assertEquals(2, gender2.getBuckets().size()); -// Object account2 = response2.getHits().getHits()[0].getSourceAsMap().get("account_number"); -// -// Assert.assertEquals(response1.getHits().getTotalHits(), response2.getHits().getTotalHits()); -// Assert.assertNotEquals(account1, account2); -// } -// -// @Test -// public void testSubAggregations() throws Exception { -// Set expectedAges = new HashSet<>(ContiguousSet.create(Range.closed(20, 40), DiscreteDomain.integers())); -// final String query = String.format("SELECT /*! DOCS_WITH_AGGREGATION(10) */" + -// " * FROM %s GROUP BY (gender, terms('field'='age','size'=200,'alias'='age')), (state) LIMIT 200,200", TEST_INDEX_ACCOUNT); -// -// Map> buckets = new HashMap<>(); -// -// SqlElasticSearchRequestBuilder select = getSearchRequestBuilder(query); -// SearchResponse response = (SearchResponse) select.get(); -// Aggregations result = response.getAggregations(); -// -// Terms gender = result.get("gender"); -// for(Terms.Bucket genderBucket : gender.getBuckets()) { -// String genderKey = genderBucket.getKey().toString(); -// buckets.put(genderKey, new HashSet()); -// Terms ageBuckets = (Terms) genderBucket.getAggregations().get("age"); -// for(Terms.Bucket ageBucket : ageBuckets.getBuckets()) { -// buckets.get(genderKey).add(Integer.parseInt(ageBucket.getKey().toString())); -// } -// } -// -// Assert.assertEquals(2, buckets.keySet().size()); -// Assert.assertEquals(expectedAges, buckets.get("m")); -// Assert.assertEquals(expectedAges, buckets.get("f")); -// -// Terms state = result.get("state.keyword"); -// for(Terms.Bucket stateBucket : state.getBuckets()) { -// 
if(stateBucket.getKey().toString().equalsIgnoreCase("ak")) { -// Assert.assertTrue("There are 22 entries for state ak", stateBucket.getDocCount() == 22); -// } -// } -// -// Assert.assertEquals(response.getHits().getTotalHits(), 1000); -// Assert.assertEquals(response.getHits().getHits().length, 10); -// } -// -// @Test -// public void testSimpleSubAggregations() throws Exception { -// final String query = String.format("SELECT /*! DOCS_WITH_AGGREGATION(10) */ * FROM %s GROUP BY (gender), (state) ", TEST_INDEX_ACCOUNT); -// -// SqlElasticSearchRequestBuilder select = getSearchRequestBuilder(query); -// SearchResponse response = (SearchResponse) select.get(); -// Aggregations result = response.getAggregations(); -// -// Terms gender = result.get("gender"); -// for(Terms.Bucket genderBucket : gender.getBuckets()) { -// String genderKey = genderBucket.getKey().toString(); -// Assert.assertTrue("Gender should be m or f", genderKey.equals("m") || genderKey.equals("f")); -// } -// -// Assert.assertEquals(2, gender.getBuckets().size()); -// -// Terms state = result.get("state.keyword"); -// for(Terms.Bucket stateBucket : state.getBuckets()) { -// if(stateBucket.getKey().toString().equalsIgnoreCase("ak")) { -// Assert.assertTrue("There are 22 entries for state ak", stateBucket.getDocCount() == 22); -// } -// } -// -// Assert.assertEquals(response.getHits().getTotalHits(), 1000); -// Assert.assertEquals(response.getHits().getHits().length, 10); -// } -// -// @Test -// public void geoHashGrid() throws SQLFeatureNotSupportedException, SqlParseException { -// Aggregations result = query(String.format("SELECT COUNT(*) FROM %s/location GROUP BY geohash_grid(field='center',precision=5) ", TEST_INDEX_LOCATION)); -// InternalGeoHashGrid grid = result.get("geohash_grid(field=center,precision=5)"); -// Collection buckets = grid.getBuckets(); -// for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) { -// Assert.assertTrue(bucket.getKeyAsString().equals("w2fsm") || 
bucket.getKeyAsString().equals("w0p6y") ); -// Assert.assertEquals(1,bucket.getDocCount()); -// } -// } -// -// @Test -// public void geoBounds() throws SQLFeatureNotSupportedException, SqlParseException { -// Aggregations result = query(String.format("SELECT * FROM %s/location GROUP BY geo_bounds(field='center',alias='bounds') ", TEST_INDEX_LOCATION)); -// InternalGeoBounds bounds = result.get("bounds"); -// Assert.assertEquals(0.5,bounds.bottomRight().getLat(),0.001); -// Assert.assertEquals(105.0,bounds.bottomRight().getLon(),0.001); -// Assert.assertEquals(5.0,bounds.topLeft().getLat(),0.001); -// Assert.assertEquals(100.5,bounds.topLeft().getLon(),0.001); -// } -// -// @Test -// public void groupByOnNestedFieldTest() throws Exception { -// Aggregations result = query(String.format("SELECT COUNT(*) FROM %s GROUP BY nested(message.info)", TEST_INDEX_NESTED_TYPE)); -// InternalNested nested = result.get("message.info@NESTED"); -// Terms infos = nested.getAggregations().get("message.info"); -// Assert.assertEquals(3,infos.getBuckets().size()); -// for(Terms.Bucket bucket : infos.getBuckets()) { -// String key = bucket.getKey().toString(); -// long count = ((ValueCount) bucket.getAggregations().get("COUNT(*)")).getValue(); -// if(key.equalsIgnoreCase("a")) { -// Assert.assertEquals(2, count); -// } -// else if(key.equalsIgnoreCase("c")) { -// Assert.assertEquals(2, count); -// } -// else if(key.equalsIgnoreCase("b")) { -// Assert.assertEquals(1, count); -// } -// else { -// throw new Exception(String.format("Unexpected key. expected: a OR b OR c . 
found: %s", key)); -// } -// } -// } -// -// @Test -// public void groupByTestWithFilter() throws Exception { -// Aggregations result = query(String.format("SELECT COUNT(*) FROM %s GROUP BY filter(gender='m'),gender", TEST_INDEX_ACCOUNT)); -// InternalFilter filter = result.get("filter(gender = 'm')@FILTER"); -// Terms gender = filter.getAggregations().get("gender"); -// -// for(Terms.Bucket bucket : gender.getBuckets()) { -// String key = bucket.getKey().toString(); -// long count = ((ValueCount) bucket.getAggregations().get("COUNT(*)")).getValue(); -// if(key.equalsIgnoreCase("m")) { -// Assert.assertEquals(507, count); -// } -// else { -// throw new Exception(String.format("Unexpected key. expected: only m. found: %s", key)); -// } -// } -// } -// -// - //endregion not migrated + // region not migrated + + // script on metric aggregation tests. uncomment if your elastic has scripts enable (disabled by + // default) + // @Test + // public void sumWithScriptTest() throws IOException, SqlParseException, + // SQLFeatureNotSupportedException { + // Aggregations result = query(String.format("SELECT + // SUM(script('','doc[\\'balance\\'].value + doc[\\'balance\\'].value')) as doubleSum FROM %s", + // TEST_INDEX)); + // Sum sum = result.get("doubleSum"); + // assertThat(sum.getValue(), equalTo(25714837.0*2)); + // } + // + // @Test + // public void sumWithImplicitScriptTest() throws IOException, SqlParseException, + // SQLFeatureNotSupportedException { + // Aggregations result = query(String.format("SELECT SUM(balance + balance) as doubleSum + // FROM %s", TEST_INDEX)); + // Sum sum = result.get("doubleSum"); + // assertThat(sum.getValue(), equalTo(25714837.0*2)); + // } + // + // @Test + // public void sumWithScriptTestNoAlias() throws IOException, SqlParseException, + // SQLFeatureNotSupportedException { + // Aggregations result = query(String.format("SELECT SUM(balance + balance) FROM %s", + // TEST_INDEX)); + // Sum sum = result.get("SUM(script=script(balance + 
balance,doc('balance').value + + // doc('balance').value))"); + // assertThat(sum.getValue(), equalTo(25714837.0*2)); + // } + // + // @Test + // public void scriptedMetricAggregation() throws SQLFeatureNotSupportedException, + // SqlParseException { + // Aggregations result = query ("select + // scripted_metric('map_script'='if(doc[\\'balance\\'].value > 49670){ + // if(!_agg.containsKey(\\'ages\\')) { _agg.put(\\'ages\\',doc[\\'age\\'].value); } " + + // "else { _agg.put(\\'ages\\',_agg.get(\\'ages\\')+doc[\\'age\\'].value); }}'," + + // "'reduce_script'='sumThem = 0; for (a in _aggs) { if(a.containsKey(\\'ages\\')){ + // sumThem += a.get(\\'ages\\');} }; return sumThem;') as wierdSum from " + TEST_INDEX + ""); + // ScriptedMetric metric = result.get("wierdSum"); + // Assert.assertEquals(136L,metric.aggregation()); + // } + // + // @Test + // public void scriptedMetricConcatWithStringParamAndReduceParamAggregation() throws + // SQLFeatureNotSupportedException, SqlParseException { + // String query = "select scripted_metric(\n" + + // " 'init_script' = '_agg[\"concat\"]=[] ',\n" + + // " 'map_script'='_agg.concat.add(doc[field].value)' ,\n" + + // " 'combine_script'='return _agg.concat.join(delim);',\t\t\t\t\n" + + // " 'reduce_script'='_aggs.removeAll(\"\"); return _aggs.join(delim)'," + + // "'@field' = 'name.firstname' , '@delim'=';',@reduce_delim =';' ) as + // all_characters \n" + + // "from "+TEST_INDEX+""; + // Aggregations result = query (query); + // ScriptedMetric metric = result.get("all_characters"); + // List names = Arrays.asList(metric.aggregation().toString().split(";")); + // + // + // Assert.assertEquals(4,names.size()); + // String[] expectedNames = new String[]{"brandon","daenerys","eddard","jaime"}; + // for(String name : expectedNames){ + // Assert.assertTrue("not contains:" + name,names.contains(name)); + // } + // } + // + // @Test + // public void scriptedMetricAggregationWithNumberParams() throws + // SQLFeatureNotSupportedException, 
SqlParseException { + // Aggregations result = query ("select + // scripted_metric('map_script'='if(doc[\\'balance\\'].value > 49670){ + // if(!_agg.containsKey(\\'ages\\')) { _agg.put(\\'ages\\',doc[\\'age\\'].value+x); } " + + // "else { _agg.put(\\'ages\\',_agg.get(\\'ages\\')+doc[\\'age\\'].value+x); }}'," + // + + // "'reduce_script'='sumThem = 0; for (a in _aggs) { if(a.containsKey(\\'ages\\')){ + // sumThem += a.get(\\'ages\\');} }; return sumThem;'" + + // ",'@x'=3) as wierdSum from " + TEST_INDEX + ""); + // ScriptedMetric metric = result.get("wierdSum"); + // Assert.assertEquals(148L,metric.aggregation()); + // } + // + + // @Test + // public void topHitTest_WithIncludeAndExclude() throws IOException, SqlParseException, + // SQLFeatureNotSupportedException { + // Aggregations result = query(String.format("select + // topHits('size'=3,'exclude'='lastname','include'='firstname,lastname',age='desc') from %s group + // by gender ", TEST_INDEX_ACCOUNT)); + // List buckets = ((Terms) (result.asList().get(0))).getBuckets(); + // for (Terms.Bucket bucket : buckets) { + // SearchHits hits = ((InternalTopHits) + // bucket.getAggregations().asList().get(0)).getHits(); + // for (SearchHit hit : hits) { + // Set fields = hit.getSourceAsMap().keySet(); + // Assert.assertEquals(1, fields.size()); + // Assert.assertTrue(fields.contains("firstname")); + // } + // } + // } + // + // private Aggregations query(String query) throws SqlParseException, + // SQLFeatureNotSupportedException { + // SqlElasticSearchRequestBuilder select = getSearchRequestBuilder(query); + // return ((SearchResponse)select.get()).getAggregations(); + // } + // + // private SqlElasticSearchRequestBuilder getSearchRequestBuilder(String query) throws + // SqlParseException, SQLFeatureNotSupportedException { + // SearchDao searchDao = MainTestSuite.getSearchDao(); + // return (SqlElasticSearchRequestBuilder) searchDao.explain(query).explain(); + // } + // + // @Test + // public void 
testFromSizeWithAggregations() throws Exception { + // final String query1 = String.format("SELECT /*! DOCS_WITH_AGGREGATION(0,1) */" + + // " account_number FROM %s GROUP BY gender", TEST_INDEX_ACCOUNT); + // SearchResponse response1 = (SearchResponse) getSearchRequestBuilder(query1).get(); + // + // Assert.assertEquals(1, response1.getHits().getHits().length); + // Terms gender1 = response1.getAggregations().get("gender"); + // Assert.assertEquals(2, gender1.getBuckets().size()); + // Object account1 = + // response1.getHits().getHits()[0].getSourceAsMap().get("account_number"); + // + // final String query2 = String.format("SELECT /*! DOCS_WITH_AGGREGATION(1,1) */" + + // " account_number FROM %s GROUP BY gender", TEST_INDEX_ACCOUNT); + // SearchResponse response2 = (SearchResponse) getSearchRequestBuilder(query2).get(); + // + // Assert.assertEquals(1, response2.getHits().getHits().length); + // Terms gender2 = response2.getAggregations().get("gender"); + // Assert.assertEquals(2, gender2.getBuckets().size()); + // Object account2 = + // response2.getHits().getHits()[0].getSourceAsMap().get("account_number"); + // + // Assert.assertEquals(response1.getHits().getTotalHits(), + // response2.getHits().getTotalHits()); + // Assert.assertNotEquals(account1, account2); + // } + // + // @Test + // public void testSubAggregations() throws Exception { + // Set expectedAges = new HashSet<>(ContiguousSet.create(Range.closed(20, 40), + // DiscreteDomain.integers())); + // final String query = String.format("SELECT /*! 
DOCS_WITH_AGGREGATION(10) */" + + // " * FROM %s GROUP BY (gender, terms('field'='age','size'=200,'alias'='age')), + // (state) LIMIT 200,200", TEST_INDEX_ACCOUNT); + // + // Map> buckets = new HashMap<>(); + // + // SqlElasticSearchRequestBuilder select = getSearchRequestBuilder(query); + // SearchResponse response = (SearchResponse) select.get(); + // Aggregations result = response.getAggregations(); + // + // Terms gender = result.get("gender"); + // for(Terms.Bucket genderBucket : gender.getBuckets()) { + // String genderKey = genderBucket.getKey().toString(); + // buckets.put(genderKey, new HashSet()); + // Terms ageBuckets = (Terms) genderBucket.getAggregations().get("age"); + // for(Terms.Bucket ageBucket : ageBuckets.getBuckets()) { + // buckets.get(genderKey).add(Integer.parseInt(ageBucket.getKey().toString())); + // } + // } + // + // Assert.assertEquals(2, buckets.keySet().size()); + // Assert.assertEquals(expectedAges, buckets.get("m")); + // Assert.assertEquals(expectedAges, buckets.get("f")); + // + // Terms state = result.get("state.keyword"); + // for(Terms.Bucket stateBucket : state.getBuckets()) { + // if(stateBucket.getKey().toString().equalsIgnoreCase("ak")) { + // Assert.assertTrue("There are 22 entries for state ak", stateBucket.getDocCount() + // == 22); + // } + // } + // + // Assert.assertEquals(response.getHits().getTotalHits(), 1000); + // Assert.assertEquals(response.getHits().getHits().length, 10); + // } + // + // @Test + // public void testSimpleSubAggregations() throws Exception { + // final String query = String.format("SELECT /*! 
DOCS_WITH_AGGREGATION(10) */ * FROM %s + // GROUP BY (gender), (state) ", TEST_INDEX_ACCOUNT); + // + // SqlElasticSearchRequestBuilder select = getSearchRequestBuilder(query); + // SearchResponse response = (SearchResponse) select.get(); + // Aggregations result = response.getAggregations(); + // + // Terms gender = result.get("gender"); + // for(Terms.Bucket genderBucket : gender.getBuckets()) { + // String genderKey = genderBucket.getKey().toString(); + // Assert.assertTrue("Gender should be m or f", genderKey.equals("m") || + // genderKey.equals("f")); + // } + // + // Assert.assertEquals(2, gender.getBuckets().size()); + // + // Terms state = result.get("state.keyword"); + // for(Terms.Bucket stateBucket : state.getBuckets()) { + // if(stateBucket.getKey().toString().equalsIgnoreCase("ak")) { + // Assert.assertTrue("There are 22 entries for state ak", stateBucket.getDocCount() + // == 22); + // } + // } + // + // Assert.assertEquals(response.getHits().getTotalHits(), 1000); + // Assert.assertEquals(response.getHits().getHits().length, 10); + // } + // + // @Test + // public void geoHashGrid() throws SQLFeatureNotSupportedException, SqlParseException { + // Aggregations result = query(String.format("SELECT COUNT(*) FROM %s/location GROUP BY + // geohash_grid(field='center',precision=5) ", TEST_INDEX_LOCATION)); + // InternalGeoHashGrid grid = result.get("geohash_grid(field=center,precision=5)"); + // Collection buckets = + // grid.getBuckets(); + // for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) { + // Assert.assertTrue(bucket.getKeyAsString().equals("w2fsm") || + // bucket.getKeyAsString().equals("w0p6y") ); + // Assert.assertEquals(1,bucket.getDocCount()); + // } + // } + // + // @Test + // public void geoBounds() throws SQLFeatureNotSupportedException, SqlParseException { + // Aggregations result = query(String.format("SELECT * FROM %s/location GROUP BY + // geo_bounds(field='center',alias='bounds') ", TEST_INDEX_LOCATION)); + // 
InternalGeoBounds bounds = result.get("bounds"); + // Assert.assertEquals(0.5,bounds.bottomRight().getLat(),0.001); + // Assert.assertEquals(105.0,bounds.bottomRight().getLon(),0.001); + // Assert.assertEquals(5.0,bounds.topLeft().getLat(),0.001); + // Assert.assertEquals(100.5,bounds.topLeft().getLon(),0.001); + // } + // + // @Test + // public void groupByOnNestedFieldTest() throws Exception { + // Aggregations result = query(String.format("SELECT COUNT(*) FROM %s GROUP BY + // nested(message.info)", TEST_INDEX_NESTED_TYPE)); + // InternalNested nested = result.get("message.info@NESTED"); + // Terms infos = nested.getAggregations().get("message.info"); + // Assert.assertEquals(3,infos.getBuckets().size()); + // for(Terms.Bucket bucket : infos.getBuckets()) { + // String key = bucket.getKey().toString(); + // long count = ((ValueCount) bucket.getAggregations().get("COUNT(*)")).getValue(); + // if(key.equalsIgnoreCase("a")) { + // Assert.assertEquals(2, count); + // } + // else if(key.equalsIgnoreCase("c")) { + // Assert.assertEquals(2, count); + // } + // else if(key.equalsIgnoreCase("b")) { + // Assert.assertEquals(1, count); + // } + // else { + // throw new Exception(String.format("Unexpected key. expected: a OR b OR c . + // found: %s", key)); + // } + // } + // } + // + // @Test + // public void groupByTestWithFilter() throws Exception { + // Aggregations result = query(String.format("SELECT COUNT(*) FROM %s GROUP BY + // filter(gender='m'),gender", TEST_INDEX_ACCOUNT)); + // InternalFilter filter = result.get("filter(gender = 'm')@FILTER"); + // Terms gender = filter.getAggregations().get("gender"); + // + // for(Terms.Bucket bucket : gender.getBuckets()) { + // String key = bucket.getKey().toString(); + // long count = ((ValueCount) bucket.getAggregations().get("COUNT(*)")).getValue(); + // if(key.equalsIgnoreCase("m")) { + // Assert.assertEquals(507, count); + // } + // else { + // throw new Exception(String.format("Unexpected key. expected: only m. 
found: %s", + // key)); + // } + // } + // } + // + // + // endregion not migrated @Test public void groupByOnNestedFieldWithFilterTest() throws Exception { - String query = String.format("SELECT COUNT(*) FROM %s GROUP BY nested(message.info)," + - "filter('myFilter',message.info = 'a')", TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "SELECT COUNT(*) FROM %s GROUP BY nested(message.info)," + + "filter('myFilter',message.info = 'a')", + TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.info@NESTED"); @@ -1026,29 +1168,36 @@ public void groupByOnNestedFieldWithFilterTest() throws Exception { @Test public void minOnNestedField() throws Exception { - String query = String.format("SELECT min(nested(message.dayOfWeek)) as minDays FROM %s", - TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "SELECT min(nested(message.dayOfWeek)) as minDays FROM %s", TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.dayOfWeek@NESTED"); - Assert.assertEquals(1.0, ((BigDecimal) aggregation.query("/minDays/value")).doubleValue(), 0.0001); + Assert.assertEquals( + 1.0, ((BigDecimal) aggregation.query("/minDays/value")).doubleValue(), 0.0001); } @Test public void sumOnNestedField() throws Exception { - String query = String.format("SELECT sum(nested(message.dayOfWeek)) as sumDays FROM %s", - TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "SELECT sum(nested(message.dayOfWeek)) as sumDays FROM %s", TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.dayOfWeek@NESTED"); - Assert.assertEquals(19.0, ((BigDecimal) aggregation.query("/sumDays/value")).doubleValue(), 0.0001); + Assert.assertEquals( + 19.0, ((BigDecimal) aggregation.query("/sumDays/value")).doubleValue(), 0.0001); } @Test public void histogramOnNestedField() throws Exception { - 
String query = String.format("select count(*) from %s group by histogram" + - "('field'='message.dayOfWeek','nested'='message','interval'='2' , 'alias' = 'someAlias' )", - TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "select count(*) from %s group by" + + " histogram('field'='message.dayOfWeek','nested'='message','interval'='2' ," + + " 'alias' = 'someAlias' )", + TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message@NESTED"); @@ -1061,22 +1210,26 @@ public void histogramOnNestedField() throws Exception { JSONArray buckets = (JSONArray) aggregation.query("/someAlias/buckets"); Assert.assertThat(buckets.length(), equalTo(4)); - buckets.forEach(obj -> { - JSONObject bucket = (JSONObject) obj; - final double key = bucket.getDouble("key"); - Assert.assertTrue(expectedCountsByKey.containsKey(key)); - Assert.assertThat(bucket.getJSONObject("COUNT(*)").getInt("value"), - equalTo(expectedCountsByKey.get(key))); - }); + buckets.forEach( + obj -> { + JSONObject bucket = (JSONObject) obj; + final double key = bucket.getDouble("key"); + Assert.assertTrue(expectedCountsByKey.containsKey(key)); + Assert.assertThat( + bucket.getJSONObject("COUNT(*)").getInt("value"), + equalTo(expectedCountsByKey.get(key))); + }); } @Test public void reverseToRootGroupByOnNestedFieldWithFilterTestWithReverseNestedAndEmptyPath() throws Exception { - String query = String.format("SELECT COUNT(*) FROM %s GROUP BY nested(message.info)," + - "filter('myFilter',message.info = 'a'),reverse_nested(someField,'')", - TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "SELECT COUNT(*) FROM %s GROUP BY nested(message.info)," + + "filter('myFilter',message.info = 'a'),reverse_nested(someField,'')", + TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.info@NESTED"); @@ -1097,8 +1250,11 @@ public void 
reverseToRootGroupByOnNestedFieldWithFilterTestWithReverseNestedAndE public void reverseToRootGroupByOnNestedFieldWithFilterTestWithReverseNestedNoPath() throws Exception { - String query = String.format("SELECT COUNT(*) FROM %s GROUP BY nested(message.info),filter" + - "('myFilter',message.info = 'a'),reverse_nested(someField)", TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "SELECT COUNT(*) FROM %s GROUP BY nested(message.info),filter" + + "('myFilter',message.info = 'a'),reverse_nested(someField)", + TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.info@NESTED"); @@ -1119,9 +1275,12 @@ public void reverseToRootGroupByOnNestedFieldWithFilterTestWithReverseNestedNoPa public void reverseToRootGroupByOnNestedFieldWithFilterTestWithReverseNestedOnHistogram() throws Exception { - String query = String.format("SELECT COUNT(*) FROM %s GROUP BY nested(message.info)," + - "filter('myFilter',message.info = 'a'),histogram('field'='myNum','reverse_nested'='','interval'='2', " + - "'alias' = 'someAlias' )", TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "SELECT COUNT(*) FROM %s GROUP BY nested(message.info),filter('myFilter',message.info" + + " = 'a'),histogram('field'='myNum','reverse_nested'='','interval'='2', 'alias' =" + + " 'someAlias' )", + TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.info@NESTED"); @@ -1140,21 +1299,26 @@ public void reverseToRootGroupByOnNestedFieldWithFilterTestWithReverseNestedOnHi expectedCountsByKey.put(2.0, 0); expectedCountsByKey.put(4.0, 1); - someAliasBuckets.forEach(obj -> { - JSONObject bucket = (JSONObject) obj; - final double key = bucket.getDouble("key"); - Assert.assertTrue(expectedCountsByKey.containsKey(key)); - Assert.assertThat(bucket.getJSONObject("COUNT(*)").getInt("value"), - equalTo(expectedCountsByKey.get(key))); - }); + someAliasBuckets.forEach( + obj 
-> { + JSONObject bucket = (JSONObject) obj; + final double key = bucket.getDouble("key"); + Assert.assertTrue(expectedCountsByKey.containsKey(key)); + Assert.assertThat( + bucket.getJSONObject("COUNT(*)").getInt("value"), + equalTo(expectedCountsByKey.get(key))); + }); } @Test public void reverseToRootGroupByOnNestedFieldWithFilterAndSumOnReverseNestedField() throws Exception { - String query = String.format("SELECT sum(reverse_nested(myNum)) bla FROM %s GROUP BY " + - "nested(message.info),filter('myFilter',message.info = 'a')", TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "SELECT sum(reverse_nested(myNum)) bla FROM %s GROUP BY " + + "nested(message.info),filter('myFilter',message.info = 'a')", + TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.info@NESTED"); @@ -1172,9 +1336,11 @@ public void reverseToRootGroupByOnNestedFieldWithFilterAndSumOnReverseNestedFiel public void reverseAnotherNestedGroupByOnNestedFieldWithFilterTestWithReverseNestedNoPath() throws Exception { - String query = String.format("SELECT COUNT(*) FROM %s GROUP BY nested(message.info)," + - "filter('myFilter',message.info = 'a'),reverse_nested(comment.data,'~comment')", - TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "SELECT COUNT(*) FROM %s GROUP BY nested(message.info)," + + "filter('myFilter',message.info = 'a'),reverse_nested(comment.data,'~comment')", + TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.info@NESTED"); @@ -1184,8 +1350,9 @@ public void reverseAnotherNestedGroupByOnNestedFieldWithFilterTestWithReverseNes Assert.assertThat(msgInfoBuckets.length(), equalTo(1)); JSONArray commentDataBuckets = - (JSONArray) msgInfoBuckets.optQuery("/0/comment.data@NESTED_REVERSED" + - "/comment.data@NESTED/comment.data/buckets"); + (JSONArray) + msgInfoBuckets.optQuery( + "/0/comment.data@NESTED_REVERSED" + 
"/comment.data@NESTED/comment.data/buckets"); Assert.assertNotNull(commentDataBuckets); Assert.assertThat(commentDataBuckets.length(), equalTo(1)); Assert.assertThat(commentDataBuckets.query("/0/key"), equalTo("ab")); @@ -1196,9 +1363,12 @@ public void reverseAnotherNestedGroupByOnNestedFieldWithFilterTestWithReverseNes public void reverseAnotherNestedGroupByOnNestedFieldWithFilterTestWithReverseNestedOnHistogram() throws Exception { - String query = String.format("SELECT COUNT(*) FROM %s GROUP BY nested(message.info),filter" + - "('myFilter',message.info = 'a'),histogram('field'='comment.likes','reverse_nested'='~comment'," + - "'interval'='2' , 'alias' = 'someAlias' )", TEST_INDEX_NESTED_TYPE); + String query = + String.format( + "SELECT COUNT(*) FROM %s GROUP BY nested(message.info),filter('myFilter',message.info" + + " = 'a'),histogram('field'='comment.likes','reverse_nested'='~comment','interval'='2'" + + " , 'alias' = 'someAlias' )", + TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.info@NESTED"); @@ -1207,8 +1377,10 @@ public void reverseAnotherNestedGroupByOnNestedFieldWithFilterTestWithReverseNes Assert.assertNotNull(msgInfoBuckets); Assert.assertThat(msgInfoBuckets.length(), equalTo(1)); - JSONArray someAliasBuckets = (JSONArray) msgInfoBuckets.optQuery( - "/0/~comment@NESTED_REVERSED/~comment@NESTED/someAlias/buckets"); + JSONArray someAliasBuckets = + (JSONArray) + msgInfoBuckets.optQuery( + "/0/~comment@NESTED_REVERSED/~comment@NESTED/someAlias/buckets"); Assert.assertNotNull(msgInfoBuckets); Assert.assertThat(someAliasBuckets.length(), equalTo(2)); @@ -1216,13 +1388,15 @@ public void reverseAnotherNestedGroupByOnNestedFieldWithFilterTestWithReverseNes expectedCountsByKey.put(0.0, 1); expectedCountsByKey.put(2.0, 1); - someAliasBuckets.forEach(obj -> { - JSONObject bucket = (JSONObject) obj; - final double key = bucket.getDouble("key"); - 
Assert.assertTrue(expectedCountsByKey.containsKey(key)); - Assert.assertThat(bucket.getJSONObject("COUNT(*)").getInt("value"), - equalTo(expectedCountsByKey.get(key))); - }); + someAliasBuckets.forEach( + obj -> { + JSONObject bucket = (JSONObject) obj; + final double key = bucket.getDouble("key"); + Assert.assertTrue(expectedCountsByKey.containsKey(key)); + Assert.assertThat( + bucket.getJSONObject("COUNT(*)").getInt("value"), + equalTo(expectedCountsByKey.get(key))); + }); } @Test @@ -1230,8 +1404,9 @@ public void reverseAnotherNestedGroupByOnNestedFieldWithFilterAndSumOnReverseNes throws Exception { String query = - String.format("SELECT sum(reverse_nested(comment.likes,'~comment')) bla FROM %s " + - "GROUP BY nested(message.info),filter('myFilter',message.info = 'a')", + String.format( + "SELECT sum(reverse_nested(comment.likes,'~comment')) bla FROM %s " + + "GROUP BY nested(message.info),filter('myFilter',message.info = 'a')", TEST_INDEX_NESTED_TYPE); JSONObject result = executeQuery(query); JSONObject aggregation = getAggregation(result, "message.info@NESTED"); @@ -1241,10 +1416,11 @@ public void reverseAnotherNestedGroupByOnNestedFieldWithFilterAndSumOnReverseNes Assert.assertNotNull(msgInfoBuckets); Assert.assertThat(msgInfoBuckets.length(), equalTo(1)); - Assert.assertNotNull(msgInfoBuckets.optQuery( - "/0/comment.likes@NESTED_REVERSED/comment.likes@NESTED/bla/value")); - JSONObject bla = (JSONObject) msgInfoBuckets - .query("/0/comment.likes@NESTED_REVERSED/comment.likes@NESTED/bla"); + Assert.assertNotNull( + msgInfoBuckets.optQuery("/0/comment.likes@NESTED_REVERSED/comment.likes@NESTED/bla/value")); + JSONObject bla = + (JSONObject) + msgInfoBuckets.query("/0/comment.likes@NESTED_REVERSED/comment.likes@NESTED/bla"); Assert.assertEquals(4.0, bla.getDouble("value"), 0.000001); } @@ -1257,8 +1433,9 @@ public void docsReturnedTestWithoutDocsHint() throws Exception { @Test public void docsReturnedTestWithDocsHint() throws Exception { - String query = 
String.format("SELECT /*! DOCS_WITH_AGGREGATION(10) */ count(*) from %s", - TEST_INDEX_ACCOUNT); + String query = + String.format( + "SELECT /*! DOCS_WITH_AGGREGATION(10) */ count(*) from %s", TEST_INDEX_ACCOUNT); JSONObject result = executeQuery(query); Assert.assertThat(getHits(result).length(), equalTo(10)); } @@ -1267,9 +1444,11 @@ public void docsReturnedTestWithDocsHint() throws Exception { @Test public void termsWithScript() throws Exception { String query = - String.format("select count(*), avg(all_client) from %s group by terms('alias'='asdf'," + - " substring(field, 0, 1)), date_histogram('alias'='time', 'field'='timestamp', " + - "'interval'='20d ', 'format'='yyyy-MM-dd') limit 1000", TEST_INDEX_ONLINE); + String.format( + "select count(*), avg(all_client) from %s group by terms('alias'='asdf'," + + " substring(field, 0, 1)), date_histogram('alias'='time', 'field'='timestamp', " + + "'interval'='20d ', 'format'='yyyy-MM-dd') limit 1000", + TEST_INDEX_ONLINE); String result = explainQuery(query); Assert.assertThat(result, containsString("\"script\":{\"source\"")); @@ -1278,9 +1457,10 @@ public void termsWithScript() throws Exception { @Test public void groupByScriptedDateHistogram() throws Exception { - String query = String - .format("select count(*), avg(all_client) from %s group by date_histogram('alias'='time'," + - " ceil(all_client), 'fixed_interval'='20d ', 'format'='yyyy-MM-dd') limit 1000", + String query = + String.format( + "select count(*), avg(all_client) from %s group by date_histogram('alias'='time'," + + " ceil(all_client), 'fixed_interval'='20d ', 'format'='yyyy-MM-dd') limit 1000", TEST_INDEX_ONLINE); String result = explainQuery(query); @@ -1290,9 +1470,10 @@ public void groupByScriptedDateHistogram() throws Exception { @Test public void groupByScriptedHistogram() throws Exception { - String query = String.format( - "select count(*) from %s group by histogram('alias'='all_field', pow(all_client,1))", - TEST_INDEX_ONLINE); + String query 
= + String.format( + "select count(*) from %s group by histogram('alias'='all_field', pow(all_client,1))", + TEST_INDEX_ONLINE); String result = explainQuery(query); Assert.assertThat(result, containsString("Math.pow(doc['all_client'].value, 1)")); @@ -1303,18 +1484,17 @@ public void groupByScriptedHistogram() throws Exception { public void distinctWithOneField() { Assert.assertEquals( executeQuery("SELECT DISTINCT name.lastname FROM " + TEST_INDEX_GAME_OF_THRONES, "jdbc"), - executeQuery("SELECT name.lastname FROM " + TEST_INDEX_GAME_OF_THRONES - + " GROUP BY name.lastname", "jdbc") - ); + executeQuery( + "SELECT name.lastname FROM " + TEST_INDEX_GAME_OF_THRONES + " GROUP BY name.lastname", + "jdbc")); } @Test public void distinctWithMultipleFields() { Assert.assertEquals( executeQuery("SELECT DISTINCT age, gender FROM " + TEST_INDEX_ACCOUNT, "jdbc"), - executeQuery("SELECT age, gender FROM " + TEST_INDEX_ACCOUNT - + " GROUP BY age, gender", "jdbc") - ); + executeQuery( + "SELECT age, gender FROM " + TEST_INDEX_ACCOUNT + " GROUP BY age, gender", "jdbc")); } private JSONObject getAggregation(final JSONObject queryResult, final String aggregationName) { @@ -1326,26 +1506,27 @@ private JSONObject getAggregation(final JSONObject queryResult, final String agg return aggregations.getJSONObject(aggregationName); } - private int getIntAggregationValue(final JSONObject queryResult, final String aggregationName, - final String fieldName) { + private int getIntAggregationValue( + final JSONObject queryResult, final String aggregationName, final String fieldName) { final JSONObject targetAggregation = getAggregation(queryResult, aggregationName); Assert.assertTrue(targetAggregation.has(fieldName)); return targetAggregation.getInt(fieldName); } - private double getDoubleAggregationValue(final JSONObject queryResult, - final String aggregationName, - final String fieldName) { + private double getDoubleAggregationValue( + final JSONObject queryResult, final String 
aggregationName, final String fieldName) { final JSONObject targetAggregation = getAggregation(queryResult, aggregationName); Assert.assertTrue(targetAggregation.has(fieldName)); return targetAggregation.getDouble(fieldName); } - private double getDoubleAggregationValue(final JSONObject queryResult, - final String aggregationName, - final String fieldName, final String subFieldName) { + private double getDoubleAggregationValue( + final JSONObject queryResult, + final String aggregationName, + final String fieldName, + final String subFieldName) { final JSONObject targetAggregation = getAggregation(queryResult, aggregationName); Assert.assertTrue(targetAggregation.has(fieldName)); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/CsvFormatResponseIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/CsvFormatResponseIT.java index 52dcf9a068..508cd1555e 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/CsvFormatResponseIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/CsvFormatResponseIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.anyOf; @@ -33,18 +32,14 @@ import org.hamcrest.Matcher; import org.hamcrest.core.AnyOf; import org.junit.Assert; -import org.junit.Assume; import org.junit.Ignore; import org.junit.Test; -import org.junit.jupiter.api.Disabled; import org.opensearch.client.Request; import org.opensearch.client.RequestOptions; import org.opensearch.client.Response; import org.opensearch.sql.legacy.executor.csv.CSVResult; -/** - * Tests to cover requests with "?format=csv" parameter - */ +/** Tests to cover requests with "?format=csv" parameter */ public class CsvFormatResponseIT extends SQLIntegTestCase { private boolean flatOption = false; @@ -77,16 +72,16 @@ public void allPercentilesByDefault() throws IOException { final String result = executeQueryWithStringOutput(query); final String expectedHeaders = - 
"PERCENTILES(age).1.0,PERCENTILES(age).5.0,PERCENTILES(age).25.0," + - "PERCENTILES(age).50.0,PERCENTILES(age).75.0,PERCENTILES(age).95.0,PERCENTILES(age).99.0"; + "PERCENTILES(age).1.0,PERCENTILES(age).5.0,PERCENTILES(age).25.0," + + "PERCENTILES(age).50.0,PERCENTILES(age).75.0,PERCENTILES(age).95.0,PERCENTILES(age).99.0"; Assert.assertThat(result, containsString(expectedHeaders)); } @Test public void specificPercentilesIntAndDouble() throws IOException { - final String query = String.format(Locale.ROOT, "SELECT PERCENTILES(age,10,49.0) FROM %s", - TEST_INDEX_ACCOUNT); + final String query = + String.format(Locale.ROOT, "SELECT PERCENTILES(age,10,49.0) FROM %s", TEST_INDEX_ACCOUNT); final String result = executeQueryWithStringOutput(query); final String[] unexpectedPercentiles = {"1.0", "5.0", "25.0", "50.0", "75.0", "95.0", "99.0"}; @@ -94,14 +89,14 @@ public void specificPercentilesIntAndDouble() throws IOException { "\"PERCENTILES(age,10,49.0).10.0\",\"PERCENTILES(age,10,49.0).49.0\""; Assert.assertThat(result, containsString(expectedHeaders)); for (final String unexpectedPercentile : unexpectedPercentiles) { - Assert.assertThat(result, - not(containsString("PERCENTILES(age,10,49.0)." + unexpectedPercentile))); + Assert.assertThat( + result, not(containsString("PERCENTILES(age,10,49.0)." 
+ unexpectedPercentile))); } } public void nestedObjectsAndArraysAreQuoted() throws IOException { - final String query = String.format(Locale.ROOT, "SELECT * FROM %s WHERE _id = 5", - TEST_INDEX_NESTED_TYPE); + final String query = + String.format(Locale.ROOT, "SELECT * FROM %s WHERE _id = 5", TEST_INDEX_NESTED_TYPE); final String result = executeQueryWithStringOutput(query); final String expectedMyNum = "\"[3, 4]\""; @@ -116,8 +111,8 @@ public void nestedObjectsAndArraysAreQuoted() throws IOException { public void arraysAreQuotedInFlatMode() throws IOException { setFlatOption(true); - final String query = String.format(Locale.ROOT, "SELECT * FROM %s WHERE _id = 5", - TEST_INDEX_NESTED_TYPE); + final String query = + String.format(Locale.ROOT, "SELECT * FROM %s WHERE _id = 5", TEST_INDEX_NESTED_TYPE); final String result = executeQueryWithStringOutput(query); final String expectedMyNum = "\"[3, 4]\""; @@ -168,15 +163,19 @@ public void fieldOrderOther() throws IOException { public void fieldOrderWithScriptFields() throws IOException { final String[] expectedFields = {"email", "script1", "script2", "gender", "address"}; - final String query = String.format(Locale.ROOT, "SELECT email, " + - "script(script1, \"doc['balance'].value * 2\"), " + - "script(script2, painless, \"doc['balance'].value + 10\"), gender, address " + - "FROM %s WHERE email='amberduke@pyrami.com'", TEST_INDEX_ACCOUNT); + final String query = + String.format( + Locale.ROOT, + "SELECT email, " + + "script(script1, \"doc['balance'].value * 2\"), " + + "script(script2, painless, \"doc['balance'].value + 10\"), gender, address " + + "FROM %s WHERE email='amberduke@pyrami.com'", + TEST_INDEX_ACCOUNT); verifyFieldOrder(expectedFields, query); } - //region Tests migrated from CSVResultsExtractorTests + // region Tests migrated from CSVResultsExtractorTests @Test public void simpleSearchResultNotNestedNotFlatNoAggs() throws Exception { @@ -197,8 +196,8 @@ public void 
simpleSearchResultNotNestedNotFlatNoAggs() throws Exception { @Test public void simpleSearchResultWithNestedNotFlatNoAggs() throws Exception { - String query = String.format(Locale.ROOT, "select name,house from %s", - TEST_INDEX_GAME_OF_THRONES); + String query = + String.format(Locale.ROOT, "select name,house from %s", TEST_INDEX_GAME_OF_THRONES); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); @@ -209,21 +208,42 @@ public void simpleSearchResultWithNestedNotFlatNoAggs() throws Exception { List lines = csvResult.getLines(); Assert.assertEquals(7, lines.size()); - Assert.assertThat(lines, hasRow(null, "Targaryen", - Arrays.asList("firstname=Daenerys", "lastname=Targaryen", "ofHerName=1"), true)); - Assert.assertThat(lines, hasRow(null, "Stark", - Arrays.asList("firstname=Eddard", "lastname=Stark", "ofHisName=1"), true)); - Assert.assertThat(lines, hasRow(null, "Stark", - Arrays.asList("firstname=Brandon", "lastname=Stark", "ofHisName=4"), true)); - Assert.assertThat(lines, hasRow(null, "Lannister", - Arrays.asList("firstname=Jaime", "lastname=Lannister", "ofHisName=1"), true)); + Assert.assertThat( + lines, + hasRow( + null, + "Targaryen", + Arrays.asList("firstname=Daenerys", "lastname=Targaryen", "ofHerName=1"), + true)); + Assert.assertThat( + lines, + hasRow( + null, + "Stark", + Arrays.asList("firstname=Eddard", "lastname=Stark", "ofHisName=1"), + true)); + Assert.assertThat( + lines, + hasRow( + null, + "Stark", + Arrays.asList("firstname=Brandon", "lastname=Stark", "ofHisName=4"), + true)); + Assert.assertThat( + lines, + hasRow( + null, + "Lannister", + Arrays.asList("firstname=Jaime", "lastname=Lannister", "ofHisName=1"), + true)); } @Ignore("headers incorrect in case of nested fields") @Test public void simpleSearchResultWithNestedOneFieldNotFlatNoAggs() throws Exception { - String query = String.format(Locale.ROOT, "select name.firstname,house from %s", - TEST_INDEX_GAME_OF_THRONES); + String query = + 
String.format( + Locale.ROOT, "select name.firstname,house from %s", TEST_INDEX_GAME_OF_THRONES); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); @@ -237,14 +257,16 @@ public void simpleSearchResultWithNestedOneFieldNotFlatNoAggs() throws Exception Assert.assertThat(lines, hasItem("{firstname=Eddard},Stark")); Assert.assertThat(lines, hasItem("{firstname=Brandon},Stark")); Assert.assertThat(lines, hasItem("{firstname=Jaime},Lannister")); - } @Ignore("headers incorrect in case of nested fields") @Test public void simpleSearchResultWithNestedTwoFieldsFromSameNestedNotFlatNoAggs() throws Exception { - String query = String.format(Locale.ROOT, "select name.firstname,name.lastname,house from %s", - TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "select name.firstname,name.lastname,house from %s", + TEST_INDEX_GAME_OF_THRONES); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); @@ -255,20 +277,23 @@ public void simpleSearchResultWithNestedTwoFieldsFromSameNestedNotFlatNoAggs() t List lines = csvResult.getLines(); Assert.assertEquals(7, lines.size()); - Assert.assertThat(lines, hasRow(null, "Targaryen", - Arrays.asList("firstname=Daenerys", "lastname=Targaryen"), true)); - Assert.assertThat(lines, hasRow(null, "Stark", - Arrays.asList("firstname=Eddard", "lastname=Stark"), true)); - Assert.assertThat(lines, hasRow(null, "Stark", - Arrays.asList("firstname=Brandon", "lastname=Stark"), true)); - Assert.assertThat(lines, hasRow(null, "Lannister", - Arrays.asList("firstname=Jaime", "lastname=Lannister"), true)); + Assert.assertThat( + lines, + hasRow(null, "Targaryen", Arrays.asList("firstname=Daenerys", "lastname=Targaryen"), true)); + Assert.assertThat( + lines, hasRow(null, "Stark", Arrays.asList("firstname=Eddard", "lastname=Stark"), true)); + Assert.assertThat( + lines, hasRow(null, "Stark", Arrays.asList("firstname=Brandon", "lastname=Stark"), 
true)); + Assert.assertThat( + lines, + hasRow(null, "Lannister", Arrays.asList("firstname=Jaime", "lastname=Lannister"), true)); } @Test public void simpleSearchResultWithNestedWithFlatNoAggs() throws Exception { - String query = String.format(Locale.ROOT, "select name.firstname,house from %s", - TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, "select name.firstname,house from %s", TEST_INDEX_GAME_OF_THRONES); CSVResult csvResult = executeCsvRequest(query, true); List headers = csvResult.getHeaders(); @@ -286,9 +311,12 @@ public void simpleSearchResultWithNestedWithFlatNoAggs() throws Exception { @Test public void joinSearchResultNotNestedNotFlatNoAggs() throws Exception { - String query = String.format(Locale.ROOT, "select c.gender , h.hname,h.words from %s c " + - "JOIN %s h " + - "on h.hname = c.house ", TEST_INDEX_GAME_OF_THRONES, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "select c.gender , h.hname,h.words from %s c " + "JOIN %s h " + "on h.hname = c.house ", + TEST_INDEX_GAME_OF_THRONES, + TEST_INDEX_GAME_OF_THRONES); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); @@ -300,8 +328,8 @@ public void joinSearchResultNotNestedNotFlatNoAggs() throws Exception { List lines = csvResult.getLines(); Assert.assertEquals(4, lines.size()); - Assert.assertThat(lines, - hasRow(null, null, Arrays.asList("F", "fireAndBlood", "Targaryen"), false)); + Assert.assertThat( + lines, hasRow(null, null, Arrays.asList("F", "fireAndBlood", "Targaryen"), false)); } @Test @@ -313,7 +341,6 @@ public void simpleNumericValueAgg() throws Exception { Assert.assertEquals(1, headers.size()); Assert.assertEquals("count(*)", headers.get(0)); - List lines = csvResult.getLines(); Assert.assertEquals(1, lines.size()); Assert.assertEquals("2", lines.get(0)); @@ -329,18 +356,16 @@ public void simpleNumericValueAggWithAlias() throws Exception { Assert.assertEquals(1, headers.size()); 
Assert.assertEquals("myAlias", headers.get(0)); - List lines = csvResult.getLines(); Assert.assertEquals(1, lines.size()); Assert.assertEquals("3.0", lines.get(0)); - } @Ignore("only work for legacy engine") public void twoNumericAggWithAlias() throws Exception { String query = - String.format(Locale.ROOT, "select count(*) as count, avg(age) as myAlias from %s ", - TEST_INDEX_DOG); + String.format( + Locale.ROOT, "select count(*) as count, avg(age) as myAlias from %s ", TEST_INDEX_DOG); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); @@ -349,7 +374,6 @@ public void twoNumericAggWithAlias() throws Exception { Assert.assertTrue(headers.contains("count")); Assert.assertTrue(headers.contains("myAlias")); - List lines = csvResult.getLines(); Assert.assertEquals(1, lines.size()); Assert.assertEquals("2,3.0", lines.get(0)); @@ -357,8 +381,8 @@ public void twoNumericAggWithAlias() throws Exception { @Test public void aggAfterTermsGroupBy() throws Exception { - String query = String.format(Locale.ROOT, "SELECT COUNT(*) FROM %s GROUP BY gender", - TEST_INDEX_ACCOUNT); + String query = + String.format(Locale.ROOT, "SELECT COUNT(*) FROM %s GROUP BY gender", TEST_INDEX_ACCOUNT); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); Assert.assertEquals(1, headers.size()); @@ -371,9 +395,11 @@ public void aggAfterTermsGroupBy() throws Exception { @Test public void aggAfterTwoTermsGroupBy() throws Exception { - String query = String.format(Locale.ROOT, - "SELECT COUNT(*) FROM %s where age in (35,36) GROUP BY gender,age", - TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "SELECT COUNT(*) FROM %s where age in (35,36) GROUP BY gender,age", + TEST_INDEX_ACCOUNT); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); Assert.assertEquals(1, headers.size()); @@ -381,18 +407,17 @@ public void aggAfterTwoTermsGroupBy() throws Exception { List 
lines = csvResult.getLines(); Assert.assertEquals(4, lines.size()); - assertThat(lines, containsInAnyOrder( - equalTo("31"), - equalTo("28"), - equalTo("21"), - equalTo("24"))); + assertThat( + lines, containsInAnyOrder(equalTo("31"), equalTo("28"), equalTo("21"), equalTo("24"))); } @Test public void multipleAggAfterTwoTermsGroupBy() throws Exception { - String query = String.format(Locale.ROOT, - "SELECT COUNT(*) , sum(balance) FROM %s where age in (35,36) GROUP BY gender,age", - TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "SELECT COUNT(*) , sum(balance) FROM %s where age in (35,36) GROUP BY gender,age", + TEST_INDEX_ACCOUNT); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); Assert.assertEquals(2, headers.size()); @@ -400,18 +425,23 @@ public void multipleAggAfterTwoTermsGroupBy() throws Exception { List lines = csvResult.getLines(); Assert.assertEquals(4, lines.size()); - assertThat(lines, containsInAnyOrder( - equalTo("31,647425"), - equalTo("28,678337"), - equalTo("21,505660"), - equalTo("24,472771"))); + assertThat( + lines, + containsInAnyOrder( + equalTo("31,647425"), + equalTo("28,678337"), + equalTo("21,505660"), + equalTo("24,472771"))); } @Test public void dateHistogramTest() throws Exception { - String query = String.format(Locale.ROOT, "select count(*) from %s" + - " group by date_histogram('field'='insert_time','fixed_interval'='4d','alias'='days')", - TEST_INDEX_ONLINE); + String query = + String.format( + Locale.ROOT, + "select count(*) from %s group by" + + " date_histogram('field'='insert_time','fixed_interval'='4d','alias'='days')", + TEST_INDEX_ONLINE); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); Assert.assertEquals(1, headers.size()); @@ -419,10 +449,7 @@ public void dateHistogramTest() throws Exception { List lines = csvResult.getLines(); Assert.assertEquals(3, lines.size()); - assertThat(lines, containsInAnyOrder( - 
equalTo("477.0"), - equalTo("5664.0"), - equalTo("3795.0"))); + assertThat(lines, containsInAnyOrder(equalTo("477.0"), equalTo("5664.0"), equalTo("3795.0"))); } @Test @@ -449,10 +476,16 @@ public void extendedStatsAggregationTest() throws Exception { CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); - final String[] expectedHeaders = {"EXTENDED_STATS(age).count", "EXTENDED_STATS(age).sum", - "EXTENDED_STATS(age).avg", "EXTENDED_STATS(age).min", "EXTENDED_STATS(age).max", - "EXTENDED_STATS(age).sumOfSquares", "EXTENDED_STATS(age).variance", - "EXTENDED_STATS(age).stdDeviation"}; + final String[] expectedHeaders = { + "EXTENDED_STATS(age).count", + "EXTENDED_STATS(age).sum", + "EXTENDED_STATS(age).avg", + "EXTENDED_STATS(age).min", + "EXTENDED_STATS(age).max", + "EXTENDED_STATS(age).sumOfSquares", + "EXTENDED_STATS(age).variance", + "EXTENDED_STATS(age).stdDeviation" + }; Assert.assertEquals(expectedHeaders.length, headers.size()); Assert.assertThat(headers, contains(expectedHeaders)); @@ -468,7 +501,9 @@ public void extendedStatsAggregationTest() throws Exception { @Test public void percentileAggregationTest() throws Exception { String query = - String.format(Locale.ROOT, "select percentiles(age) as per from %s where age > 31", + String.format( + Locale.ROOT, + "select percentiles(age) as per from %s where age > 31", TEST_INDEX_ACCOUNT); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); @@ -481,7 +516,6 @@ public void percentileAggregationTest() throws Exception { Assert.assertEquals("per.95.0", headers.get(5)); Assert.assertEquals("per.99.0", headers.get(6)); - List lines = csvResult.getLines(); Assert.assertEquals(1, lines.size()); Assert.assertEquals("32.0,32.0,34.0,36.0,38.0,40.0,40.0", lines.get(0)); @@ -489,9 +523,11 @@ public void percentileAggregationTest() throws Exception { @Test public void includeScore() throws Exception { - String query = String.format(Locale.ROOT, - 
"select age, firstname, _score from %s where age > 31 order by _score desc limit 2 ", - TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "select age, firstname, _score from %s where age > 31 order by _score desc limit 2 ", + TEST_INDEX_ACCOUNT); CSVResult csvResult = executeCsvRequest(query, false, true, false); List headers = csvResult.getHeaders(); Assert.assertEquals(3, headers.size()); @@ -511,9 +547,11 @@ public void includeScore() throws Exception { @Test public void scriptedField() throws Exception { - String query = String.format(Locale.ROOT, - "select age+1 as agePlusOne ,age , firstname from %s where age = 31 limit 1", - TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "select age+1 as agePlusOne ,age , firstname from %s where age = 31 limit 1", + TEST_INDEX_ACCOUNT); CSVResult csvResult = executeCsvRequest(query, false); List headers = csvResult.getHeaders(); Assert.assertEquals(3, headers.size()); @@ -521,8 +559,11 @@ public void scriptedField() throws Exception { Assert.assertTrue(headers.contains("age")); Assert.assertTrue(headers.contains("firstname")); List lines = csvResult.getLines(); - Assert.assertTrue(lines.get(0).contains("32,31") || lines.get(0).contains("32.0,31.0") || - lines.get(0).contains("31,32") || lines.get(0).contains("31.0,32.0")); + Assert.assertTrue( + lines.get(0).contains("32,31") + || lines.get(0).contains("32.0,31.0") + || lines.get(0).contains("31,32") + || lines.get(0).contains("31.0,32.0")); } @Ignore("separator not exposed") @@ -541,13 +582,15 @@ public void twoCharsSeperator() throws Exception { Assert.assertEquals(2, lines.size()); Assert.assertTrue("rex||2".equals(lines.get(0)) || "2||rex".equals(lines.get(0))); Assert.assertTrue("snoopy||4".equals(lines.get(1)) || "4||snoopy".equals(lines.get(1))); - } @Ignore("tested in @see: org.opensearch.sql.sql.IdentifierIT.testMetafieldIdentifierTest") public void includeIdAndNotTypeOrScore() throws Exception { - String query = 
String.format(Locale.ROOT, - "select age, firstname, _id from %s where lastname = 'Marquez' ", TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "select age, firstname, _id from %s where lastname = 'Marquez' ", + TEST_INDEX_ACCOUNT); CSVResult csvResult = executeCsvRequest(query, false, false, true); List headers = csvResult.getHeaders(); Assert.assertEquals(3, headers.size()); @@ -557,15 +600,16 @@ public void includeIdAndNotTypeOrScore() throws Exception { List lines = csvResult.getLines(); Assert.assertTrue(lines.get(0).contains(",437") || lines.get(0).contains("437,")); } - //endregion Tests migrated from CSVResultsExtractorTests + + // endregion Tests migrated from CSVResultsExtractorTests @Ignore("only work for legacy engine") public void sensitiveCharacterSanitizeTest() throws IOException { String requestBody = - "{" + - " \"=cmd|' /C notepad'!_xlbgnm.A1\": \"+cmd|' /C notepad'!_xlbgnm.A1\",\n" + - " \"-cmd|' /C notepad'!_xlbgnm.A1\": \"@cmd|' /C notepad'!_xlbgnm.A1\"\n" + - "}"; + "{" + + " \"=cmd|' /C notepad'!_xlbgnm.A1\": \"+cmd|' /C notepad'!_xlbgnm.A1\",\n" + + " \"-cmd|' /C notepad'!_xlbgnm.A1\": \"@cmd|' /C notepad'!_xlbgnm.A1\"\n" + + "}"; Request request = new Request("PUT", "/userdata/_doc/1?refresh=true"); request.setJsonEntity(requestBody); @@ -586,11 +630,11 @@ public void sensitiveCharacterSanitizeTest() throws IOException { @Ignore("only work for legacy engine") public void sensitiveCharacterSanitizeAndQuotedTest() throws IOException { String requestBody = - "{" + - " \"=cmd|' /C notepad'!_xlbgnm.A1,,\": \",+cmd|' /C notepad'!_xlbgnm.A1\",\n" + - " \",@cmd|' /C notepad'!_xlbgnm.A1\": \"+cmd|' /C notepad,,'!_xlbgnm.A1\",\n" + - " \"-cmd|' /C notepad,,'!_xlbgnm.A1\": \",,,@cmd|' /C notepad'!_xlbgnm.A1\"\n" + - "}"; + "{" + + " \"=cmd|' /C notepad'!_xlbgnm.A1,,\": \",+cmd|' /C notepad'!_xlbgnm.A1\",\n" + + " \",@cmd|' /C notepad'!_xlbgnm.A1\": \"+cmd|' /C notepad,,'!_xlbgnm.A1\",\n" + + " \"-cmd|' /C 
notepad,,'!_xlbgnm.A1\": \",,,@cmd|' /C notepad'!_xlbgnm.A1\"\n" + + "}"; Request request = new Request("PUT", "/userdata2/_doc/1?refresh=true"); request.setJsonEntity(requestBody); @@ -611,8 +655,11 @@ public void sensitiveCharacterSanitizeAndQuotedTest() throws IOException { @Test public void sanitizeTest() throws IOException { - CSVResult csvResult = executeCsvRequest( - String.format(Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_CSV_SANITIZE), false); + CSVResult csvResult = + executeCsvRequest( + String.format( + Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_CSV_SANITIZE), + false); List lines = csvResult.getLines(); assertEquals(5, lines.size()); assertEquals(lines.get(0), "'+Amber JOHnny,Duke Willmington+"); @@ -633,8 +680,12 @@ public void selectFunctionAsFieldTest() throws IOException { private void verifyFieldOrder(final String[] expectedFields) throws IOException { final String fields = String.join(", ", expectedFields); - final String query = String.format(Locale.ROOT, "SELECT %s FROM %s " + - "WHERE email='amberduke@pyrami.com'", fields, TEST_INDEX_ACCOUNT); + final String query = + String.format( + Locale.ROOT, + "SELECT %s FROM %s " + "WHERE email='amberduke@pyrami.com'", + fields, + TEST_INDEX_ACCOUNT); verifyFieldOrder(expectedFields, query); } @@ -658,13 +709,18 @@ private CSVResult executeCsvRequest(final String query, boolean flat) throws IOE return executeCsvRequest(query, flat, false, false); } - private CSVResult executeCsvRequest(final String query, boolean flat, boolean includeScore, - boolean includeId) throws IOException { + private CSVResult executeCsvRequest( + final String query, boolean flat, boolean includeScore, boolean includeId) + throws IOException { final String requestBody = super.makeRequest(query); - final String endpoint = String.format(Locale.ROOT, - "/_plugins/_sql?format=csv&flat=%b&_id=%b&_score=%b", - flat, includeId, includeScore); + final String endpoint = + String.format( + 
Locale.ROOT, + "/_plugins/_sql?format=csv&flat=%b&_id=%b&_score=%b", + flat, + includeId, + includeScore); final Request sqlRequest = new Request("POST", endpoint); sqlRequest.setJsonEntity(requestBody); RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); @@ -702,22 +758,32 @@ private CSVResult csvResultFromStringResponse(final String response) { return new CSVResult(headers, rows); } - private static AnyOf> hasRow(final String prefix, final String suffix, - final List items, - final boolean areItemsNested) { + private static AnyOf> hasRow( + final String prefix, + final String suffix, + final List items, + final boolean areItemsNested) { final Collection> permutations = TestUtils.getPermutations(items); - final List>> matchers = permutations.stream().map(permutation -> { - - final String delimiter = areItemsNested ? ", " : ","; - final String objectField = String.join(delimiter, permutation); - final String row = String.format(Locale.ROOT, "%s%s%s%s%s", - printablePrefix(prefix), areItemsNested ? "\"{" : "", - objectField, areItemsNested ? "}\"" : "", printableSuffix(suffix)); - return hasItem(row); - - }).collect(Collectors.toCollection(LinkedList::new)); + final List>> matchers = + permutations.stream() + .map( + permutation -> { + final String delimiter = areItemsNested ? ", " : ","; + final String objectField = String.join(delimiter, permutation); + final String row = + String.format( + Locale.ROOT, + "%s%s%s%s%s", + printablePrefix(prefix), + areItemsNested ? "\"{" : "", + objectField, + areItemsNested ? 
"}\"" : "", + printableSuffix(suffix)); + return hasItem(row); + }) + .collect(Collectors.toCollection(LinkedList::new)); return anyOf(matchers); } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/CursorIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/CursorIT.java index b246bb6224..b6a18d6de9 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/CursorIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/CursorIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.containsString; @@ -43,9 +42,8 @@ protected void init() throws Exception { } /** - * Acceptable fetch_size are positive numbers. - * For example 0, 24, 53.0, "110" (parsable string) , "786.23" - * Negative values should throw 400 + * Acceptable fetch_size are positive numbers. For example 0, 24, 53.0, "110" (parsable string) , + * "786.23" Negative values should throw 400 */ @Test public void invalidNegativeFetchSize() throws IOException { @@ -65,9 +63,7 @@ public void invalidNegativeFetchSize() throws IOException { assertThat(resp.query("/error/type"), equalTo("IllegalArgumentException")); } - /** - * Non-numeric fetch_size value should throw 400 - */ + /** Non-numeric fetch_size value should throw 400 */ @Test public void invalidNonNumericFetchSize() throws IOException { String query = @@ -105,19 +101,22 @@ public void testExceptionOnCursorExplain() throws IOException { } /** - * For fetch_size = 0, default to non-pagination behaviour for simple queries - * This can be verified by checking that cursor is not present, and old default limit applies + * For fetch_size = 0, default to non-pagination behaviour for simple queries This can be verified + * by checking that cursor is not present, and old default limit applies */ @Test public void noPaginationWhenFetchSizeZero() throws IOException { String selectQuery = StringUtils.format("SELECT firstname, state FROM %s", 
TEST_INDEX_ACCOUNT); JSONObject response = new JSONObject(executeFetchQuery(selectQuery, 0, JDBC)); assertFalse(response.has(CURSOR)); - assertThat(response.getJSONArray(DATAROWS).length(), equalTo(1000)); // Default limit is 1000 in new engine + assertThat( + response.getJSONArray(DATAROWS).length(), + equalTo(1000)); // Default limit is 1000 in new engine } /** - * The index has 1000 records, with fetch size of 50 we should get 20 pages with no cursor on last page + * The index has 1000 records, with fetch size of 50 we should get 20 pages with no cursor on last + * page */ @Test public void validNumberOfPages() throws IOException { @@ -128,7 +127,7 @@ public void validNumberOfPages() throws IOException { int pageCount = 1; - while (!cursor.isEmpty()) { //this condition also checks that there is no cursor on last page + while (!cursor.isEmpty()) { // this condition also checks that there is no cursor on last page response = executeCursorQuery(cursor); cursor = response.optString(CURSOR); if (!cursor.isEmpty()) { @@ -162,7 +161,6 @@ public void validNumberOfPages() throws IOException { assertThat(pageCount, equalTo(36)); } - @Test public void validTotalResultWithAndWithoutPagination() throws IOException { // simple query - accounts index has 1000 docs, using higher limit to get all docs @@ -172,71 +170,65 @@ public void validTotalResultWithAndWithoutPagination() throws IOException { @Test public void validTotalResultWithAndWithoutPaginationWhereClause() throws IOException { - String selectQuery = StringUtils.format( - "SELECT firstname, state FROM %s WHERE balance < 25000 AND age > 32", TEST_INDEX_ACCOUNT - ); + String selectQuery = + StringUtils.format( + "SELECT firstname, state FROM %s WHERE balance < 25000 AND age > 32", + TEST_INDEX_ACCOUNT); verifyWithAndWithoutPaginationResponse(selectQuery + " LIMIT 2000", selectQuery, 17, false); } @Test public void validTotalResultWithAndWithoutPaginationOrderBy() throws IOException { - String selectQuery = 
StringUtils.format( - "SELECT firstname, state FROM %s ORDER BY balance DESC ", TEST_INDEX_ACCOUNT - ); + String selectQuery = + StringUtils.format( + "SELECT firstname, state FROM %s ORDER BY balance DESC ", TEST_INDEX_ACCOUNT); verifyWithAndWithoutPaginationResponse(selectQuery + " LIMIT 2000", selectQuery, 26, false); } @Test public void validTotalResultWithAndWithoutPaginationWhereAndOrderBy() throws IOException { - String selectQuery = StringUtils.format( - "SELECT firstname, state FROM %s WHERE balance < 25000 ORDER BY balance ASC ", - TEST_INDEX_ACCOUNT - ); + String selectQuery = + StringUtils.format( + "SELECT firstname, state FROM %s WHERE balance < 25000 ORDER BY balance ASC ", + TEST_INDEX_ACCOUNT); verifyWithAndWithoutPaginationResponse(selectQuery + " LIMIT 2000", selectQuery, 80, false); } @Test public void validTotalResultWithAndWithoutPaginationNested() throws IOException { loadIndex(Index.NESTED_SIMPLE); - String selectQuery = StringUtils.format( - "SELECT name, a.city, a.state FROM %s m , m.address as a ", TEST_INDEX_NESTED_SIMPLE - ); + String selectQuery = + StringUtils.format( + "SELECT name, a.city, a.state FROM %s m , m.address as a ", TEST_INDEX_NESTED_SIMPLE); verifyWithAndWithoutPaginationResponse(selectQuery + " LIMIT 2000", selectQuery, 1, true); } @Test public void noCursorWhenResultsLessThanFetchSize() throws IOException { // fetch_size is 100, but actual number of rows returned from OpenSearch is 97 - // a scroll context will be opened but will be closed after first page as all records are fetched - String selectQuery = StringUtils.format( - "SELECT * FROM %s WHERE balance < 25000 AND age > 36 LIMIT 2000", TEST_INDEX_ACCOUNT - ); + // a scroll context will be opened but will be closed after first page as all records are + // fetched + String selectQuery = + StringUtils.format( + "SELECT * FROM %s WHERE balance < 25000 AND age > 36 LIMIT 2000", TEST_INDEX_ACCOUNT); JSONObject response = new JSONObject(executeFetchQuery(selectQuery, 
100, JDBC)); assertFalse(response.has(CURSOR)); } @Ignore("Temporary deactivate the test until parameter substitution implemented in V2") - // Test was passing before, because such paging query was executed in V1, but now it is executed in V2 + // Test was passing before, because such paging query was executed in V1, but now it is executed + // in V2 @Test public void testCursorWithPreparedStatement() throws IOException { - JSONObject response = executeJDBCRequest(String.format("{" + - " \"fetch_size\": 200," + - " \"query\": \" SELECT age, state FROM %s WHERE age > ? OR state IN (?, ?)\"," + - " \"parameters\": [" + - " {" + - " \"type\": \"integer\"," + - " \"value\": 25" + - " }," + - " {" + - " \"type\": \"string\"," + - " \"value\": \"WA\"" + - " }," + - " {" + - " \"type\": \"string\"," + - " \"value\": \"UT\"" + - " }" + - " ]" + - "}", TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeJDBCRequest( + String.format( + "{ \"fetch_size\": 200, \"query\": \" SELECT age, state FROM %s WHERE age > ? 
OR" + + " state IN (?, ?)\", \"parameters\": [ { \"type\": \"integer\", " + + " \"value\": 25 }, { \"type\": \"string\", \"value\":" + + " \"WA\" }, { \"type\": \"string\", \"value\": \"UT\"" + + " } ]}", + TestsConstants.TEST_INDEX_ACCOUNT)); assertTrue(response.has(CURSOR)); verifyIsV1Cursor(response.getString(CURSOR)); @@ -247,15 +239,11 @@ public void testRegressionOnDateFormatChange() throws IOException { loadIndex(Index.DATETIME); /** * With pagination, the field should be date formatted to MySQL format as in - * @see PR #367PR #367TEST_INDEX_DATE_TIME has three docs with login_time as date field with following + * values 1.2015-01-01 2.2015-01-01T12:10:30Z 3.1585882955 4.2020-04-08T11:10:30+05:00 */ - List actualDateList = new ArrayList<>(); String selectQuery = StringUtils.format("SELECT login_time FROM %s LIMIT 500", TEST_INDEX_DATE_TIME); @@ -271,16 +259,16 @@ public void testRegressionOnDateFormatChange() throws IOException { actualDateList.add(response.getJSONArray(DATAROWS).getJSONArray(0).getString(0)); } - List expectedDateList = Arrays.asList( - "2015-01-01 00:00:00.000", - "2015-01-01 12:10:30.000", - "1585882955", // by existing design, this is not formatted in MySQL standard format - "2020-04-08 06:10:30.000"); + List expectedDateList = + Arrays.asList( + "2015-01-01 00:00:00.000", + "2015-01-01 12:10:30.000", + "1585882955", // by existing design, this is not formatted in MySQL standard format + "2020-04-08 06:10:30.000"); assertThat(actualDateList, equalTo(expectedDateList)); } - @Ignore("Breaking change for OpenSearch: deprecate and enable cursor always") @Test public void defaultBehaviorWhenCursorSettingIsDisabled() throws IOException { @@ -296,7 +284,6 @@ public void defaultBehaviorWhenCursorSettingIsDisabled() throws IOException { wipeAllClusterSettings(); } - @Test public void testCursorSettings() throws IOException { // Assert default cursor settings @@ -307,13 +294,11 @@ public void testCursorSettings() throws IOException { new 
ClusterSetting(PERSISTENT, Settings.Key.SQL_CURSOR_KEEP_ALIVE.getKeyValue(), "200s")); clusterSettings = getAllClusterSettings(); - assertThat(clusterSettings.query("/persistent/plugins.sql.cursor.keep_alive"), - equalTo("200s")); + assertThat(clusterSettings.query("/persistent/plugins.sql.cursor.keep_alive"), equalTo("200s")); wipeAllClusterSettings(); } - @Ignore("Breaking change for OpenSearch: no pagination if fetch_size field absent in request") @Test public void testDefaultFetchSizeFromClusterSettings() throws IOException { @@ -339,8 +324,9 @@ public void testDefaultFetchSizeFromClusterSettings() throws IOException { public void testCursorCloseAPI() throws IOException { // multiple invocation of closing cursor should return success // fetch page using old cursor should throw error - String selectQuery = StringUtils.format( - "SELECT firstname, state FROM %s WHERE balance > 100 and age < 40", TEST_INDEX_ACCOUNT); + String selectQuery = + StringUtils.format( + "SELECT firstname, state FROM %s WHERE balance > 100 and age < 40", TEST_INDEX_ACCOUNT); JSONObject result = new JSONObject(executeFetchQuery(selectQuery, 50, JDBC)); String cursor = result.getString(CURSOR); verifyIsV2Cursor(result); @@ -350,11 +336,11 @@ public void testCursorCloseAPI() throws IOException { cursor = result.optString(CURSOR); verifyIsV2Cursor(result); } - //Closing the cursor + // Closing the cursor JSONObject closeResp = executeCursorCloseQuery(cursor); assertThat(closeResp.getBoolean("succeeded"), equalTo(true)); - //Closing the cursor multiple times is idempotent + // Closing the cursor multiple times is idempotent for (int i = 0; i < 5; i++) { closeResp = executeCursorCloseQuery(cursor); assertThat(closeResp.getBoolean("succeeded"), equalTo(true)); @@ -371,8 +357,7 @@ public void testCursorCloseAPI() throws IOException { JSONObject resp = new JSONObject(TestUtils.getResponseBody(response)); assertThat(resp.getInt("status"), equalTo(404)); 
assertThat(resp.query("/error/reason").toString(), containsString("all shards failed")); - assertThat(resp.query("/error/details").toString(), - containsString("No search context found")); + assertThat(resp.query("/error/details").toString(), containsString("No search context found")); assertThat(resp.query("/error/type"), equalTo("SearchPhaseExecutionException")); } @@ -395,9 +380,9 @@ public void invalidCursorIdNotDecodable() throws IOException { } /** - * The index has 1000 records, with fetch size of 50 and LIMIT in place - * we should get Math.ceil(limit/fetchSize) pages and LIMIT number of rows. - * Basically it should not retrieve all records in presence of a smaller LIMIT value. + * The index has 1000 records, with fetch size of 50 and LIMIT in place we should get + * Math.ceil(limit/fetchSize) pages and LIMIT number of rows. Basically it should not retrieve all + * records in presence of a smaller LIMIT value. */ @Test public void respectLimitPassedInSelectClause() throws IOException { @@ -422,7 +407,6 @@ public void respectLimitPassedInSelectClause() throws IOException { assertThat(actualDataRowCount, equalTo(limit)); } - @Test public void noPaginationWithNonJDBCFormat() throws IOException { // checking for CSV, RAW format @@ -439,10 +423,9 @@ public void noPaginationWithNonJDBCFormat() throws IOException { assertThat(rows.length, equalTo(1000)); } - - public void verifyWithAndWithoutPaginationResponse(String sqlQuery, String cursorQuery, - int fetch_size, boolean shouldFallBackToV1) - throws IOException { + public void verifyWithAndWithoutPaginationResponse( + String sqlQuery, String cursorQuery, int fetch_size, boolean shouldFallBackToV1) + throws IOException { // we are only checking here for schema and datarows JSONObject withoutCursorResponse = new JSONObject(executeFetchQuery(sqlQuery, 0, JDBC)); @@ -473,10 +456,10 @@ public void verifyWithAndWithoutPaginationResponse(String sqlQuery, String curso } } - 
verifySchema(withoutCursorResponse.optJSONArray(SCHEMA), - withCursorResponse.optJSONArray(SCHEMA)); - verifyDataRows(withoutCursorResponse.optJSONArray(DATAROWS), - withCursorResponse.optJSONArray(DATAROWS)); + verifySchema( + withoutCursorResponse.optJSONArray(SCHEMA), withCursorResponse.optJSONArray(SCHEMA)); + verifyDataRows( + withoutCursorResponse.optJSONArray(DATAROWS), withCursorResponse.optJSONArray(DATAROWS)); } public void verifySchema(JSONArray schemaOne, JSONArray schemaTwo) { @@ -504,14 +487,14 @@ private void verifyIsV1Cursor(String cursor) { if (cursor.isEmpty()) { return; } - assertTrue("The cursor '" + cursor.substring(0, 50) + "...' is not from v1 engine.", cursor.startsWith("d:")); + assertTrue( + "The cursor '" + cursor.substring(0, 50) + "...' is not from v1 engine.", + cursor.startsWith("d:")); } private String makeRequest(String query, String fetch_size) { - return String.format("{" + - " \"fetch_size\": \"%s\"," + - " \"query\": \"%s\"" + - "}", fetch_size, query); + return String.format( + "{" + " \"fetch_size\": \"%s\"," + " \"query\": \"%s\"" + "}", fetch_size, query); } private JSONObject executeJDBCRequest(String requestBody) throws IOException { diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/DateFormatIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/DateFormatIT.java index a0b4b19898..7739bcd74b 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/DateFormatIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/DateFormatIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.contains; @@ -33,8 +32,7 @@ public class DateFormatIT extends SQLIntegTestCase { private static final String SELECT_FROM = - "SELECT insert_time " + - "FROM " + TestsConstants.TEST_INDEX_ONLINE + " "; + "SELECT insert_time " + "FROM " + TestsConstants.TEST_INDEX_ONLINE + " "; @Override protected void init() throws Exception { @@ 
-42,21 +40,20 @@ protected void init() throws Exception { } /** - * All of the following tests use UTC as their date_format timezone as this is the same timezone of the data - * being queried. This is to prevent discrepancies in the OpenSearch query and the actual field data that is - * being checked for the integration tests. - *

- * Large LIMIT values were given for some of these queries since the default result size of the query is 200 and - * this ends up excluding some of the expected values causing the assertion to fail. LIMIT overrides this. + * All of the following tests use UTC as their date_format timezone as this is the same timezone + * of the data being queried. This is to prevent discrepancies in the OpenSearch query and the + * actual field data that is being checked for the integration tests. + * + *

Large LIMIT values were given for some of these queries since the default result size of the + * query is 200 and this ends up excluding some of the expected values causing the assertion to + * fail. LIMIT overrides this. */ - @Test public void equalTo() throws SqlParseException { assertThat( dateQuery( SELECT_FROM + "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') = '2014-08-17'"), - contains("2014-08-17") - ); + contains("2014-08-17")); } @Test @@ -64,19 +61,18 @@ public void lessThan() throws SqlParseException { assertThat( dateQuery( SELECT_FROM + "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') < '2014-08-18'"), - contains("2014-08-17") - ); + contains("2014-08-17")); } @Test public void lessThanOrEqualTo() throws SqlParseException { assertThat( dateQuery( - SELECT_FROM + "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') <= '2014-08-18' " + - "ORDER BY insert_time " + - "LIMIT 1000"), - contains("2014-08-17", "2014-08-18") - ); + SELECT_FROM + + "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') <= '2014-08-18' " + + "ORDER BY insert_time " + + "LIMIT 1000"), + contains("2014-08-17", "2014-08-18")); } @Test @@ -84,92 +80,101 @@ public void greaterThan() throws SqlParseException { assertThat( dateQuery( SELECT_FROM + "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') > '2014-08-23'"), - contains("2014-08-24") - ); + contains("2014-08-24")); } @Test public void greaterThanOrEqualTo() throws SqlParseException { assertThat( dateQuery( - SELECT_FROM + "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') >= '2014-08-23' " + - "ORDER BY insert_time " + - "LIMIT 2000"), - contains("2014-08-23", "2014-08-24") - ); + SELECT_FROM + + "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') >= '2014-08-23' " + + "ORDER BY insert_time " + + "LIMIT 2000"), + contains("2014-08-23", "2014-08-24")); } @Test public void and() throws SqlParseException { assertThat( - dateQuery(SELECT_FROM + - "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') >= '2014-08-21' " + - 
"AND date_format(insert_time, 'yyyy-MM-dd', 'UTC') <= '2014-08-23' " + - "ORDER BY insert_time " + - "LIMIT 3000"), - contains("2014-08-21", "2014-08-22", "2014-08-23") - ); + dateQuery( + SELECT_FROM + + "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') >= '2014-08-21' " + + "AND date_format(insert_time, 'yyyy-MM-dd', 'UTC') <= '2014-08-23' " + + "ORDER BY insert_time " + + "LIMIT 3000"), + contains("2014-08-21", "2014-08-22", "2014-08-23")); } @Test public void andWithDefaultTimeZone() throws SqlParseException { assertThat( - dateQuery(SELECT_FROM + - "WHERE date_format(insert_time, 'yyyy-MM-dd HH:mm:ss') >= '2014-08-17 16:13:12' " + - "AND date_format(insert_time, 'yyyy-MM-dd HH:mm:ss') <= '2014-08-17 16:13:13'", + dateQuery( + SELECT_FROM + + "WHERE date_format(insert_time, 'yyyy-MM-dd HH:mm:ss') >= '2014-08-17 16:13:12' " + + "AND date_format(insert_time, 'yyyy-MM-dd HH:mm:ss') <= '2014-08-17 16:13:13'", "yyyy-MM-dd HH:mm:ss"), - contains("2014-08-17 16:13:12") - ); + contains("2014-08-17 16:13:12")); } @Test public void or() throws SqlParseException { assertThat( - dateQuery(SELECT_FROM + - "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') < '2014-08-18' " + - "OR date_format(insert_time, 'yyyy-MM-dd', 'UTC') > '2014-08-23' " + - "ORDER BY insert_time " + - "LIMIT 1000"), - contains("2014-08-17", "2014-08-24") - ); + dateQuery( + SELECT_FROM + + "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') < '2014-08-18' " + + "OR date_format(insert_time, 'yyyy-MM-dd', 'UTC') > '2014-08-23' " + + "ORDER BY insert_time " + + "LIMIT 1000"), + contains("2014-08-17", "2014-08-24")); } - @Test public void sortByDateFormat() throws IOException { - // Sort by expression in descending order, but sort inside in ascending order, so we increase our confidence + // Sort by expression in descending order, but sort inside in ascending order, so we increase + // our confidence // that successful test isn't just random chance. 
JSONArray hits = - getHits(executeQuery("SELECT all_client, insert_time " + - " FROM " + TestsConstants.TEST_INDEX_ONLINE + - " ORDER BY date_format(insert_time, 'dd-MM-YYYY', 'UTC') DESC, insert_time " + - " LIMIT 10")); + getHits( + executeQuery( + "SELECT all_client, insert_time " + + " FROM " + + TestsConstants.TEST_INDEX_ONLINE + + " ORDER BY date_format(insert_time, 'dd-MM-YYYY', 'UTC') DESC, insert_time " + + " LIMIT 10")); - assertThat(new DateTime(getSource(hits.getJSONObject(0)).get("insert_time"), DateTimeZone.UTC), + assertThat( + new DateTime(getSource(hits.getJSONObject(0)).get("insert_time"), DateTimeZone.UTC), is(new DateTime("2014-08-24T00:00:41.221Z", DateTimeZone.UTC))); } @Test public void sortByAliasedDateFormat() throws IOException { JSONArray hits = - getHits(executeQuery( - "SELECT all_client, insert_time, date_format(insert_time, 'dd-MM-YYYY', 'UTC') date" + - " FROM " + TestsConstants.TEST_INDEX_ONLINE + - " ORDER BY date DESC, insert_time " + - " LIMIT 10")); + getHits( + executeQuery( + "SELECT all_client, insert_time, date_format(insert_time, 'dd-MM-YYYY', 'UTC')" + + " date FROM " + + TestsConstants.TEST_INDEX_ONLINE + + " ORDER BY date DESC, insert_time " + + " LIMIT 10")); - assertThat(new DateTime(getSource(hits.getJSONObject(0)).get("insert_time"), DateTimeZone.UTC), + assertThat( + new DateTime(getSource(hits.getJSONObject(0)).get("insert_time"), DateTimeZone.UTC), is(new DateTime("2014-08-24T00:00:41.221Z", DateTimeZone.UTC))); } @Ignore("skip this test due to inconsistency in type in new engine") @Test public void selectDateTimeWithDefaultTimeZone() throws SqlParseException { - JSONObject response = executeJdbcRequest("SELECT date_format(insert_time, 'yyyy-MM-dd') as date " + - " FROM " + TestsConstants.TEST_INDEX_ONLINE + - " WHERE date_format(insert_time, 'yyyy-MM-dd HH:mm:ss') >= '2014-08-17 16:13:12' " + - " AND date_format(insert_time, 'yyyy-MM-dd HH:mm:ss') <= '2014-08-17 16:13:13'"); + JSONObject response = + 
executeJdbcRequest( + "SELECT date_format(insert_time, 'yyyy-MM-dd') as date " + + " FROM " + + TestsConstants.TEST_INDEX_ONLINE + + " WHERE date_format(insert_time, 'yyyy-MM-dd HH:mm:ss') >= '2014-08-17 16:13:12' " + + " AND date_format(insert_time, 'yyyy-MM-dd HH:mm:ss') <= '2014-08-17 16:13:13'"); verifySchema(response, schema("date", "", "text")); verifyDataRows(response, rows("2014-08-17")); @@ -177,52 +182,57 @@ public void selectDateTimeWithDefaultTimeZone() throws SqlParseException { @Test public void groupByAndSort() throws IOException { - JSONObject aggregations = executeQuery( - "SELECT date_format(insert_time, 'dd-MM-YYYY') " + - "FROM opensearch-sql_test_index_online " + - "GROUP BY date_format(insert_time, 'dd-MM-YYYY') " + - "ORDER BY date_format(insert_time, 'dd-MM-YYYY') DESC") - .getJSONObject("aggregations"); + JSONObject aggregations = + executeQuery( + "SELECT date_format(insert_time, 'dd-MM-YYYY') " + + "FROM opensearch-sql_test_index_online " + + "GROUP BY date_format(insert_time, 'dd-MM-YYYY') " + + "ORDER BY date_format(insert_time, 'dd-MM-YYYY') DESC") + .getJSONObject("aggregations"); checkAggregations(aggregations, "date_format", Ordering.natural().reverse()); } @Test public void groupByAndSortAliasedReversed() throws IOException { - JSONObject aggregations = executeQuery( - "SELECT date_format(insert_time, 'dd-MM-YYYY') date " + - "FROM opensearch-sql_test_index_online " + - "GROUP BY date " + - "ORDER BY date DESC") - .getJSONObject("aggregations"); + JSONObject aggregations = + executeQuery( + "SELECT date_format(insert_time, 'dd-MM-YYYY') date " + + "FROM opensearch-sql_test_index_online " + + "GROUP BY date " + + "ORDER BY date DESC") + .getJSONObject("aggregations"); checkAggregations(aggregations, "date", Ordering.natural().reverse()); } @Test public void groupByAndSortAliased() throws IOException { - JSONObject aggregations = executeQuery( - "SELECT date_format(insert_time, 'dd-MM-YYYY') date " + - "FROM 
opensearch-sql_test_index_online " + - "GROUP BY date " + - "ORDER BY date ") - .getJSONObject("aggregations"); + JSONObject aggregations = + executeQuery( + "SELECT date_format(insert_time, 'dd-MM-YYYY') date " + + "FROM opensearch-sql_test_index_online " + + "GROUP BY date " + + "ORDER BY date ") + .getJSONObject("aggregations"); checkAggregations(aggregations, "date", Ordering.natural()); } - private void checkAggregations(JSONObject aggregations, String key, - Ordering ordering) { + private void checkAggregations( + JSONObject aggregations, String key, Ordering ordering) { String date = getScriptAggregationKey(aggregations, key); JSONArray buckets = aggregations.getJSONObject(date).getJSONArray("buckets"); assertThat(buckets.length(), is(8)); - List aggregationSortKeys = IntStream.range(0, 8) - .mapToObj(index -> buckets.getJSONObject(index).getString("key")) - .collect(Collectors.toList()); + List aggregationSortKeys = + IntStream.range(0, 8) + .mapToObj(index -> buckets.getJSONObject(index).getString("key")) + .collect(Collectors.toList()); - assertTrue("The query result must be sorted by date in descending order", + assertTrue( + "The query result must be sorted by date in descending order", ordering.isOrdered(aggregationSortKeys)); } @@ -239,7 +249,8 @@ private Set dateQuery(String sql, String format) throws SqlParseExceptio } } - private Set getResult(JSONObject response, String fieldName, DateTimeFormatter formatter) { + private Set getResult( + JSONObject response, String fieldName, DateTimeFormatter formatter) { JSONArray hits = getHits(response); Set result = new TreeSet<>(); // Using TreeSet so order is maintained for (int i = 0; i < hits.length(); i++) { @@ -255,11 +266,11 @@ private Set getResult(JSONObject response, String fieldName, DateTimeFor } public static String getScriptAggregationKey(JSONObject aggregation, String prefix) { - return aggregation.keySet() - .stream() + return aggregation.keySet().stream() .filter(x -> x.startsWith(prefix)) 
.findFirst() - .orElseThrow(() -> new RuntimeException( - "Can't find key" + prefix + " in aggregation " + aggregation)); + .orElseThrow( + () -> + new RuntimeException("Can't find key" + prefix + " in aggregation " + aggregation)); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/DateFunctionsIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/DateFunctionsIT.java index 369984d0a3..d9a6849fc8 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/DateFunctionsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/DateFunctionsIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.equalTo; @@ -31,13 +30,13 @@ public class DateFunctionsIT extends SQLIntegTestCase { private static final String FROM = "FROM " + TestsConstants.TEST_INDEX_ONLINE; /** - * Some of the first few SQL functions are tested in both SELECT and WHERE cases for flexibility and the remainder - * are merely tested in SELECT for simplicity. - *

- * There is a limitation in all date SQL functions in that they expect a date field as input. In the future this - * can be expanded on by supporting CAST and casting dates given as Strings to TIMESTAMP (SQL's date type). + * Some of the first few SQL functions are tested in both SELECT and WHERE cases for flexibility + * and the remainder are merely tested in SELECT for simplicity. + * + *

There is a limitation in all date SQL functions in that they expect a date field as input. + * In the future this can be expanded on by supporting CAST and casting dates given as Strings to + * TIMESTAMP (SQL's date type). */ - @Override protected void init() throws Exception { loadIndex(Index.ONLINE); @@ -45,9 +44,7 @@ protected void init() throws Exception { @Test public void year() throws IOException { - SearchHit[] hits = query( - "SELECT YEAR(insert_time) as year" - ); + SearchHit[] hits = query("SELECT YEAR(insert_time) as year"); for (SearchHit hit : hits) { int year = (int) getField(hit, "year"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -57,9 +54,7 @@ public void year() throws IOException { @Test public void monthOfYear() throws IOException { - SearchHit[] hits = query( - "SELECT MONTH_OF_YEAR(insert_time) as month_of_year" - ); + SearchHit[] hits = query("SELECT MONTH_OF_YEAR(insert_time) as month_of_year"); for (SearchHit hit : hits) { int monthOfYear = (int) getField(hit, "month_of_year"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -69,9 +64,7 @@ public void monthOfYear() throws IOException { @Test public void weekOfYearInSelect() throws IOException { - SearchHit[] hits = query( - "SELECT WEEK_OF_YEAR(insert_time) as week_of_year" - ); + SearchHit[] hits = query("SELECT WEEK_OF_YEAR(insert_time) as week_of_year"); for (SearchHit hit : hits) { int weekOfYear = (int) getField(hit, "week_of_year"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -81,12 +74,12 @@ public void weekOfYearInSelect() throws IOException { @Test public void weekOfYearInWhere() throws IOException { - SearchHit[] hits = query( - "SELECT insert_time", - "WHERE DATE_FORMAT(insert_time, 'YYYY-MM-dd') < '2014-08-19' AND " + - "WEEK_OF_YEAR(insert_time) > 33", - "LIMIT 2000" - ); + SearchHit[] hits = + query( + "SELECT insert_time", + "WHERE DATE_FORMAT(insert_time, 'YYYY-MM-dd') < '2014-08-19' AND " + + 
"WEEK_OF_YEAR(insert_time) > 33", + "LIMIT 2000"); for (SearchHit hit : hits) { DateTime insertTime = getDateFromSource(hit, "insert_time"); assertThat(insertTime.weekOfWeekyear().get(), greaterThan(33)); @@ -95,9 +88,7 @@ public void weekOfYearInWhere() throws IOException { @Test public void dayOfYearInSelect() throws IOException { - SearchHit[] hits = query( - "SELECT DAY_OF_YEAR(insert_time) as day_of_year", "LIMIT 2000" - ); + SearchHit[] hits = query("SELECT DAY_OF_YEAR(insert_time) as day_of_year", "LIMIT 2000"); for (SearchHit hit : hits) { int dayOfYear = (int) getField(hit, "day_of_year"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -107,9 +98,8 @@ public void dayOfYearInSelect() throws IOException { @Test public void dayOfYearInWhere() throws IOException { - SearchHit[] hits = query( - "SELECT insert_time", "WHERE DAY_OF_YEAR(insert_time) < 233", "LIMIT 10000" - ); + SearchHit[] hits = + query("SELECT insert_time", "WHERE DAY_OF_YEAR(insert_time) < 233", "LIMIT 10000"); for (SearchHit hit : hits) { DateTime insertTime = getDateFromSource(hit, "insert_time"); assertThat(insertTime.dayOfYear().get(), lessThan(233)); @@ -118,9 +108,7 @@ public void dayOfYearInWhere() throws IOException { @Test public void dayOfMonthInSelect() throws IOException { - SearchHit[] hits = query( - "SELECT DAY_OF_MONTH(insert_time) as day_of_month", "LIMIT 2000" - ); + SearchHit[] hits = query("SELECT DAY_OF_MONTH(insert_time) as day_of_month", "LIMIT 2000"); for (SearchHit hit : hits) { int dayOfMonth = (int) getField(hit, "day_of_month"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -130,9 +118,8 @@ public void dayOfMonthInSelect() throws IOException { @Test public void dayOfMonthInWhere() throws IOException { - SearchHit[] hits = query( - "SELECT insert_time", "WHERE DAY_OF_MONTH(insert_time) < 21", "LIMIT 10000" - ); + SearchHit[] hits = + query("SELECT insert_time", "WHERE DAY_OF_MONTH(insert_time) < 21", "LIMIT 10000"); for (SearchHit 
hit : hits) { DateTime insertTime = getDateFromSource(hit, "insert_time"); assertThat(insertTime.dayOfMonth().get(), lessThan(21)); @@ -141,9 +128,7 @@ public void dayOfMonthInWhere() throws IOException { @Test public void dayOfWeek() throws IOException { - SearchHit[] hits = query( - "SELECT DAY_OF_WEEK(insert_time) as day_of_week", "LIMIT 2000" - ); + SearchHit[] hits = query("SELECT DAY_OF_WEEK(insert_time) as day_of_week", "LIMIT 2000"); for (SearchHit hit : hits) { int dayOfWeek = (int) getField(hit, "day_of_week"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -153,9 +138,7 @@ public void dayOfWeek() throws IOException { @Test public void hourOfDay() throws IOException { - SearchHit[] hits = query( - "SELECT HOUR_OF_DAY(insert_time) as hour_of_day", "LIMIT 1000" - ); + SearchHit[] hits = query("SELECT HOUR_OF_DAY(insert_time) as hour_of_day", "LIMIT 1000"); for (SearchHit hit : hits) { int hourOfDay = (int) getField(hit, "hour_of_day"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -165,9 +148,7 @@ public void hourOfDay() throws IOException { @Test public void minuteOfDay() throws IOException { - SearchHit[] hits = query( - "SELECT MINUTE_OF_DAY(insert_time) as minute_of_day", "LIMIT 500" - ); + SearchHit[] hits = query("SELECT MINUTE_OF_DAY(insert_time) as minute_of_day", "LIMIT 500"); for (SearchHit hit : hits) { int minuteOfDay = (int) getField(hit, "minute_of_day"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -177,9 +158,7 @@ public void minuteOfDay() throws IOException { @Test public void minuteOfHour() throws IOException { - SearchHit[] hits = query( - "SELECT MINUTE_OF_HOUR(insert_time) as minute_of_hour", "LIMIT 500" - ); + SearchHit[] hits = query("SELECT MINUTE_OF_HOUR(insert_time) as minute_of_hour", "LIMIT 500"); for (SearchHit hit : hits) { int minuteOfHour = (int) getField(hit, "minute_of_hour"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -189,9 +168,8 @@ public void 
minuteOfHour() throws IOException { @Test public void secondOfMinute() throws IOException { - SearchHit[] hits = query( - "SELECT SECOND_OF_MINUTE(insert_time) as second_of_minute", "LIMIT 500" - ); + SearchHit[] hits = + query("SELECT SECOND_OF_MINUTE(insert_time) as second_of_minute", "LIMIT 500"); for (SearchHit hit : hits) { int secondOfMinute = (int) getField(hit, "second_of_minute"); DateTime insertTime = getDateFromSource(hit, "insert_time"); @@ -201,9 +179,7 @@ public void secondOfMinute() throws IOException { @Test public void month() throws IOException { - SearchHit[] hits = query( - "SELECT MONTH(insert_time) AS month", "LIMIT 500" - ); + SearchHit[] hits = query("SELECT MONTH(insert_time) AS month", "LIMIT 500"); for (SearchHit hit : hits) { int month = (int) getField(hit, "month"); DateTime dateTime = getDateFromSource(hit, "insert_time"); @@ -213,9 +189,7 @@ public void month() throws IOException { @Test public void dayofmonth() throws IOException { - SearchHit[] hits = query( - "SELECT DAYOFMONTH(insert_time) AS dayofmonth", "LIMIT 500" - ); + SearchHit[] hits = query("SELECT DAYOFMONTH(insert_time) AS dayofmonth", "LIMIT 500"); for (SearchHit hit : hits) { int dayofmonth = (int) getField(hit, "dayofmonth"); DateTime dateTime = getDateFromSource(hit, "insert_time"); @@ -225,9 +199,7 @@ public void dayofmonth() throws IOException { @Test public void date() throws IOException { - SearchHit[] hits = query( - "SELECT DATE(insert_time) AS date", "LIMIT 500" - ); + SearchHit[] hits = query("SELECT DATE(insert_time) AS date", "LIMIT 500"); for (SearchHit hit : hits) { String date = (String) getField(hit, "date"); DateTime dateTime = getDateFromSource(hit, "insert_time"); @@ -237,9 +209,7 @@ public void date() throws IOException { @Test public void monthname() throws IOException { - SearchHit[] hits = query( - "SELECT MONTHNAME(insert_time) AS monthname", "LIMIT 500" - ); + SearchHit[] hits = query("SELECT MONTHNAME(insert_time) AS monthname", "LIMIT 500"); 
for (SearchHit hit : hits) { String monthname = (String) getField(hit, "monthname"); DateTime dateTime = getDateFromSource(hit, "insert_time"); @@ -249,9 +219,7 @@ public void monthname() throws IOException { @Test public void timestamp() throws IOException { - SearchHit[] hits = query( - "SELECT TIMESTAMP(insert_time) AS timestamp", "LIMIT 500" - ); + SearchHit[] hits = query("SELECT TIMESTAMP(insert_time) AS timestamp", "LIMIT 500"); for (SearchHit hit : hits) { String timastamp = (String) getField(hit, "timestamp"); DateTime dateTime = getDateFromSource(hit, "insert_time"); @@ -284,14 +252,16 @@ private SearchHit[] query(String select, String... statements) throws IOExceptio return execute(select + " " + FROM + " " + String.join(" ", statements)); } - // TODO: I think this code is now re-used in multiple classes, would be good to move to the base class. + // TODO: I think this code is now re-used in multiple classes, would be good to move to the base + // class. private SearchHit[] execute(String sqlRequest) throws IOException { final JSONObject jsonObject = executeRequest(makeRequest(sqlRequest)); - final XContentParser parser = new JsonXContentParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(jsonObject.toString())); + final XContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(jsonObject.toString())); return SearchResponse.fromXContent(parser).getHits().getHits(); } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/DeleteIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/DeleteIT.java index 4fad5a23b7..24895b5b69 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/DeleteIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/DeleteIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static 
org.hamcrest.core.IsEqual.equalTo; @@ -20,8 +19,8 @@ public class DeleteIT extends SQLIntegTestCase { protected void init() throws Exception { loadIndex(Index.ACCOUNT); loadIndex(Index.PHRASE); - updateClusterSettings(new ClusterSetting(PERSISTENT, - Settings.Key.SQL_DELETE_ENABLED.getKeyValue(), "true")); + updateClusterSettings( + new ClusterSetting(PERSISTENT, Settings.Key.SQL_DELETE_ENABLED.getKeyValue(), "true")); } @Test @@ -34,7 +33,8 @@ public void deleteAllTest() throws IOException, InterruptedException { response = executeRequest(makeRequest(deleteQuery)); assertThat(response.getInt("deleted"), equalTo(totalHits)); - // The documents are not deleted immediately, causing the next search call to return all results. + // The documents are not deleted immediately, causing the next search call to return all + // results. // To prevent flakiness, the minimum value of 2000 msec works fine. Thread.sleep(2000); @@ -44,20 +44,21 @@ public void deleteAllTest() throws IOException, InterruptedException { @Test public void deleteWithConditionTest() throws IOException, InterruptedException { - String selectQuery = StringUtils.format( - "SELECT * FROM %s WHERE match_phrase(phrase, 'quick fox here')", - TestsConstants.TEST_INDEX_PHRASE - ); + String selectQuery = + StringUtils.format( + "SELECT * FROM %s WHERE match_phrase(phrase, 'quick fox here')", + TestsConstants.TEST_INDEX_PHRASE); JSONObject response = executeRequest(makeRequest(selectQuery)); int totalHits = getTotalHits(response); - String deleteQuery = StringUtils.format( - "DELETE FROM %s WHERE match_phrase(phrase, 'quick fox here')", - TestsConstants.TEST_INDEX_PHRASE - ); + String deleteQuery = + StringUtils.format( + "DELETE FROM %s WHERE match_phrase(phrase, 'quick fox here')", + TestsConstants.TEST_INDEX_PHRASE); response = executeRequest(makeRequest(deleteQuery)); assertThat(response.getInt("deleted"), equalTo(totalHits)); - // The documents are not deleted immediately, causing the next search call to 
return all results. + // The documents are not deleted immediately, causing the next search call to return all + // results. // To prevent flakiness, the minimum value of 2000 msec works fine. Thread.sleep(2000); @@ -84,7 +85,8 @@ public void deleteAllWithJdbcFormat() throws IOException, InterruptedException { assertThat(response.query("/status"), equalTo(200)); assertThat(response.query("/size"), equalTo(1)); - // The documents are not deleted immediately, causing the next search call to return all results. + // The documents are not deleted immediately, causing the next search call to return all + // results. // To prevent flakiness, the minimum value of 2000 msec works fine. Thread.sleep(2000); @@ -98,18 +100,18 @@ public void deleteAllWithJdbcFormat() throws IOException, InterruptedException { @Test public void deleteWithConditionTestJdbcFormat() throws IOException, InterruptedException { - String selectQuery = StringUtils.format( - "SELECT * FROM %s WHERE match_phrase(phrase, 'quick fox here')", - TestsConstants.TEST_INDEX_PHRASE - ); + String selectQuery = + StringUtils.format( + "SELECT * FROM %s WHERE match_phrase(phrase, 'quick fox here')", + TestsConstants.TEST_INDEX_PHRASE); JSONObject response = executeRequest(makeRequest(selectQuery)); int totalHits = getTotalHits(response); - String deleteQuery = StringUtils.format( - "DELETE FROM %s WHERE match_phrase(phrase, 'quick fox here')", - TestsConstants.TEST_INDEX_PHRASE - ); + String deleteQuery = + StringUtils.format( + "DELETE FROM %s WHERE match_phrase(phrase, 'quick fox here')", + TestsConstants.TEST_INDEX_PHRASE); response = new JSONObject(executeQuery(deleteQuery, "jdbc")); System.out.println(response); @@ -120,7 +122,8 @@ public void deleteWithConditionTestJdbcFormat() throws IOException, InterruptedE assertThat(response.query("/status"), equalTo(200)); assertThat(response.query("/size"), equalTo(1)); - // The documents are not deleted immediately, causing the next search call to return all results. 
+ // The documents are not deleted immediately, causing the next search call to return all + // results. // To prevent flakiness, the minimum value of 2000 msec works fine. Thread.sleep(2000); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/ExplainIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/ExplainIT.java index 4ecabdbf01..b42e9f84f4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/ExplainIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/ExplainIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.containsString; @@ -40,16 +39,20 @@ protected void init() throws Exception { @Test public void searchSanity() throws IOException { - String expectedOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/search_explain.json"); - String expectedOutput = Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); + String expectedOutputFilePath = + TestUtils.getResourceFilePath("src/test/resources/expectedOutput/search_explain.json"); + String expectedOutput = + Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); String result = - explainQuery(String.format("SELECT * FROM %s WHERE firstname LIKE 'A%%' AND age > 20 " + - "GROUP BY gender order by _score", TEST_INDEX_ACCOUNT)); - Assert - .assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); + explainQuery( + String.format( + "SELECT * FROM %s WHERE firstname LIKE 'A%%' AND age > 20 " + + "GROUP BY gender order by _score", + TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } // This test was ignored because group by case function is not supported @@ -57,118 +60,153 @@ public void searchSanity() throws IOException { @Test public void aggregationQuery() throws 
IOException { - String expectedOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/aggregation_query_explain.json"); - String expectedOutput = Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); - - String result = explainQuery( - String.format("SELECT address, CASE WHEN gender='0' then 'aaa' else 'bbb'end a2345," + - "count(age) FROM %s GROUP BY terms('field'='address','execution_hint'='global_ordinals'),a2345", - TEST_INDEX_ACCOUNT)); - Assert - .assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); + String expectedOutputFilePath = + TestUtils.getResourceFilePath( + "src/test/resources/expectedOutput/aggregation_query_explain.json"); + String expectedOutput = + Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); + + String result = + explainQuery( + String.format( + "SELECT address, CASE WHEN gender='0' then 'aaa' else 'bbb'end a2345,count(age)" + + " FROM %s GROUP BY" + + " terms('field'='address','execution_hint'='global_ordinals'),a2345", + TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } @Test public void explainScriptValue() throws IOException { - String expectedOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/script_value.json"); - String expectedOutput = Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); + String expectedOutputFilePath = + TestUtils.getResourceFilePath("src/test/resources/expectedOutput/script_value.json"); + String expectedOutput = + Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); - String result = explainQuery(String.format("SELECT case when gender is null then 'aaa' " + - "else gender end test , account_number FROM %s", TEST_INDEX_ACCOUNT)); - Assert - 
.assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); + String result = + explainQuery( + String.format( + "SELECT case when gender is null then 'aaa' " + + "else gender end test , account_number FROM %s", + TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } @Test public void betweenScriptValue() throws IOException { - String expectedOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/between_query.json"); - String expectedOutput = Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); + String expectedOutputFilePath = + TestUtils.getResourceFilePath("src/test/resources/expectedOutput/between_query.json"); + String expectedOutput = + Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); String result = - explainQuery(String.format("SELECT case when balance between 100 and 200 then 'aaa' " + - "else balance end test, account_number FROM %s", TEST_INDEX_ACCOUNT)); - Assert - .assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); + explainQuery( + String.format( + "SELECT case when balance between 100 and 200 then 'aaa' " + + "else balance end test, account_number FROM %s", + TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } @Test public void searchSanityFilter() throws IOException { - String expectedOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/search_explain_filter.json"); - String expectedOutput = Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); + String expectedOutputFilePath = + TestUtils.getResourceFilePath( + "src/test/resources/expectedOutput/search_explain_filter.json"); + String expectedOutput = + Files.toString(new 
File(expectedOutputFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); - String result = explainQuery(String.format("SELECT * FROM %s WHERE firstname LIKE 'A%%' " + - "AND age > 20 GROUP BY gender", TEST_INDEX_ACCOUNT)); - Assert - .assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); + String result = + explainQuery( + String.format( + "SELECT * FROM %s WHERE firstname LIKE 'A%%' " + "AND age > 20 GROUP BY gender", + TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } @Test public void deleteSanity() throws IOException { - String expectedOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/delete_explain.json"); - String expectedOutput = Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); + String expectedOutputFilePath = + TestUtils.getResourceFilePath("src/test/resources/expectedOutput/delete_explain.json"); + String expectedOutput = + Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); ; String result = - explainQuery(String.format("DELETE FROM %s WHERE firstname LIKE 'A%%' AND age > 20", - TEST_INDEX_ACCOUNT)); - Assert - .assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); + explainQuery( + String.format( + "DELETE FROM %s WHERE firstname LIKE 'A%%' AND age > 20", TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } @Test public void spatialFilterExplainTest() throws IOException { - String expectedOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/search_spatial_explain.json"); - String expectedOutput = Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); + String expectedOutputFilePath = + TestUtils.getResourceFilePath( + 
"src/test/resources/expectedOutput/search_spatial_explain.json"); + String expectedOutput = + Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); ; - String result = explainQuery(String.format("SELECT * FROM %s WHERE GEO_INTERSECTS" + - "(place,'POLYGON ((102 2, 103 2, 103 3, 102 3, 102 2))')", TEST_INDEX_LOCATION)); - Assert - .assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); + String result = + explainQuery( + String.format( + "SELECT * FROM %s WHERE GEO_INTERSECTS" + + "(place,'POLYGON ((102 2, 103 2, 103 3, 102 3, 102 2))')", + TEST_INDEX_LOCATION)); + Assert.assertThat( + result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } @Test public void orderByOnNestedFieldTest() throws Exception { String result = - explainQuery(String.format("SELECT * FROM %s ORDER BY NESTED('message.info','message')", - TEST_INDEX_NESTED_TYPE)); - Assert.assertThat(result.replaceAll("\\s+", ""), - equalTo("{\"from\":0,\"size\":200,\"sort\":[{\"message.info\":" + - "{\"order\":\"asc\",\"nested\":{\"path\":\"message\"}}}]}")); + explainQuery( + String.format( + "SELECT * FROM %s ORDER BY NESTED('message.info','message')", + TEST_INDEX_NESTED_TYPE)); + Assert.assertThat( + result.replaceAll("\\s+", ""), + equalTo( + "{\"from\":0,\"size\":200,\"sort\":[{\"message.info\":" + + "{\"order\":\"asc\",\"nested\":{\"path\":\"message\"}}}]}")); } @Test public void multiMatchQuery() throws IOException { - String expectedOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/multi_match_query.json"); - String expectedOutput = Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); + String expectedOutputFilePath = + TestUtils.getResourceFilePath("src/test/resources/expectedOutput/multi_match_query.json"); + String expectedOutput = + Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) + 
.replaceAll("\r", ""); String result = - explainQuery(String.format("SELECT * FROM %s WHERE multimatch('query'='this is a test'," + - "'fields'='subject^3,message','analyzer'='standard','type'='best_fields','boost'=1.0," + - "'slop'=0,'tie_breaker'=0.3,'operator'='and')", TEST_INDEX_ACCOUNT)); - Assert - .assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); + explainQuery( + String.format( + "SELECT * FROM %s WHERE multimatch('query'='this is a test'," + + "'fields'='subject^3,message','analyzer'='standard','type'='best_fields','boost'=1.0," + + "'slop'=0,'tie_breaker'=0.3,'operator'='and')", + TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } @Test @@ -180,36 +218,49 @@ public void termsIncludeExcludeExplainTest() throws IOException { final String expected3 = "\"include\":{\"partition\":0,\"num_partitions\":20}"; String result = - explainQuery(queryPrefix + " terms('field'='correspond_brand_name','size'='10'," + - "'alias'='correspond_brand_name','include'='\\\".*sport.*\\\"','exclude'='\\\"water_.*\\\"')"); + explainQuery( + queryPrefix + + " terms('field'='correspond_brand_name','size'='10'," + + "'alias'='correspond_brand_name','include'='\\\".*sport.*\\\"','exclude'='\\\"water_.*\\\"')"); Assert.assertThat(result, containsString(expected1)); - result = explainQuery(queryPrefix + "terms('field'='correspond_brand_name','size'='10'," + - "'alias'='correspond_brand_name','include'='[\\\"mazda\\\", \\\"honda\\\"]'," + - "'exclude'='[\\\"rover\\\", \\\"jensen\\\"]')"); + result = + explainQuery( + queryPrefix + + "terms('field'='correspond_brand_name','size'='10'," + + "'alias'='correspond_brand_name','include'='[\\\"mazda\\\", \\\"honda\\\"]'," + + "'exclude'='[\\\"rover\\\", \\\"jensen\\\"]')"); Assert.assertThat(result, containsString(expected2)); - result = explainQuery(queryPrefix + "terms('field'='correspond_brand_name','size'='10'," + - 
"'alias'='correspond_brand_name','include'='{\\\"partition\\\":0,\\\"num_partitions\\\":20}')"); + result = + explainQuery( + queryPrefix + + "terms('field'='correspond_brand_name','size'='10'," + + "'alias'='correspond_brand_name','include'='{\\\"partition\\\":0,\\\"num_partitions\\\":20}')"); Assert.assertThat(result, containsString(expected3)); } @Test public void explainNLJoin() throws IOException { - String expectedOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/nested_loop_join_explain.json"); - String expectedOutput = Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); - - String query = "SELECT /*! USE_NL*/ a.firstname ,a.lastname , a.gender ,d.dog_name FROM " + - TEST_INDEX_PEOPLE + "/people a JOIN " + TEST_INDEX_DOG + - "/dog d on d.holdersName = a.firstname" + - " WHERE (a.age > 10 OR a.balance > 2000) AND d.age > 1"; + String expectedOutputFilePath = + TestUtils.getResourceFilePath( + "src/test/resources/expectedOutput/nested_loop_join_explain.json"); + String expectedOutput = + Files.toString(new File(expectedOutputFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); + + String query = + "SELECT /*! 
USE_NL*/ a.firstname ,a.lastname , a.gender ,d.dog_name FROM " + + TEST_INDEX_PEOPLE + + "/people a JOIN " + + TEST_INDEX_DOG + + "/dog d on d.holdersName = a.firstname" + + " WHERE (a.age > 10 OR a.balance > 2000) AND d.age > 1"; String result = explainQuery(query); - Assert - .assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); + Assert.assertThat( + result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } public void testContentTypeOfExplainRequestShouldBeJson() throws IOException { diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/GetEndpointQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/GetEndpointQueryIT.java index e23753bbd2..81edb54556 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/GetEndpointQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/GetEndpointQueryIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -14,13 +13,10 @@ import org.junit.rules.ExpectedException; import org.opensearch.client.ResponseException; -/** - * Tests to cover requests with "?format=csv" parameter - */ +/** Tests to cover requests with "?format=csv" parameter */ public class GetEndpointQueryIT extends SQLIntegTestCase { - @Rule - public ExpectedException rule = ExpectedException.none(); + @Rule public ExpectedException rule = ExpectedException.none(); @Override protected void init() throws Exception { diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/HashJoinIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/HashJoinIT.java index 9cd497e675..02c55d8eb8 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/HashJoinIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/HashJoinIT.java @@ -3,16 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; 
import static org.hamcrest.Matchers.equalTo; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_GAME_OF_THRONES; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_UNEXPANDED_OBJECT; -import static org.opensearch.sql.util.MatcherUtils.columnName; import static org.opensearch.sql.util.MatcherUtils.rows; -import static org.opensearch.sql.util.MatcherUtils.verifyColumn; import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; import java.io.IOException; @@ -24,35 +21,23 @@ import org.junit.Assert; import org.junit.Test; -/** - * Test new hash join algorithm by comparison with old implementation. - */ +/** Test new hash join algorithm by comparison with old implementation. */ public class HashJoinIT extends SQLIntegTestCase { - /** - * Hint to use old join algorithm - */ + /** Hint to use old join algorithm */ private static final String USE_OLD_JOIN_ALGORITHM = "/*! USE_NL*/"; - /** - * Set limit to 100% to bypass circuit break check - */ + /** Set limit to 100% to bypass circuit break check */ private static final String BYPASS_CIRCUIT_BREAK = "/*! JOIN_CIRCUIT_BREAK_LIMIT(100)*/"; - /** - * Enable term filter optimization - */ + /** Enable term filter optimization */ private static final String ENABLE_TERMS_FILTER = "/*! HASH_WITH_TERMS_FILTER*/"; - /** - * Default page size is greater than block size - */ + /** Default page size is greater than block size */ private static final String PAGE_SIZE_GREATER_THAN_BLOCK_SIZE = "/*! JOIN_ALGORITHM_BLOCK_SIZE(5)*/"; - /** - * Page size is smaller than block size - */ + /** Page size is smaller than block size */ private static final String PAGE_SIZE_LESS_THAN_BLOCK_SIZE = "/*! JOIN_ALGORITHM_BLOCK_SIZE(5)*/ /*! 
JOIN_SCROLL_PAGE_SIZE(2)*/"; @@ -77,14 +62,16 @@ public void leftJoin() throws IOException { @Test public void innerJoinUnexpandedObjectField() { - String query = String.format(Locale.ROOT, - "SELECT " + - "a.id.serial, b.id.serial " + - "FROM %1$s AS a " + - "JOIN %1$s AS b " + - "ON a.id.serial = b.attributes.hardware.correlate_id " + - "WHERE b.attributes.hardware.platform = 'Linux' ", - TEST_INDEX_UNEXPANDED_OBJECT); + String query = + String.format( + Locale.ROOT, + "SELECT " + + "a.id.serial, b.id.serial " + + "FROM %1$s AS a " + + "JOIN %1$s AS b " + + "ON a.id.serial = b.attributes.hardware.correlate_id " + + "WHERE b.attributes.hardware.platform = 'Linux' ", + TEST_INDEX_UNEXPANDED_OBJECT); JSONObject response = executeJdbcRequest(query); verifyDataRows(response, rows(3, 1), rows(3, 3)); @@ -137,8 +124,8 @@ private void testJoin(final String join) throws IOException { // TODO: reduce the balance threshold to 10000 when the memory circuit breaker issue // (https://github.com/opendistro-for-elasticsearch/sql/issues/73) is fixed. final String querySuffixTemplate = - "a.firstname, a.lastname, b.city, b.state FROM %1$s a %2$s %1$s b " + - "ON b.age = a.age WHERE a.balance > 45000 AND b.age > 25 LIMIT 1000000"; + "a.firstname, a.lastname, b.city, b.state FROM %1$s a %2$s %1$s b " + + "ON b.age = a.age WHERE a.balance > 45000 AND b.age > 25 LIMIT 1000000"; final String querySuffix = String.format(Locale.ROOT, querySuffixTemplate, TEST_INDEX_ACCOUNT, join); @@ -154,10 +141,11 @@ private void testJoinWithObjectField(final String join, final String hint) throw // TODO: reduce the balance threshold to 10000 when the memory circuit breaker issue // (https://github.com/opendistro-for-elasticsearch/sql/issues/73) is fixed. 
- final String querySuffixTemplate = "c.name.firstname, c.name.lastname, f.hname, f.seat " + - "FROM %1$s c %2$s %1$s f ON f.gender.keyword = c.gender.keyword " + - "AND f.house.keyword = c.house.keyword " + - "WHERE c.gender = 'M' LIMIT 1000000"; + final String querySuffixTemplate = + "c.name.firstname, c.name.lastname, f.hname, f.seat " + + "FROM %1$s c %2$s %1$s f ON f.gender.keyword = c.gender.keyword " + + "AND f.house.keyword = c.house.keyword " + + "WHERE c.gender = 'M' LIMIT 1000000"; final String querySuffix = String.format(Locale.ROOT, querySuffixTemplate, TEST_INDEX_GAME_OF_THRONES, join); @@ -182,14 +170,16 @@ private void executeAndCompareOldAndNewJoins(final String oldQuery, final String Set idsOld = new HashSet<>(); - hitsOld.forEach(hitObj -> { - JSONObject hit = (JSONObject) hitObj; - idsOld.add(hit.getString("_id")); - }); - - hitsNew.forEach(hitObj -> { - JSONObject hit = (JSONObject) hitObj; - Assert.assertTrue(idsOld.contains(hit.getString("_id"))); - }); + hitsOld.forEach( + hitObj -> { + JSONObject hit = (JSONObject) hitObj; + idsOld.add(hit.getString("_id")); + }); + + hitsNew.forEach( + hitObj -> { + JSONObject hit = (JSONObject) hitObj; + Assert.assertTrue(idsOld.contains(hit.getString("_id"))); + }); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/HavingIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/HavingIT.java index 34e6af02b4..3bd2195a89 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/HavingIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/HavingIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.arrayContaining; @@ -26,15 +25,19 @@ public class HavingIT extends SQLIntegTestCase { private static final String SELECT_FROM_WHERE_GROUP_BY = - "SELECT state, COUNT(*) cnt " + - "FROM " + TestsConstants.TEST_INDEX_ACCOUNT + " " + - "WHERE age = 30 " + - "GROUP BY state "; - - private static 
final Set> states1 = rowSet(1, Arrays.asList( - "AK", "AR", "CT", "DE", "HI", "IA", "IL", "IN", "LA", "MA", "MD", "MN", - "MO", "MT", "NC", "ND", "NE", "NH", "NJ", "NV", "SD", "VT", "WV", "WY" - )); + "SELECT state, COUNT(*) cnt " + + "FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " " + + "WHERE age = 30 " + + "GROUP BY state "; + + private static final Set> states1 = + rowSet( + 1, + Arrays.asList( + "AK", "AR", "CT", "DE", "HI", "IA", "IL", "IN", "LA", "MA", "MD", "MN", "MO", "MT", + "NC", "ND", "NE", "NH", "NJ", "NV", "SD", "VT", "WV", "WY")); private static final Set> states2 = rowSet(2, Arrays.asList("AZ", "DC", "KS", "ME")); private static final Set> states3 = @@ -47,118 +50,67 @@ protected void init() throws Exception { @Test public void equalsTo() throws IOException { - assertThat( - query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt = 2"), - resultSet( - states2 - ) - ); + assertThat(query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt = 2"), resultSet(states2)); } @Test public void lessThanOrEqual() throws IOException { - assertThat( - query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt <= 2"), - resultSet( - states1, - states2 - ) - ); + assertThat(query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt <= 2"), resultSet(states1, states2)); } @Test public void notEqualsTo() throws IOException { - assertThat( - query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt <> 2"), - resultSet( - states1, - states3 - ) - ); + assertThat(query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt <> 2"), resultSet(states1, states3)); } @Test public void between() throws IOException { assertThat( query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt BETWEEN 1 AND 2"), - resultSet( - states1, - states2 - ) - ); + resultSet(states1, states2)); } @Test public void notBetween() throws IOException { assertThat( - query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt NOT BETWEEN 1 AND 2"), - resultSet( - states3 - ) - ); + query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt NOT BETWEEN 1 AND 2"), resultSet(states3)); } @Test public 
void in() throws IOException { assertThat( - query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt IN (2, 3)"), - resultSet( - states2, - states3 - ) - ); + query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt IN (2, 3)"), resultSet(states2, states3)); } @Test public void notIn() throws IOException { - assertThat( - query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt NOT IN (2, 3)"), - resultSet( - states1 - ) - ); + assertThat(query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt NOT IN (2, 3)"), resultSet(states1)); } @Test public void and() throws IOException { assertThat( query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt >= 1 AND cnt < 3"), - resultSet( - states1, - states2 - ) - ); + resultSet(states1, states2)); } @Test public void or() throws IOException { assertThat( query(SELECT_FROM_WHERE_GROUP_BY + "HAVING cnt = 1 OR cnt = 3"), - resultSet( - states1, - states3 - ) - ); + resultSet(states1, states3)); } @Test public void not() throws IOException { - assertThat( - query(SELECT_FROM_WHERE_GROUP_BY + "HAVING NOT cnt >= 2"), - resultSet( - states1 - ) - ); + assertThat(query(SELECT_FROM_WHERE_GROUP_BY + "HAVING NOT cnt >= 2"), resultSet(states1)); } @Test public void notAndOr() throws IOException { assertThat( query(SELECT_FROM_WHERE_GROUP_BY + "HAVING NOT (cnt > 0 AND cnt <= 2)"), - resultSet( - states3 - ) - ); + resultSet(states3)); } private Set query(String query) throws IOException { @@ -174,10 +126,8 @@ private Set getResult(JSONObject response, String aggName, String aggF Set result = new HashSet<>(); for (int i = 0; i < buckets.length(); i++) { JSONObject bucket = buckets.getJSONObject(i); - result.add(new Object[] { - bucket.get("key"), - ((JSONObject) bucket.get(aggFunc)).getLong("value") - }); + result.add( + new Object[] {bucket.get("key"), ((JSONObject) bucket.get(aggFunc)).getLong("value")}); } return result; @@ -185,15 +135,12 @@ private Set getResult(JSONObject response, String aggName, String aggF @SafeVarargs private final Matcher> resultSet(Set>... 
rowSets) { - return containsInAnyOrder(Arrays.stream(rowSets) - .flatMap(Collection::stream) - .collect(Collectors.toList())); + return containsInAnyOrder( + Arrays.stream(rowSets).flatMap(Collection::stream).collect(Collectors.toList())); } private static Set> rowSet(long count, List states) { - return states.stream() - .map(state -> row(state, count)) - .collect(Collectors.toSet()); + return states.stream().map(state -> row(state, count)).collect(Collectors.toSet()); } private static Matcher row(String state, long count) { diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/JSONRequestIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/JSONRequestIT.java index dcc90a9acf..62508576e8 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/JSONRequestIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/JSONRequestIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.CoreMatchers.anyOf; @@ -35,11 +34,11 @@ protected void init() throws Exception { @Test public void search() throws IOException { int ageToCompare = 25; - SearchHits response = query(String.format("{\"query\":\"" + - "SELECT * " + - "FROM %s " + - "WHERE age > %s " + - "LIMIT 1000\"}", TestsConstants.TEST_INDEX_ACCOUNT, ageToCompare)); + SearchHits response = + query( + String.format( + "{\"query\":\"" + "SELECT * " + "FROM %s " + "WHERE age > %s " + "LIMIT 1000\"}", + TestsConstants.TEST_INDEX_ACCOUNT, ageToCompare)); SearchHit[] hits = response.getHits(); for (SearchHit hit : hits) { int age = (int) hit.getSourceAsMap().get("age"); @@ -63,11 +62,14 @@ public void searchWithFilterAndNoWhere() throws IOException { * } */ int ageToCompare = 25; - SearchHits response = query(String.format("{\"query\":\"" + - "SELECT * " + - "FROM %s " + - "LIMIT 1000\",\"filter\":{\"range\":{\"age\":{\"gt\":%s}}}}", - TestsConstants.TEST_INDEX_ACCOUNT, ageToCompare)); + SearchHits response = + query( + 
String.format( + "{\"query\":\"" + + "SELECT * " + + "FROM %s " + + "LIMIT 1000\",\"filter\":{\"range\":{\"age\":{\"gt\":%s}}}}", + TestsConstants.TEST_INDEX_ACCOUNT, ageToCompare)); SearchHit[] hits = response.getHits(); for (SearchHit hit : hits) { int age = (int) hit.getSourceAsMap().get("age"); @@ -92,12 +94,15 @@ public void searchWithRangeFilter() throws IOException { */ int ageToCompare = 25; int balanceToCompare = 35000; - SearchHits response = query(String.format("{\"query\":\"" + - "SELECT * " + - "FROM %s " + - "WHERE age > %s " + - "LIMIT 1000\",\"filter\":{\"range\":{\"balance\":{\"lt\":%s}}}}", - TestsConstants.TEST_INDEX_ACCOUNT, ageToCompare, balanceToCompare)); + SearchHits response = + query( + String.format( + "{\"query\":\"" + + "SELECT * " + + "FROM %s " + + "WHERE age > %s " + + "LIMIT 1000\",\"filter\":{\"range\":{\"balance\":{\"lt\":%s}}}}", + TestsConstants.TEST_INDEX_ACCOUNT, ageToCompare, balanceToCompare)); SearchHit[] hits = response.getHits(); for (SearchHit hit : hits) { int age = (int) hit.getSourceAsMap().get("age"); @@ -109,8 +114,8 @@ public void searchWithRangeFilter() throws IOException { @Test /** - * Using TEST_INDEX_NESTED_TYPE here since term filter does not work properly on analyzed fields like text. - * The field 'someField' in TEST_INDEX_NESTED_TYPE is of type keyword. + * Using TEST_INDEX_NESTED_TYPE here since term filter does not work properly on analyzed fields + * like text. The field 'someField' in TEST_INDEX_NESTED_TYPE is of type keyword. 
*/ public void searchWithTermFilter() throws IOException { /* @@ -126,12 +131,15 @@ public void searchWithTermFilter() throws IOException { */ int likesToCompare = 3; String fieldToCompare = "a"; - SearchHits response = query(String.format("{\"query\":\"" + - "SELECT * " + - "FROM %s " + - "WHERE nested(comment.likes) < %s\"," + - "\"filter\":{\"term\":{\"someField\":\"%s\"}}}", - TestsConstants.TEST_INDEX_NESTED_TYPE, likesToCompare, fieldToCompare)); + SearchHits response = + query( + String.format( + "{\"query\":\"" + + "SELECT * " + + "FROM %s " + + "WHERE nested(comment.likes) < %s\"," + + "\"filter\":{\"term\":{\"someField\":\"%s\"}}}", + TestsConstants.TEST_INDEX_NESTED_TYPE, likesToCompare, fieldToCompare)); SearchHit[] hits = response.getHits(); for (SearchHit hit : hits) { int likes = (int) ((Map) hit.getSourceAsMap().get("comment")).get("likes"); @@ -165,13 +173,16 @@ public void searchWithNestedFilter() throws IOException { */ int likesToCompare = 1; String dataToCompare = "aa"; - SearchHits response = query(String.format("{\"query\":\"" + - "SELECT * " + - "FROM %s " + - "WHERE nested(comment.likes) > %s\"," + - "\"filter\":{\"nested\":{\"path\":\"comment\"," + - "\"query\":{\"bool\":{\"must\":{\"term\":{\"comment.data\":\"%s\"}}}}}}}", - TestsConstants.TEST_INDEX_NESTED_TYPE, likesToCompare, dataToCompare)); + SearchHits response = + query( + String.format( + "{\"query\":\"" + + "SELECT * " + + "FROM %s " + + "WHERE nested(comment.likes) > %s\"," + + "\"filter\":{\"nested\":{\"path\":\"comment\"," + + "\"query\":{\"bool\":{\"must\":{\"term\":{\"comment.data\":\"%s\"}}}}}}}", + TestsConstants.TEST_INDEX_NESTED_TYPE, likesToCompare, dataToCompare)); SearchHit[] hits = response.getHits(); for (SearchHit hit : hits) { int likes = (int) ((Map) hit.getSourceAsMap().get("comment")).get("likes"); @@ -184,10 +195,11 @@ public void searchWithNestedFilter() throws IOException { private SearchHits query(String request) throws IOException { final JSONObject 
jsonObject = executeRequest(request); - final XContentParser parser = new JsonXContentParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(jsonObject.toString())); + final XContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(jsonObject.toString())); return SearchResponse.fromXContent(parser).getHits(); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/JdbcTestIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/JdbcTestIT.java index e3a0cbd89d..d86fad3600 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/JdbcTestIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/JdbcTestIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.containsString; @@ -27,9 +26,10 @@ protected void init() throws Exception { } public void testPercentilesQuery() { - JSONObject response = executeJdbcRequest( - "SELECT percentiles(age, 25.0, 50.0, 75.0, 99.9) age_percentiles " + - "FROM opensearch-sql_test_index_people"); + JSONObject response = + executeJdbcRequest( + "SELECT percentiles(age, 25.0, 50.0, 75.0, 99.9) age_percentiles " + + "FROM opensearch-sql_test_index_people"); assertThat(response.getJSONArray("datarows").length(), equalTo(1)); @@ -47,9 +47,10 @@ public void testSlowQuery() throws IOException { // set slow log threshold = 0s updateClusterSettings(new ClusterSetting(PERSISTENT, "plugins.sql.slowlog", "0")); - JSONObject response = executeJdbcRequest( - "SELECT percentiles(age, 25.0, 50.0, 75.0, 99.9) age_percentiles " + - "FROM opensearch-sql_test_index_people"); + JSONObject response = + executeJdbcRequest( + "SELECT percentiles(age, 25.0, 50.0, 75.0, 99.9) age_percentiles " + + "FROM opensearch-sql_test_index_people"); assertThat(response.getJSONArray("datarows").length(), equalTo(1)); 
JSONObject percentileRow = (JSONObject) response.query("/datarows/0/0"); @@ -61,42 +62,39 @@ public void testSlowQuery() throws IOException { wipeAllClusterSettings(); } - @Ignore("flaky test, trigger resource not enough exception. " - + "ORDER BY date_format(insert_time, 'dd-MM-YYYY') can't be pushed down ") + @Ignore( + "flaky test, trigger resource not enough exception. " + + "ORDER BY date_format(insert_time, 'dd-MM-YYYY') can't be pushed down ") public void testDateTimeInQuery() { - JSONObject response = executeJdbcRequest( - "SELECT date_format(insert_time, 'dd-MM-YYYY') " + - "FROM opensearch-sql_test_index_online " + - "ORDER BY date_format(insert_time, 'dd-MM-YYYY') " + - "LIMIT 1" - ); + JSONObject response = + executeJdbcRequest( + "SELECT date_format(insert_time, 'dd-MM-YYYY') " + + "FROM opensearch-sql_test_index_online " + + "ORDER BY date_format(insert_time, 'dd-MM-YYYY') " + + "LIMIT 1"); assertThat( - response.getJSONArray("datarows") - .getJSONArray(0) - .getString(0), - equalTo("17-08-2014")); + response.getJSONArray("datarows").getJSONArray(0).getString(0), equalTo("17-08-2014")); } - @Ignore("flaky test, trigger resource not enough exception. " - + "ORDER BY all_client/10 can't be pushed down ") + @Ignore( + "flaky test, trigger resource not enough exception. 
" + + "ORDER BY all_client/10 can't be pushed down ") public void testDivisionInQuery() { - JSONObject response = executeJdbcRequest( - "SELECT all_client/10 from opensearch-sql_test_index_online ORDER BY all_client/10 desc limit 1"); + JSONObject response = + executeJdbcRequest( + "SELECT all_client/10 from opensearch-sql_test_index_online ORDER BY all_client/10 desc" + + " limit 1"); - assertThat( - response.getJSONArray("datarows") - .getJSONArray(0) - .getDouble(0), - equalTo(16827.0)); + assertThat(response.getJSONArray("datarows").getJSONArray(0).getDouble(0), equalTo(16827.0)); } public void testGroupByInQuery() { - JSONObject response = executeJdbcRequest( - "SELECT date_format(insert_time, 'YYYY-MM-dd'), COUNT(*) " + - "FROM opensearch-sql_test_index_online " + - "GROUP BY date_format(insert_time, 'YYYY-MM-dd')" - ); + JSONObject response = + executeJdbcRequest( + "SELECT date_format(insert_time, 'YYYY-MM-dd'), COUNT(*) " + + "FROM opensearch-sql_test_index_online " + + "GROUP BY date_format(insert_time, 'YYYY-MM-dd')"); assertThat(response.getJSONArray("schema").length(), equalTo(2)); assertThat(response.getJSONArray("datarows").length(), equalTo(8)); @@ -105,28 +103,31 @@ public void testGroupByInQuery() { @Test public void numberOperatorNameCaseInsensitiveTest() { assertSchemaContains( - executeQuery("SELECT ABS(age) FROM opensearch-sql_test_index_account " + - "WHERE age IS NOT NULL ORDER BY age LIMIT 5", "jdbc"), - "ABS(age)" - ); + executeQuery( + "SELECT ABS(age) FROM opensearch-sql_test_index_account " + + "WHERE age IS NOT NULL ORDER BY age LIMIT 5", + "jdbc"), + "ABS(age)"); } @Test public void trigFunctionNameCaseInsensitiveTest() { assertSchemaContains( - executeQuery("SELECT Cos(age) FROM opensearch-sql_test_index_account " + - "WHERE age is NOT NULL ORDER BY age LIMIT 5", "jdbc"), - "Cos(age)" - ); + executeQuery( + "SELECT Cos(age) FROM opensearch-sql_test_index_account " + + "WHERE age is NOT NULL ORDER BY age LIMIT 5", + "jdbc"), + 
"Cos(age)"); } @Test public void stringOperatorNameCaseInsensitiveTest() { assertSchemaContains( - executeQuery("SELECT SubStrinG(lastname, 0, 2) FROM opensearch-sql_test_index_account " + - "ORDER BY age LIMIT 5", "jdbc"), - "SubStrinG(lastname, 0, 2)" - ); + executeQuery( + "SELECT SubStrinG(lastname, 0, 2) FROM opensearch-sql_test_index_account " + + "ORDER BY age LIMIT 5", + "jdbc"), + "SubStrinG(lastname, 0, 2)"); } @Ignore("DATE_FORMAT function signature changed in new engine") @@ -134,45 +135,52 @@ public void stringOperatorNameCaseInsensitiveTest() { public void dateFunctionNameCaseInsensitiveTest() { assertTrue( executeQuery( - "SELECT DATE_FORMAT(insert_time, 'yyyy-MM-dd', 'UTC') FROM opensearch-sql_test_index_online " + - "WHERE date_FORMAT(insert_time, 'yyyy-MM-dd', 'UTC') > '2014-01-01' " + - "GROUP BY DAte_format(insert_time, 'yyyy-MM-dd', 'UTC') " + - "ORDER BY date_forMAT(insert_time, 'yyyy-MM-dd', 'UTC')", "jdbc").equalsIgnoreCase( - executeQuery( - "SELECT date_format(insert_time, 'yyyy-MM-dd', 'UTC') FROM opensearch-sql_test_index_online " + - "WHERE date_format(insert_time, 'yyyy-MM-dd', 'UTC') > '2014-01-01' " + - "GROUP BY date_format(insert_time, 'yyyy-MM-dd', 'UTC') " + - "ORDER BY date_format(insert_time, 'yyyy-MM-dd', 'UTC')", "jdbc") - ) - ); + "SELECT DATE_FORMAT(insert_time, 'yyyy-MM-dd', 'UTC') FROM" + + " opensearch-sql_test_index_online WHERE date_FORMAT(insert_time," + + " 'yyyy-MM-dd', 'UTC') > '2014-01-01' GROUP BY DAte_format(insert_time," + + " 'yyyy-MM-dd', 'UTC') ORDER BY date_forMAT(insert_time, 'yyyy-MM-dd'," + + " 'UTC')", + "jdbc") + .equalsIgnoreCase( + executeQuery( + "SELECT date_format(insert_time, 'yyyy-MM-dd', 'UTC') FROM" + + " opensearch-sql_test_index_online WHERE date_format(insert_time," + + " 'yyyy-MM-dd', 'UTC') > '2014-01-01' GROUP BY date_format(insert_time," + + " 'yyyy-MM-dd', 'UTC') ORDER BY date_format(insert_time, 'yyyy-MM-dd'," + + " 'UTC')", + "jdbc"))); } @Test public void 
ipTypeShouldPassJdbcFormatter() { assertThat( - executeQuery("SELECT host AS hostIP FROM " + TestsConstants.TEST_INDEX_WEBLOG - + " ORDER BY hostIP", "jdbc"), - containsString("\"type\": \"ip\"") - ); + executeQuery( + "SELECT host AS hostIP FROM " + TestsConstants.TEST_INDEX_WEBLOG + " ORDER BY hostIP", + "jdbc"), + containsString("\"type\": \"ip\"")); } @Test public void functionWithoutAliasShouldHaveEntireFunctionAsNameInSchema() { assertThat( - executeQuery("SELECT substring(lastname, 1, 2) FROM " + TestsConstants.TEST_INDEX_ACCOUNT - + " ORDER BY substring(lastname, 1, 2)", "jdbc"), - containsString("\"name\": \"substring(lastname, 1, 2)\"") - ); + executeQuery( + "SELECT substring(lastname, 1, 2) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY substring(lastname, 1, 2)", + "jdbc"), + containsString("\"name\": \"substring(lastname, 1, 2)\"")); } @Ignore("Handled by v2 engine which returns 'name': 'substring(lastname, 1, 2)' instead") @Test public void functionWithAliasShouldHaveAliasAsNameInSchema() { assertThat( - executeQuery("SELECT substring(lastname, 1, 2) AS substring FROM " - + TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY substring", "jdbc"), - containsString("\"name\": \"substring\"") - ); + executeQuery( + "SELECT substring(lastname, 1, 2) AS substring FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY substring", + "jdbc"), + containsString("\"name\": \"substring\"")); } private void assertSchemaContains(String actualResponse, String expected) { @@ -183,7 +191,10 @@ private void assertSchemaContains(String actualResponse, String expected) { return; } } - Assert.fail("Expected field name [" + expected + "] is not found in response schema: " + - actualResponse); + Assert.fail( + "Expected field name [" + + expected + + "] is not found in response schema: " + + actualResponse); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/JoinAliasWriterRuleIT.java 
b/integ-test/src/test/java/org/opensearch/sql/legacy/JoinAliasWriterRuleIT.java index 31c77fa7c0..75b2b45df6 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/JoinAliasWriterRuleIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/JoinAliasWriterRuleIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.equalTo; @@ -15,18 +14,15 @@ import org.junit.rules.ExpectedException; import org.opensearch.client.ResponseException; -/** - * Test cases for writing missing join table aliases. - */ +/** Test cases for writing missing join table aliases. */ public class JoinAliasWriterRuleIT extends SQLIntegTestCase { - @Rule - public ExpectedException exception = ExpectedException.none(); + @Rule public ExpectedException exception = ExpectedException.none(); protected void init() throws Exception { - loadIndex(Index.ORDER); // opensearch-sql_test_index_order - loadIndex(Index.BANK); // opensearch-sql_test_index_bank - loadIndex(Index.BANK_TWO); // opensearch-sql_test_index_bank_two + loadIndex(Index.ORDER); // opensearch-sql_test_index_order + loadIndex(Index.BANK); // opensearch-sql_test_index_bank + loadIndex(Index.BANK_TWO); // opensearch-sql_test_index_bank_two } @Test @@ -38,12 +34,14 @@ public void noTableAliasNoCommonColumns() throws IOException { "INNER JOIN opensearch-sql_test_index_bank ", "ON name = firstname WHERE state = 'WA' OR id < 7"), query( - "SELECT opensearch-sql_test_index_order_0.id, opensearch-sql_test_index_bank_1.firstname ", + "SELECT opensearch-sql_test_index_order_0.id," + + " opensearch-sql_test_index_bank_1.firstname ", "FROM opensearch-sql_test_index_order opensearch-sql_test_index_order_0 ", "INNER JOIN opensearch-sql_test_index_bank opensearch-sql_test_index_bank_1 ", - "ON opensearch-sql_test_index_order_0.name = opensearch-sql_test_index_bank_1.firstname ", - "WHERE opensearch-sql_test_index_bank_1.state = 'WA' OR 
opensearch-sql_test_index_order_0.id < 7") - ); + "ON opensearch-sql_test_index_order_0.name = opensearch-sql_test_index_bank_1.firstname" + + " ", + "WHERE opensearch-sql_test_index_bank_1.state = 'WA' OR" + + " opensearch-sql_test_index_order_0.id < 7")); } @Test @@ -59,8 +57,7 @@ public void oneTableAliasNoCommonColumns() throws IOException { "FROM opensearch-sql_test_index_order a ", "INNER JOIN opensearch-sql_test_index_bank opensearch-sql_test_index_bank_0 ", "ON a.name = opensearch-sql_test_index_bank_0.firstname ", - "WHERE opensearch-sql_test_index_bank_0.state = 'WA' OR a.id < 7") - ); + "WHERE opensearch-sql_test_index_bank_0.state = 'WA' OR a.id < 7")); } @Test @@ -76,8 +73,7 @@ public void bothTableAliasNoCommonColumns() throws IOException { "FROM opensearch-sql_test_index_order a ", "INNER JOIN opensearch-sql_test_index_bank b ", "ON a.name = b.firstname ", - "WHERE b.state = 'WA' OR a.id < 7 ") - ); + "WHERE b.state = 'WA' OR a.id < 7 ")); } @Test @@ -90,12 +86,14 @@ public void tableNamesWithTypeName() throws IOException { "INNER JOIN opensearch-sql_test_index_bank/account ", "ON name = firstname WHERE state = 'WA' OR id < 7"), query( - "SELECT opensearch-sql_test_index_order_0.id, opensearch-sql_test_index_bank_1.firstname ", + "SELECT opensearch-sql_test_index_order_0.id," + + " opensearch-sql_test_index_bank_1.firstname ", "FROM opensearch-sql_test_index_order/_doc opensearch-sql_test_index_order_0 ", "INNER JOIN opensearch-sql_test_index_bank/_account opensearch-sql_test_index_bank_1 ", - "ON opensearch-sql_test_index_order_0.name = opensearch-sql_test_index_bank_1.firstname ", - "WHERE opensearch-sql_test_index_bank_1.state = 'WA' OR opensearch-sql_test_index_order_0.id < 7") - ); + "ON opensearch-sql_test_index_order_0.name = opensearch-sql_test_index_bank_1.firstname" + + " ", + "WHERE opensearch-sql_test_index_bank_1.state = 'WA' OR" + + " opensearch-sql_test_index_order_0.id < 7")); } @Ignore @@ -112,8 +110,7 @@ public void 
tableNamesWithTypeNameExplicitTableAlias() throws IOException { "FROM opensearch-sql_test_index_order a ", "INNER JOIN opensearch-sql_test_index_bank b ", "ON a.name = b.firstname ", - "WHERE b.state = 'WA' OR a.id < 7") - ); + "WHERE b.state = 'WA' OR a.id < 7")); } @Test @@ -129,8 +126,7 @@ public void actualTableNameAsAliasOnColumnFields() throws IOException { "FROM opensearch-sql_test_index_order opensearch-sql_test_index_order_0 ", "INNER JOIN opensearch-sql_test_index_bank b ", "ON opensearch-sql_test_index_order_0.name = b.firstname ", - "WHERE b.state = 'WA' OR opensearch-sql_test_index_order_0.id < 7") - ); + "WHERE b.state = 'WA' OR opensearch-sql_test_index_order_0.id < 7")); } @Test @@ -143,12 +139,14 @@ public void actualTableNameAsAliasOnColumnFieldsTwo() throws IOException { "ON opensearch-sql_test_index_order.name = firstname ", "WHERE opensearch-sql_test_index_bank.state = 'WA' OR id < 7"), query( - "SELECT opensearch-sql_test_index_order_0.id, opensearch-sql_test_index_bank_1.firstname ", + "SELECT opensearch-sql_test_index_order_0.id," + + " opensearch-sql_test_index_bank_1.firstname ", "FROM opensearch-sql_test_index_order opensearch-sql_test_index_order_0 ", "INNER JOIN opensearch-sql_test_index_bank opensearch-sql_test_index_bank_1", - "ON opensearch-sql_test_index_order_0.name = opensearch-sql_test_index_bank_1.firstname ", - "WHERE opensearch-sql_test_index_bank_1.state = 'WA' OR opensearch-sql_test_index_order_0.id < 7") - ); + "ON opensearch-sql_test_index_order_0.name = opensearch-sql_test_index_bank_1.firstname" + + " ", + "WHERE opensearch-sql_test_index_bank_1.state = 'WA' OR" + + " opensearch-sql_test_index_order_0.id < 7")); } @Test @@ -164,44 +162,47 @@ public void columnsWithTableAliasNotAffected() throws IOException { "FROM opensearch-sql_test_index_order a ", "INNER JOIN opensearch-sql_test_index_bank b ", "ON a.name = b.firstname ", - "WHERE b.state = 'WA' OR a.id < 7") - ); + "WHERE b.state = 'WA' OR a.id < 7")); } @Test public 
void commonColumnWithoutTableAliasDifferentTables() throws IOException { exception.expect(ResponseException.class); exception.expectMessage("Field name [firstname] is ambiguous"); - String explain = explainQuery(query( - "SELECT firstname, lastname ", - "FROM opensearch-sql_test_index_bank ", - "LEFT JOIN opensearch-sql_test_index_bank_two ", - "ON firstname = lastname WHERE state = 'VA' " - )); + String explain = + explainQuery( + query( + "SELECT firstname, lastname ", + "FROM opensearch-sql_test_index_bank ", + "LEFT JOIN opensearch-sql_test_index_bank_two ", + "ON firstname = lastname WHERE state = 'VA' ")); } @Test public void sameTablesNoAliasAndNoAliasOnColumns() throws IOException { exception.expect(ResponseException.class); exception.expectMessage("Not unique table/alias: [opensearch-sql_test_index_bank]"); - String explain = explainQuery(query( - "SELECT firstname, lastname ", - "FROM opensearch-sql_test_index_bank ", - "LEFT JOIN opensearch-sql_test_index_bank ", - "ON firstname = lastname WHERE state = 'VA' " - )); + String explain = + explainQuery( + query( + "SELECT firstname, lastname ", + "FROM opensearch-sql_test_index_bank ", + "LEFT JOIN opensearch-sql_test_index_bank ", + "ON firstname = lastname WHERE state = 'VA' ")); } @Test public void sameTablesNoAliasWithTableNameAsAliasOnColumns() throws IOException { exception.expect(ResponseException.class); exception.expectMessage("Not unique table/alias: [opensearch-sql_test_index_bank]"); - String explain = explainQuery(query( - "SELECT opensearch-sql_test_index_bank.firstname", - "FROM opensearch-sql_test_index_bank ", - "JOIN opensearch-sql_test_index_bank ", - "ON opensearch-sql_test_index_bank.firstname = opensearch-sql_test_index_bank.lastname" - )); + String explain = + explainQuery( + query( + "SELECT opensearch-sql_test_index_bank.firstname", + "FROM opensearch-sql_test_index_bank ", + "JOIN opensearch-sql_test_index_bank ", + "ON opensearch-sql_test_index_bank.firstname =" + + " 
opensearch-sql_test_index_bank.lastname")); } @Test @@ -211,16 +212,12 @@ public void sameTablesWithExplicitAliasOnFirst() throws IOException { "SELECT opensearch-sql_test_index_bank.firstname, a.lastname ", "FROM opensearch-sql_test_index_bank a", "JOIN opensearch-sql_test_index_bank ", - "ON opensearch-sql_test_index_bank.firstname = a.lastname " - ), + "ON opensearch-sql_test_index_bank.firstname = a.lastname "), query( "SELECT opensearch-sql_test_index_bank_0.firstname, a.lastname ", "FROM opensearch-sql_test_index_bank a", "JOIN opensearch-sql_test_index_bank opensearch-sql_test_index_bank_0", - "ON opensearch-sql_test_index_bank_0.firstname = a.lastname " - ) - - ); + "ON opensearch-sql_test_index_bank_0.firstname = a.lastname ")); } @Test @@ -230,16 +227,12 @@ public void sameTablesWithExplicitAliasOnSecond() throws IOException { "SELECT opensearch-sql_test_index_bank.firstname, a.lastname ", "FROM opensearch-sql_test_index_bank ", "JOIN opensearch-sql_test_index_bank a", - "ON opensearch-sql_test_index_bank.firstname = a.lastname " - ), + "ON opensearch-sql_test_index_bank.firstname = a.lastname "), query( "SELECT opensearch-sql_test_index_bank_0.firstname, a.lastname ", "FROM opensearch-sql_test_index_bank opensearch-sql_test_index_bank_0", "JOIN opensearch-sql_test_index_bank a", - "ON opensearch-sql_test_index_bank_0.firstname = a.lastname " - ) - - ); + "ON opensearch-sql_test_index_bank_0.firstname = a.lastname ")); } private void sameExplain(String actualQuery, String expectedQuery) throws IOException { diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/JoinIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/JoinIT.java index 46515be134..8019454b77 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/JoinIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/JoinIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.anyOf; @@ 
-62,10 +61,14 @@ public void joinParseCheckSelectedFieldsSplitNL() throws IOException { @Test public void joinParseWithHintsCheckSelectedFieldsSplitHASH() throws IOException { - String query = String.format(Locale.ROOT, "SELECT /*! HASH_WITH_TERMS_FILTER*/ " + - "a.firstname ,a.lastname, a.gender ,d.dog_name FROM %s a JOIN %s d " + - "ON d.holdersName = a.firstname WHERE (a.age > 10 OR a.balance > 2000) AND d.age > 1", - TEST_INDEX_PEOPLE, TEST_INDEX_DOG); + String query = + String.format( + Locale.ROOT, + "SELECT /*! HASH_WITH_TERMS_FILTER*/ a.firstname ,a.lastname, a.gender ,d.dog_name FROM" + + " %s a JOIN %s d ON d.holdersName = a.firstname WHERE (a.age > 10 OR a.balance >" + + " 2000) AND d.age > 1", + TEST_INDEX_PEOPLE, + TEST_INDEX_DOG); JSONObject result = executeQuery(query); verifyJoinParseCheckSelectedFieldsSplitResult(result, false); @@ -75,9 +78,9 @@ public void joinParseWithHintsCheckSelectedFieldsSplitHASH() throws IOException // TODO: figure out why explain does not show results from first query in term filter and // fix either the test or the code. 
- //Arrays.asList("daenerys","nanette","virginia","aurelia","mcgee","hattie","elinor","burton").forEach(name -> { + // Arrays.asList("daenerys","nanette","virginia","aurelia","mcgee","hattie","elinor","burton").forEach(name -> { // Assert.assertThat(explanation, containsString(name)); - //}); + // }); } @Test @@ -95,8 +98,11 @@ public void joinWithNoWhereButWithConditionNL() throws IOException { @Test public void joinWithStarHASH() throws IOException { - String query = String.format(Locale.ROOT, "SELECT * FROM %1$s c " + - "JOIN %1$s h ON h.hname = c.house ", TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT * FROM %1$s c " + "JOIN %1$s h ON h.hname = c.house ", + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -269,9 +275,13 @@ public void testLeftJoinWithLimitNL() throws IOException { @Test public void hintMultiSearchCanRunFewTimesNL() throws IOException { - String query = String.format(Locale.ROOT, "SELECT /*! USE_NL*/ /*! NL_MULTISEARCH_SIZE(2)*/ " + - "c.name.firstname,c.parents.father,h.hname,h.words FROM %1$s c " + - "JOIN %1$s h", TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT /*! USE_NL*/ /*! 
NL_MULTISEARCH_SIZE(2)*/ " + + "c.name.firstname,c.parents.father,h.hname,h.words FROM %1$s c " + + "JOIN %1$s h", + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -281,9 +291,13 @@ public void hintMultiSearchCanRunFewTimesNL() throws IOException { @Test public void joinWithGeoIntersectNL() throws IOException { - String query = String.format(Locale.ROOT, "SELECT p1.description,p2.description " + - "FROM %s p1 JOIN %s p2 ON GEO_INTERSECTS(p2.place,p1.place)", - TEST_INDEX_LOCATION, TEST_INDEX_LOCATION2); + String query = + String.format( + Locale.ROOT, + "SELECT p1.description,p2.description " + + "FROM %s p1 JOIN %s p2 ON GEO_INTERSECTS(p2.place,p1.place)", + TEST_INDEX_LOCATION, + TEST_INDEX_LOCATION2); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -299,11 +313,15 @@ public void joinWithGeoIntersectNL() throws IOException { @Test public void joinWithInQuery() throws IOException { - //TODO: Either change the ON condition field to keyword or create a different subquery - String query = String.format(Locale.ROOT, "SELECT c.gender,c.name.firstname,h.hname,h.words " + - "FROM %1$s c JOIN %1$s h ON h.hname = c.house " + - "WHERE c.name.firstname IN (SELECT holdersName FROM %2$s)", - TEST_INDEX_GAME_OF_THRONES, TEST_INDEX_DOG); + // TODO: Either change the ON condition field to keyword or create a different subquery + String query = + String.format( + Locale.ROOT, + "SELECT c.gender,c.name.firstname,h.hname,h.words " + + "FROM %1$s c JOIN %1$s h ON h.hname = c.house " + + "WHERE c.name.firstname IN (SELECT holdersName FROM %2$s)", + TEST_INDEX_GAME_OF_THRONES, + TEST_INDEX_DOG); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -327,10 +345,14 @@ public void joinWithOrNL() throws IOException { @Test public void joinWithOrWithTermsFilterOpt() throws IOException { - String query = String.format(Locale.ROOT, "SELECT /*! 
HASH_WITH_TERMS_FILTER*/ " + - "d.dog_name,c.name.firstname FROM %s c " + - "JOIN %s d ON d.holdersName = c.name.firstname OR d.age = c.name.ofHisName", - TEST_INDEX_GAME_OF_THRONES, TEST_INDEX_DOG); + String query = + String.format( + Locale.ROOT, + "SELECT /*! HASH_WITH_TERMS_FILTER*/ " + + "d.dog_name,c.name.firstname FROM %s c " + + "JOIN %s d ON d.holdersName = c.name.firstname OR d.age = c.name.ofHisName", + TEST_INDEX_GAME_OF_THRONES, + TEST_INDEX_DOG); executeQuery(query); String explanation = explainQuery(query); @@ -338,9 +360,8 @@ public void joinWithOrWithTermsFilterOpt() throws IOException { Assert.assertTrue(containsTerm(explanation, "holdersName")); Assert.assertTrue(containsTerm(explanation, "age")); - Arrays.asList("daenerys", "brandon", "eddard", "jaime").forEach( - name -> Assert.assertTrue(explanation.contains(name)) - ); + Arrays.asList("daenerys", "brandon", "eddard", "jaime") + .forEach(name -> Assert.assertTrue(explanation.contains(name))); } @Test @@ -394,26 +415,32 @@ public void leftJoinWithAllFromSecondTableNL() throws IOException { @Test public void joinParseCheckSelectedFieldsSplitNLConditionOrderEQ() throws IOException { - final String query = String.format(Locale.ROOT, "SELECT /*! USE_NL*/ " + - "a.firstname, a.lastname, a.gender, d.dog_name FROM %s a JOIN %s d " + - "ON a.firstname = d.holdersName WHERE (a.age > 10 OR a.balance > 2000) AND d.age > 1", - TEST_INDEX_PEOPLE2, TEST_INDEX_DOG2); + final String query = + String.format( + Locale.ROOT, + "SELECT /*! 
USE_NL*/ a.firstname, a.lastname, a.gender, d.dog_name FROM %s a JOIN %s d" + + " ON a.firstname = d.holdersName WHERE (a.age > 10 OR a.balance > 2000) AND d.age" + + " > 1", + TEST_INDEX_PEOPLE2, + TEST_INDEX_DOG2); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); Assert.assertThat(hits.length(), equalTo(2)); - Map match1 = ImmutableMap.of( - "a.firstname", "Daenerys", - "a.lastname", "Targaryen", - "a.gender", "M", - "d.dog_name", "rex"); - Map match2 = ImmutableMap.of( - "a.firstname", "Hattie", - "a.lastname", "Bond", - "a.gender", "M", - "d.dog_name", "snoopy"); + Map match1 = + ImmutableMap.of( + "a.firstname", "Daenerys", + "a.lastname", "Targaryen", + "a.gender", "M", + "d.dog_name", "rex"); + Map match2 = + ImmutableMap.of( + "a.firstname", "Hattie", + "a.lastname", "Bond", + "a.gender", "M", + "d.dog_name", "snoopy"); Assert.assertTrue(hitsInclude(hits, match1)); Assert.assertTrue(hitsInclude(hits, match2)); @@ -422,21 +449,44 @@ public void joinParseCheckSelectedFieldsSplitNLConditionOrderEQ() throws IOExcep @Test public void joinParseCheckSelectedFieldsSplitNLConditionOrderGT() throws IOException { - final String query = String.format(Locale.ROOT, "SELECT /*! USE_NL*/ " + - "a.firstname, a.lastname, a.gender, d.firstname, d.age FROM " + - "%s a JOIN %s d on a.age < d.age " + - "WHERE (d.firstname = 'Lynn' OR d.firstname = 'Obrien') AND a.firstname = 'Mcgee'", - TEST_INDEX_PEOPLE, TEST_INDEX_ACCOUNT); + final String query = + String.format( + Locale.ROOT, + "SELECT /*! 
USE_NL*/ a.firstname, a.lastname, a.gender, d.firstname, d.age FROM %s a" + + " JOIN %s d on a.age < d.age WHERE (d.firstname = 'Lynn' OR d.firstname =" + + " 'Obrien') AND a.firstname = 'Mcgee'", + TEST_INDEX_PEOPLE, + TEST_INDEX_ACCOUNT); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); Assert.assertThat(hits.length(), equalTo(2)); - Map oneMatch = ImmutableMap.of("a.firstname", "Mcgee", "a.lastname", "Mooney", - "a.gender", "M", "d.firstname", "Obrien", "d.age", 40); - Map secondMatch = ImmutableMap.of("a.firstname", "Mcgee", "a.lastname", "Mooney", - "a.gender", "M", "d.firstname", "Lynn", "d.age", 40); + Map oneMatch = + ImmutableMap.of( + "a.firstname", + "Mcgee", + "a.lastname", + "Mooney", + "a.gender", + "M", + "d.firstname", + "Obrien", + "d.age", + 40); + Map secondMatch = + ImmutableMap.of( + "a.firstname", + "Mcgee", + "a.lastname", + "Mooney", + "a.gender", + "M", + "d.firstname", + "Lynn", + "d.age", + 40); Assert.assertTrue(hitsInclude(hits, oneMatch)); Assert.assertTrue(hitsInclude(hits, secondMatch)); @@ -445,21 +495,44 @@ public void joinParseCheckSelectedFieldsSplitNLConditionOrderGT() throws IOExcep @Test public void joinParseCheckSelectedFieldsSplitNLConditionOrderLT() throws IOException { - final String query = String.format(Locale.ROOT, "SELECT /*! USE_NL*/ " + - "a.firstname, a.lastname, a.gender, d.firstname, d.age FROM " + - "%s a JOIN %s d on a.age > d.age " + - "WHERE (d.firstname = 'Sandoval' OR d.firstname = 'Hewitt') AND a.firstname = 'Fulton'", - TEST_INDEX_PEOPLE, TEST_INDEX_ACCOUNT); + final String query = + String.format( + Locale.ROOT, + "SELECT /*! 
USE_NL*/ a.firstname, a.lastname, a.gender, d.firstname, d.age FROM %s a" + + " JOIN %s d on a.age > d.age WHERE (d.firstname = 'Sandoval' OR d.firstname =" + + " 'Hewitt') AND a.firstname = 'Fulton'", + TEST_INDEX_PEOPLE, + TEST_INDEX_ACCOUNT); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); Assert.assertThat(hits.length(), equalTo(2)); - Map oneMatch = ImmutableMap.of("a.firstname", "Fulton", "a.lastname", "Holt", - "a.gender", "F", "d.firstname", "Sandoval", "d.age", 22); - Map secondMatch = ImmutableMap.of("a.firstname", "Fulton", "a.lastname", "Holt", - "a.gender", "F", "d.firstname", "Hewitt", "d.age", 22); + Map oneMatch = + ImmutableMap.of( + "a.firstname", + "Fulton", + "a.lastname", + "Holt", + "a.gender", + "F", + "d.firstname", + "Sandoval", + "d.age", + 22); + Map secondMatch = + ImmutableMap.of( + "a.firstname", + "Fulton", + "a.lastname", + "Holt", + "a.gender", + "F", + "d.firstname", + "Hewitt", + "d.age", + 22); Assert.assertTrue(hitsInclude(hits, oneMatch)); Assert.assertTrue(hitsInclude(hits, secondMatch)); @@ -516,9 +589,12 @@ public void innerJoinNLWithNullInCondition3() throws IOException { private void joinWithAllFromSecondTable(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - final String query = String.format(Locale.ROOT, "SELECT%1$s c.name.firstname, d.* " + - "FROM %2$s c JOIN %2$s d ON d.hname = c.house", - hint, TEST_INDEX_GAME_OF_THRONES); + final String query = + String.format( + Locale.ROOT, + "SELECT%1$s c.name.firstname, d.* " + "FROM %2$s c JOIN %2$s d ON d.hname = c.house", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -534,9 +610,12 @@ private void joinWithAllFromSecondTable(boolean useNestedLoops) throws IOExcepti private void joinWithAllFromFirstTable(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? 
USE_NL_HINT : ""; - final String query = String.format(Locale.ROOT, "SELECT%1$s c.name.firstname " + - "FROM %2$s d JOIN %2$s c ON c.house = d.hname", - hint, TEST_INDEX_GAME_OF_THRONES); + final String query = + String.format( + Locale.ROOT, + "SELECT%1$s c.name.firstname " + "FROM %2$s d JOIN %2$s c ON c.house = d.hname", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -552,30 +631,40 @@ private void joinWithAllFromFirstTable(boolean useNestedLoops) throws IOExceptio private void leftJoinWithAllFromSecondTable(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - final String query = String.format(Locale.ROOT, "SELECT%1$s c.name.firstname, d.* " + - "FROM %2$s c LEFT JOIN %2$s d ON d.hname = c.house", - hint, TEST_INDEX_GAME_OF_THRONES); + final String query = + String.format( + Locale.ROOT, + "SELECT%1$s c.name.firstname, d.* " + + "FROM %2$s c LEFT JOIN %2$s d ON d.hname = c.house", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); Assert.assertThat(hits.length(), equalTo(7)); - hits.forEach(hitObj -> { - JSONObject hit = (JSONObject) hitObj; + hits.forEach( + hitObj -> { + JSONObject hit = (JSONObject) hitObj; - Assert.assertThat(hit.getJSONObject("_source").length(), - equalTo(hit.getString("_id").endsWith("0") ? 1 : 5)); - }); + Assert.assertThat( + hit.getJSONObject("_source").length(), + equalTo(hit.getString("_id").endsWith("0") ? 1 : 5)); + }); } private void joinParseCheckSelectedFieldsSplit(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? 
USE_NL_HINT : ""; String query = - String.format(Locale.ROOT, "SELECT%s a.firstname ,a.lastname,a.gender,d.dog_name " + - "FROM %s a JOIN %s d ON d.holdersName = a.firstname " + - "WHERE (a.age > 10 OR a.balance > 2000) AND d.age > 1", hint, TEST_INDEX_PEOPLE, + String.format( + Locale.ROOT, + "SELECT%s a.firstname ,a.lastname,a.gender,d.dog_name " + + "FROM %s a JOIN %s d ON d.holdersName = a.firstname " + + "WHERE (a.age > 10 OR a.balance > 2000) AND d.age > 1", + hint, + TEST_INDEX_PEOPLE, TEST_INDEX_DOG); JSONObject result = executeQuery(query); @@ -585,9 +674,13 @@ private void joinParseCheckSelectedFieldsSplit(boolean useNestedLoops) throws IO private void joinNoConditionButWithWhere(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - String query = String.format(Locale.ROOT, "SELECT%s c.gender,h.hname,h.words FROM %2$s c " + - "JOIN %2$s h WHERE match_phrase(c.name.firstname, 'Daenerys')", - hint, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT%s c.gender,h.hname,h.words FROM %2$s c " + + "JOIN %2$s h WHERE match_phrase(c.name.firstname, 'Daenerys')", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -598,9 +691,12 @@ private void joinNoConditionAndNoWhere(boolean useNestedLoops) throws IOExceptio final String hint = useNestedLoops ? 
USE_NL_HINT : ""; String query = - String.format(Locale.ROOT, "SELECT%s c.name.firstname,c.parents.father,h.hname,h.words " + - "FROM %2$s c JOIN %2$s h", - hint, TEST_INDEX_GAME_OF_THRONES); + String.format( + Locale.ROOT, + "SELECT%s c.name.firstname,c.parents.father,h.hname,h.words " + + "FROM %2$s c JOIN %2$s h", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -610,17 +706,21 @@ private void joinNoConditionAndNoWhere(boolean useNestedLoops) throws IOExceptio private void joinWithNoWhereButWithCondition(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - String query = String.format(Locale.ROOT, "SELECT%s c.gender,h.hname,h.words " + - "FROM %2$s c JOIN %2$s h ON h.hname = c.house", - hint, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT%s c.gender,h.hname,h.words " + "FROM %2$s c JOIN %2$s h ON h.hname = c.house", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); - Map someMatch = ImmutableMap.of( - "c.gender", "F", - "h.hname", "Targaryen", - "h.words", "fireAndBlood"); + Map someMatch = + ImmutableMap.of( + "c.gender", "F", + "h.hname", "Targaryen", + "h.words", "fireAndBlood"); if (useNestedLoops) { // TODO: should the NL result be different? 
@@ -631,24 +731,26 @@ private void joinWithNoWhereButWithCondition(boolean useNestedLoops) throws IOEx } } - private void verifyJoinParseCheckSelectedFieldsSplitResult(JSONObject result, - boolean useNestedLoops) { + private void verifyJoinParseCheckSelectedFieldsSplitResult( + JSONObject result, boolean useNestedLoops) { - Map match1 = ImmutableMap.of( - "a.firstname", "Daenerys", - "a.lastname", "Targaryen", - "a.gender", "M", - "d.dog_name", "rex"); - Map match2 = ImmutableMap.of( - "a.firstname", "Hattie", - "a.lastname", "Bond", - "a.gender", "M", - "d.dog_name", "snoopy"); + Map match1 = + ImmutableMap.of( + "a.firstname", "Daenerys", + "a.lastname", "Targaryen", + "a.gender", "M", + "d.dog_name", "rex"); + Map match2 = + ImmutableMap.of( + "a.firstname", "Hattie", + "a.lastname", "Bond", + "a.gender", "M", + "d.dog_name", "snoopy"); JSONArray hits = getHits(result); if (useNestedLoops) { - //TODO: change field mapping in ON condition to keyword or change query to get result + // TODO: change field mapping in ON condition to keyword or change query to get result // TODO: why does NL query return no results? Assert.assertThat(hits.length(), equalTo(0)); } else { @@ -662,9 +764,12 @@ private void joinNoConditionAndNoWhereWithTotalLimit(boolean useNestedLoops) thr final String hint = useNestedLoops ? USE_NL_HINT : ""; String query = - String.format(Locale.ROOT, "SELECT%s c.name.firstname,c.parents.father,h.hname,h.words" + - " FROM %2$s c JOIN %2$s h LIMIT 9", - hint, TEST_INDEX_GAME_OF_THRONES); + String.format( + Locale.ROOT, + "SELECT%s c.name.firstname,c.parents.father,h.hname,h.words" + + " FROM %2$s c JOIN %2$s h LIMIT 9", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -675,18 +780,22 @@ private void joinWithNestedFieldsOnReturn(boolean useNestedLoops) throws IOExcep final String hint = useNestedLoops ? 
USE_NL_HINT : ""; String query = - String.format(Locale.ROOT, "SELECT%s c.name.firstname,c.parents.father,h.hname,h.words " + - "FROM %2$s c JOIN %2$s h ON h.hname = c.house " + - "WHERE match_phrase(c.name.firstname, 'Daenerys')", - hint, TEST_INDEX_GAME_OF_THRONES); + String.format( + Locale.ROOT, + "SELECT%s c.name.firstname,c.parents.father,h.hname,h.words " + + "FROM %2$s c JOIN %2$s h ON h.hname = c.house " + + "WHERE match_phrase(c.name.firstname, 'Daenerys')", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); - final Map expectedMatch = ImmutableMap.of( - "c.name.firstname", "Daenerys", - "c.parents.father", "Aerys", - "h.hname", "Targaryen", - "h.words", "fireAndBlood"); + final Map expectedMatch = + ImmutableMap.of( + "c.name.firstname", "Daenerys", + "c.parents.father", "Aerys", + "h.hname", "Targaryen", + "h.words", "fireAndBlood"); if (useNestedLoops) { Assert.assertThat(hits.length(), equalTo(0)); } else { @@ -699,17 +808,21 @@ private void joinWithAllAliasOnReturn(boolean useNestedLoops) throws IOException final String hint = useNestedLoops ? 
USE_NL_HINT : ""; String query = - String.format(Locale.ROOT, "SELECT%s c.name.firstname name,c.parents.father father," + - "h.hname house FROM %2$s c JOIN %2$s h ON h.hname = c.house " + - "WHERE match_phrase(c.name.firstname, 'Daenerys')", - hint, TEST_INDEX_GAME_OF_THRONES); + String.format( + Locale.ROOT, + "SELECT%s c.name.firstname name,c.parents.father father," + + "h.hname house FROM %2$s c JOIN %2$s h ON h.hname = c.house " + + "WHERE match_phrase(c.name.firstname, 'Daenerys')", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); - final Map expectedMatch = ImmutableMap.of( - "name", "Daenerys", - "father", "Aerys", - "house", "Targaryen"); + final Map expectedMatch = + ImmutableMap.of( + "name", "Daenerys", + "father", "Aerys", + "house", "Targaryen"); if (useNestedLoops) { Assert.assertThat(hits.length(), equalTo(0)); @@ -723,20 +836,24 @@ private void joinWithSomeAliasOnReturn(boolean useNestedLoops) throws IOExceptio final String hint = useNestedLoops ? 
USE_NL_HINT : ""; String query = - String.format(Locale.ROOT, "SELECT%s c.name.firstname ,c.parents.father father, " + - "h.hname house FROM %2$s c JOIN %2$s h ON h.hname = c.house " + - "WHERE match_phrase(c.name.firstname, 'Daenerys')", - hint, TEST_INDEX_GAME_OF_THRONES); + String.format( + Locale.ROOT, + "SELECT%s c.name.firstname ,c.parents.father father, " + + "h.hname house FROM %2$s c JOIN %2$s h ON h.hname = c.house " + + "WHERE match_phrase(c.name.firstname, 'Daenerys')", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); - final Map expectedMatch = ImmutableMap.of( - "c.name.firstname", "Daenerys", - "father", "Aerys", - "house", "Targaryen"); + final Map expectedMatch = + ImmutableMap.of( + "c.name.firstname", "Daenerys", + "father", "Aerys", + "house", "Targaryen"); if (useNestedLoops) { - //TODO: Either change the ON condition field to keyword or create a different subquery + // TODO: Either change the ON condition field to keyword or create a different subquery Assert.assertThat(hits.length(), equalTo(0)); } else { Assert.assertThat(hits.length(), equalTo(1)); @@ -749,18 +866,22 @@ private void joinWithNestedFieldsOnComparisonAndOnReturn(boolean useNestedLoops) final String hint = useNestedLoops ? 
USE_NL_HINT : ""; String query = - String.format(Locale.ROOT, "SELECT%s c.name.firstname,c.parents.father, h.hname,h.words " + - " FROM %2$s c JOIN %2$s h ON h.hname = c.name.lastname " + - "WHERE match_phrase(c.name.firstname, 'Daenerys')", - hint, TEST_INDEX_GAME_OF_THRONES); + String.format( + Locale.ROOT, + "SELECT%s c.name.firstname,c.parents.father, h.hname,h.words " + + " FROM %2$s c JOIN %2$s h ON h.hname = c.name.lastname " + + "WHERE match_phrase(c.name.firstname, 'Daenerys')", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); - final Map expectedMatch = ImmutableMap.of( - "c.name.firstname", "Daenerys", - "c.parents.father", "Aerys", - "h.hname", "Targaryen", - "h.words", "fireAndBlood"); + final Map expectedMatch = + ImmutableMap.of( + "c.name.firstname", "Daenerys", + "c.parents.father", "Aerys", + "h.hname", "Targaryen", + "h.words", "fireAndBlood"); if (useNestedLoops) { Assert.assertThat(hits.length(), equalTo(0)); @@ -773,10 +894,12 @@ private void joinWithNestedFieldsOnComparisonAndOnReturn(boolean useNestedLoops) private void testLeftJoin(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - String query = String.format("SELECT%s c.name.firstname, f.name.firstname,f.name.lastname " + - "FROM %2$s c LEFT JOIN %2$s f " + - "ON f.name.firstname = c.parents.father", - hint, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + "SELECT%s c.name.firstname, f.name.firstname,f.name.lastname " + + "FROM %2$s c LEFT JOIN %2$s f " + + "ON f.name.firstname = c.parents.father", + hint, TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -805,10 +928,14 @@ private void testLeftJoin(boolean useNestedLoops) throws IOException { private void hintLimits_firstLimitSecondNull(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? 
USE_NL_HINT : ""; - String query = String.format(Locale.ROOT, "SELECT%s /*! JOIN_TABLES_LIMIT(2,null) */ " + - "c.name.firstname,c.parents.father, h.hname,h.words " + - "FROM %2$s c JOIN %2$s h", - hint, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT%s /*! JOIN_TABLES_LIMIT(2,null) */ " + + "c.name.firstname,c.parents.father, h.hname,h.words " + + "FROM %2$s c JOIN %2$s h", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -818,9 +945,14 @@ private void hintLimits_firstLimitSecondNull(boolean useNestedLoops) throws IOEx private void hintLimits_firstLimitSecondLimit(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - String query = String.format(Locale.ROOT, "SELECT%s /*! JOIN_TABLES_LIMIT(2,2) */ " + - "c.name.firstname,c.parents.father, h.hname,h.words FROM %2$s c " + - "JOIN %2$s h", hint, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT%s /*! JOIN_TABLES_LIMIT(2,2) */ " + + "c.name.firstname,c.parents.father, h.hname,h.words FROM %2$s c " + + "JOIN %2$s h", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -830,10 +962,14 @@ private void hintLimits_firstLimitSecondLimit(boolean useNestedLoops) throws IOE private void hintLimits_firstLimitSecondLimitOnlyOne(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - String query = String.format(Locale.ROOT, "SELECT%s /*! JOIN_TABLES_LIMIT(3,1) */ " + - "c.name.firstname,c.parents.father , h.hname,h.words FROM %2$s h " + - "JOIN %2$s c ON c.name.lastname = h.hname", - hint, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT%s /*! 
JOIN_TABLES_LIMIT(3,1) */ " + + "c.name.firstname,c.parents.father , h.hname,h.words FROM %2$s h " + + "JOIN %2$s c ON c.name.lastname = h.hname", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -843,9 +979,14 @@ private void hintLimits_firstLimitSecondLimitOnlyOne(boolean useNestedLoops) thr private void hintLimits_firstNullSecondLimit(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - String query = String.format(Locale.ROOT, "SELECT%s /*! JOIN_TABLES_LIMIT(null,2) */ " + - "c.name.firstname,c.parents.father , h.hname,h.words FROM %2$s c " + - "JOIN %2$s h", hint, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT%s /*! JOIN_TABLES_LIMIT(null,2) */ " + + "c.name.firstname,c.parents.father , h.hname,h.words FROM %2$s c " + + "JOIN %2$s h", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -855,10 +996,14 @@ private void hintLimits_firstNullSecondLimit(boolean useNestedLoops) throws IOEx private void testLeftJoinWithLimit(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - String query = String.format(Locale.ROOT, "SELECT%s /*! JOIN_TABLES_LIMIT(3,null) */ " + - "c.name.firstname, f.name.firstname,f.name.lastname FROM %2$s c " + - "LEFT JOIN %2$s f ON f.name.firstname = c.parents.father", - hint, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT%s /*! 
JOIN_TABLES_LIMIT(3,null) */ " + + "c.name.firstname, f.name.firstname,f.name.lastname FROM %2$s c " + + "LEFT JOIN %2$s f ON f.name.firstname = c.parents.father", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -868,20 +1013,27 @@ private void testLeftJoinWithLimit(boolean useNestedLoops) throws IOException { private void joinWithOr(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? USE_NL_HINT : ""; - String query = String.format(Locale.ROOT, "SELECT%s d.dog_name,c.name.firstname " + - "FROM %s c JOIN %s d " + - "ON d.holdersName = c.name.firstname OR d.age = c.name.ofHisName", - hint, TEST_INDEX_GAME_OF_THRONES, TEST_INDEX_DOG); + String query = + String.format( + Locale.ROOT, + "SELECT%s d.dog_name,c.name.firstname " + + "FROM %s c JOIN %s d " + + "ON d.holdersName = c.name.firstname OR d.age = c.name.ofHisName", + hint, + TEST_INDEX_GAME_OF_THRONES, + TEST_INDEX_DOG); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); - final Map firstMatch = ImmutableMap.of( - "c.name.firstname", "Daenerys", - "d.dog_name", "rex"); - final Map secondMatch = ImmutableMap.of( - "c.name.firstname", "Brandon", - "d.dog_name", "snoopy"); + final Map firstMatch = + ImmutableMap.of( + "c.name.firstname", "Daenerys", + "d.dog_name", "rex"); + final Map secondMatch = + ImmutableMap.of( + "c.name.firstname", "Brandon", + "d.dog_name", "snoopy"); if (useNestedLoops) { Assert.assertThat(hits.length(), equalTo(1)); @@ -896,10 +1048,14 @@ private void joinWithOr(boolean useNestedLoops) throws IOException { private void joinWithOrderFirstTable(boolean useNestedLoops) throws IOException { final String hint = useNestedLoops ? 
USE_NL_HINT : ""; - String query = String.format(Locale.ROOT, "SELECT%s c.name.firstname,d.words " + - "FROM %2$s c JOIN %2$s d ON d.hname = c.house " + - "ORDER BY c.name.firstname", - hint, TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + Locale.ROOT, + "SELECT%s c.name.firstname,d.words " + + "FROM %2$s c JOIN %2$s d ON d.hname = c.house " + + "ORDER BY c.name.firstname", + hint, + TEST_INDEX_GAME_OF_THRONES); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -912,35 +1068,42 @@ private void joinWithOrderFirstTable(boolean useNestedLoops) throws IOException String[] expectedNames = {"Brandon", "Daenerys", "Eddard", "Jaime"}; - IntStream.rangeClosed(0, 3).forEach(i -> { - String firstnamePath = String.format(Locale.ROOT, "/%d/_source/c.name.firstname", i); - Assert.assertThat(hits.query(firstnamePath), equalTo(expectedNames[i])); - }); + IntStream.rangeClosed(0, 3) + .forEach( + i -> { + String firstnamePath = + String.format(Locale.ROOT, "/%d/_source/c.name.firstname", i); + Assert.assertThat(hits.query(firstnamePath), equalTo(expectedNames[i])); + }); } } private boolean containsTerm(final String explainedQuery, final String termName) { return Pattern.compile( - Pattern.quote("\"terms\":{") - + ".*" - + Pattern.quote("\"" + termName + "\":[") - ) + Pattern.quote("\"terms\":{") + ".*" + Pattern.quote("\"" + termName + "\":[")) .matcher(explainedQuery.replaceAll("\\s+", "")) .find(); } - private void joinWithNullInCondition(boolean useNestedLoops, String left, - String oper1, String oper2, int expectedNum) + private void joinWithNullInCondition( + boolean useNestedLoops, String left, String oper1, String oper2, int expectedNum) throws IOException { final String hint = useNestedLoops ? 
USE_NL_HINT : ""; String query = - String.format(Locale.ROOT, "SELECT%s c.name.firstname,c.parents.father,c.hname," + - "f.name.firstname,f.house,f.hname FROM %s c " + - "%s JOIN %s f ON f.name.firstname = c.parents.father " + - "%s f.house = c.hname %s f.house = c.name.firstname", - hint, TEST_INDEX_GAME_OF_THRONES, left, TEST_INDEX_GAME_OF_THRONES, oper1, oper2); + String.format( + Locale.ROOT, + "SELECT%s c.name.firstname,c.parents.father,c.hname," + + "f.name.firstname,f.house,f.hname FROM %s c " + + "%s JOIN %s f ON f.name.firstname = c.parents.father " + + "%s f.house = c.hname %s f.house = c.name.firstname", + hint, + TEST_INDEX_GAME_OF_THRONES, + left, + TEST_INDEX_GAME_OF_THRONES, + oper1, + oper2); JSONObject result = executeQuery(query); JSONArray hits = getHits(result); @@ -968,20 +1131,22 @@ private boolean hitsInclude(final JSONArray actualHits, Map expectedS return false; } - private void assertHitMatches(final JSONObject actualHit, - final Map expectedSourceValues) { + private void assertHitMatches( + final JSONObject actualHit, final Map expectedSourceValues) { final JSONObject src = actualHit.getJSONObject("_source"); Assert.assertThat(src.length(), equalTo(expectedSourceValues.size())); - src.keySet().forEach(key -> { - Assert.assertTrue(expectedSourceValues.containsKey(key)); - Object value = src.get(key); - Assert.assertThat(value, equalTo(expectedSourceValues.get(key))); - }); + src.keySet() + .forEach( + key -> { + Assert.assertTrue(expectedSourceValues.containsKey(key)); + Object value = src.get(key); + Assert.assertThat(value, equalTo(expectedSourceValues.get(key))); + }); } - private boolean hitMatches(final Map actualHit, - final Map expectedSourceValues) { + private boolean hitMatches( + final Map actualHit, final Map expectedSourceValues) { final Map src = uncheckedGetMap(actualHit.get("_source")); @@ -997,8 +1162,8 @@ private boolean hitMatches(final Map actualHit, Object actualValue = src.get(key); Object expectedValue = 
expectedSourceValues.get(key); - if ((actualValue == null && expectedValue != null) || - (actualValue != null && expectedValue == null)) { + if ((actualValue == null && expectedValue != null) + || (actualValue != null && expectedValue == null)) { return false; } else if (actualValue != null && !actualValue.equals(expectedValue)) { return false; diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/MathFunctionsIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/MathFunctionsIT.java index b42819bdf7..fcf1edf3e0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/MathFunctionsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/MathFunctionsIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.closeTo; @@ -32,9 +31,7 @@ protected void init() throws Exception { @Test public void lowerCaseFunctionCall() throws IOException { - SearchHit[] hits = query( - "SELECT abs(age - 100) AS abs" - ); + SearchHit[] hits = query("SELECT abs(age - 100) AS abs"); for (SearchHit hit : hits) { double abs = (double) getField(hit, "abs"); assertThat(abs, greaterThanOrEqualTo(0.0)); @@ -43,9 +40,7 @@ public void lowerCaseFunctionCall() throws IOException { @Test public void upperCaseFunctionCall() throws IOException { - SearchHit[] hits = query( - "SELECT ABS(age - 100) AS abs" - ); + SearchHit[] hits = query("SELECT ABS(age - 100) AS abs"); for (SearchHit hit : hits) { double abs = (double) getField(hit, "abs"); assertThat(abs, greaterThanOrEqualTo(0.0)); @@ -54,36 +49,28 @@ public void upperCaseFunctionCall() throws IOException { @Test public void eulersNumber() throws IOException { - SearchHit[] hits = query( - "SELECT E() AS e" - ); + SearchHit[] hits = query("SELECT E() AS e"); double e = (double) getField(hits[0], "e"); assertThat(e, equalTo(Math.E)); } @Test public void pi() throws IOException { - SearchHit[] hits = query( - "SELECT PI() AS pi" - ); + 
SearchHit[] hits = query("SELECT PI() AS pi"); double pi = (double) getField(hits[0], "pi"); assertThat(pi, equalTo(Math.PI)); } @Test public void expm1Function() throws IOException { - SearchHit[] hits = query( - "SELECT EXPM1(2) AS expm1" - ); + SearchHit[] hits = query("SELECT EXPM1(2) AS expm1"); double expm1 = (double) getField(hits[0], "expm1"); assertThat(expm1, equalTo(Math.expm1(2))); } @Test public void degreesFunction() throws IOException { - SearchHit[] hits = query( - "SELECT age, DEGREES(age) AS degrees" - ); + SearchHit[] hits = query("SELECT age, DEGREES(age) AS degrees"); for (SearchHit hit : hits) { int age = (int) getFieldFromSource(hit, "age"); double degrees = (double) getField(hit, "degrees"); @@ -93,9 +80,7 @@ public void degreesFunction() throws IOException { @Test public void radiansFunction() throws IOException { - SearchHit[] hits = query( - "SELECT age, RADIANS(age) as radians" - ); + SearchHit[] hits = query("SELECT age, RADIANS(age) as radians"); for (SearchHit hit : hits) { int age = (int) getFieldFromSource(hit, "age"); double radians = (double) getField(hit, "radians"); @@ -105,65 +90,54 @@ public void radiansFunction() throws IOException { @Test public void sin() throws IOException { - SearchHit[] hits = query( - "SELECT SIN(PI()) as sin" - ); + SearchHit[] hits = query("SELECT SIN(PI()) as sin"); double sin = (double) getField(hits[0], "sin"); assertThat(sin, equalTo(Math.sin(Math.PI))); } @Test public void asin() throws IOException { - SearchHit[] hits = query( - "SELECT ASIN(PI()) as asin" - ); + SearchHit[] hits = query("SELECT ASIN(PI()) as asin"); double asin = Double.valueOf((String) getField(hits[0], "asin")); assertThat(asin, equalTo(Math.asin(Math.PI))); } @Test public void sinh() throws IOException { - SearchHit[] hits = query( - "SELECT SINH(PI()) as sinh" - ); + SearchHit[] hits = query("SELECT SINH(PI()) as sinh"); double sinh = (double) getField(hits[0], "sinh"); assertThat(sinh, equalTo(Math.sinh(Math.PI))); } @Test 
public void power() throws IOException { - SearchHit[] hits = query( - "SELECT POWER(age, 2) AS power", - "WHERE (age IS NOT NULL) AND (balance IS NOT NULL) and (POWER(balance, 3) > 0)" - ); + SearchHit[] hits = + query( + "SELECT POWER(age, 2) AS power", + "WHERE (age IS NOT NULL) AND (balance IS NOT NULL) and (POWER(balance, 3) > 0)"); double power = (double) getField(hits[0], "power"); assertTrue(power >= 0); } @Test public void atan2() throws IOException { - SearchHit[] hits = query( - "SELECT ATAN2(age, age) AS atan2", - "WHERE (age IS NOT NULL) AND (ATAN2(age, age) > 0)" - ); + SearchHit[] hits = + query( + "SELECT ATAN2(age, age) AS atan2", "WHERE (age IS NOT NULL) AND (ATAN2(age, age) > 0)"); double atan2 = (double) getField(hits[0], "atan2"); assertThat(atan2, equalTo(Math.atan2(1, 1))); } @Test public void cot() throws IOException { - SearchHit[] hits = query( - "SELECT COT(PI()) AS cot" - ); + SearchHit[] hits = query("SELECT COT(PI()) AS cot"); double cot = (double) getField(hits[0], "cot"); assertThat(cot, closeTo(1 / Math.tan(Math.PI), 0.001)); } @Test public void sign() throws IOException { - SearchHit[] hits = query( - "SELECT SIGN(E()) AS sign" - ); + SearchHit[] hits = query("SELECT SIGN(E()) AS sign"); double sign = (double) getField(hits[0], "sign"); assertThat(sign, equalTo(Math.signum(Math.E))); } @@ -186,18 +160,18 @@ public void logWithTwoParams() throws IOException { public void logInAggregationShouldPass() { assertThat( executeQuery( - "SELECT LOG(age) FROM " + TestsConstants.TEST_INDEX_ACCOUNT - + " WHERE age IS NOT NULL GROUP BY LOG(age) ORDER BY LOG(age)", "jdbc" - ), - containsString("\"type\": \"double\"") - ); + "SELECT LOG(age) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " WHERE age IS NOT NULL GROUP BY LOG(age) ORDER BY LOG(age)", + "jdbc"), + containsString("\"type\": \"double\"")); assertThat( executeQuery( - "SELECT LOG(2, age) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " WHERE age IS NOT NULL GROUP BY LOG(2, age) ORDER 
BY LOG(2, age)", "jdbc" - ), - containsString("\"type\": \"double\"") - ); + "SELECT LOG(2, age) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " WHERE age IS NOT NULL GROUP BY LOG(2, age) ORDER BY LOG(2, age)", + "jdbc"), + containsString("\"type\": \"double\"")); } @Test @@ -218,11 +192,11 @@ public void ln() throws IOException { public void lnInAggregationShouldPass() { assertThat( executeQuery( - "SELECT LN(age) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " WHERE age IS NOT NULL GROUP BY LN(age) ORDER BY LN(age)", "jdbc" - ), - containsString("\"type\": \"double\"") - ); + "SELECT LN(age) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " WHERE age IS NOT NULL GROUP BY LN(age) ORDER BY LN(age)", + "jdbc"), + containsString("\"type\": \"double\"")); } @Test @@ -238,10 +212,11 @@ private SearchHit[] query(String select, String... statements) throws IOExceptio final String response = executeQueryWithStringOutput(select + " " + FROM + " " + String.join(" ", statements)); - final XContentParser parser = new JsonXContentParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(response)); + final XContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(response)); return SearchResponse.fromXContent(parser).getHits().getHits(); } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/MetaDataQueriesIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/MetaDataQueriesIT.java index 9f0fca68d5..287e0b5cca 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/MetaDataQueriesIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/MetaDataQueriesIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.equalTo; @@ -26,162 +25,44 @@ import org.opensearch.client.Request; import 
org.opensearch.sql.legacy.utils.StringUtils; - /** - * The following are tests for SHOW/DESCRIBE query support under Pretty Format Response protocol using JDBC format. - *

- * Unlike SELECT queries, the JDBC format response of SHOW and DESCRIBE queries has determined "schema" fields. - *

- * Since these integration tests are receiving the JSON response as output, "datarows" values can't be validated by - * key since it is a JSONArray, so the expected length of "schema" will be used instead as well as the expected - * position of the field data in "datarows". - *

- * These are the outputs of "schema" for SHOW and DESCRIBE, the position of the value in "datarows" will match the - * position of the field in "schema": - *

- * 1) SHOW query (based on the getTables() method listed here https://docs.oracle.com/javase/8/docs/api/java/sql/DatabaseMetaData.html) - * "schema": [ - * { - * "name": "TABLE_CAT", - * "type": "keyword" - * }, - * { - * "name": "TABLE_SCHEM", - * "type": "keyword" - * }, - * { - * "name": "TABLE_NAME", - * "type": "keyword" - * }, - * { - * "name": "TABLE_TYPE", - * "type": "keyword" - * }, - * { - * "name": "REMARKS", - * "type": "keyword" - * }, - * { - * "name": "TYPE_CAT", - * "type": "keyword" - * }, - * { - * "name": "TYPE_SCHEM", - * "type": "keyword" - * }, - * { - * "name": "TYPE_NAME", - * "type": "keyword" - * }, - * { - * "name": "SELF_REFERENCING_COL_NAME", - * "type": "keyword" - * }, - * { - * "name": "REF_GENERATION", - * "type": "keyword" - * } - * ] - *

- * 2) DESCRIBE query (based on the getColumns() method listed here https://docs.oracle.com/javase/8/docs/api/java/sql/DatabaseMetaData.html) - * "schema": [ - * { - * "name": "TABLE_CAT", - * "type": "keyword" - * }, - * { - * "name": "TABLE_SCHEM", - * "type": "keyword" - * }, - * { - * "name": "TABLE_NAME", - * "type": "keyword" - * }, - * { - * "name": "COLUMN_NAME", - * "type": "keyword" - * }, - * { - * "name": "DATA_TYPE", - * "type": "integer" - * }, - * { - * "name": "TYPE_NAME", - * "type": "keyword" - * }, - * { - * "name": "COLUMN_SIZE", - * "type": "integer" - * }, - * { - * "name": "BUFFER_LENGTH", - * "type": "integer" - * }, - * { - * "name": "DECIMAL_DIGITS", - * "type": "integer" - * }, - * { - * "name": "NUM_PREC_RADIX", - * "type": "integer" - * }, - * { - * "name": "NULLABLE", - * "type": "integer" - * }, - * { - * "name": "REMARKS", - * "type": "keyword" - * }, - * { - * "name": "COLUMN_DEF", - * "type": "keyword" - * }, - * { - * "name": "SQL_DATA_TYPE", - * "type": "integer" - * }, - * { - * "name": "SQL_DATETIME_SUB", - * "type": "integer" - * }, - * { - * "name": "CHAR_OCTET_LENGTH", - * "type": "integer" - * }, - * { - * "name": "ORDINAL_POSITION", - * "type": "integer" - * }, - * { - * "name": "IS_NULLABLE", - * "type": "keyword" - * }, - * { - * "name": "SCOPE_CATALOG", - * "type": "keyword" - * }, - * { - * "name": "SCOPE_SCHEMA", - * "type": "keyword" - * }, - * { - * "name": "SCOPE_TABLE", - * "type": "keyword" - * }, - * { - * "name": "SOURCE_DATA_TYPE", - * "type": "short" - * }, - * { - * "name": "IS_AUTOINCREMENT", - * "type": "keyword" - * }, - * { - * "name": "IS_GENERATEDCOLUMN", - * "type": "keyword" - * } + * The following are tests for SHOW/DESCRIBE query support under Pretty Format Response protocol + * using JDBC format. + * + *

Unlike SELECT queries, the JDBC format response of SHOW and DESCRIBE queries has determined + * "schema" fields. + * + *

Since these integration tests are receiving the JSON response as output, "datarows" values + * can't be validated by key since it is a JSONArray, so the expected length of "schema" will be + * used instead as well as the expected position of the field data in "datarows". + * + *

These are the outputs of "schema" for SHOW and DESCRIBE, the position of the value in + * "datarows" will match the position of the field in "schema": + * + *

1) SHOW query (based on the getTables() method listed here + * https://docs.oracle.com/javase/8/docs/api/java/sql/DatabaseMetaData.html) "schema": [ { "name": + * "TABLE_CAT", "type": "keyword" }, { "name": "TABLE_SCHEM", "type": "keyword" }, { "name": + * "TABLE_NAME", "type": "keyword" }, { "name": "TABLE_TYPE", "type": "keyword" }, { "name": + * "REMARKS", "type": "keyword" }, { "name": "TYPE_CAT", "type": "keyword" }, { "name": + * "TYPE_SCHEM", "type": "keyword" }, { "name": "TYPE_NAME", "type": "keyword" }, { "name": + * "SELF_REFERENCING_COL_NAME", "type": "keyword" }, { "name": "REF_GENERATION", "type": "keyword" } * ] + * + *

2) DESCRIBE query (based on the getColumns() method listed here + * https://docs.oracle.com/javase/8/docs/api/java/sql/DatabaseMetaData.html) "schema": [ { "name": + * "TABLE_CAT", "type": "keyword" }, { "name": "TABLE_SCHEM", "type": "keyword" }, { "name": + * "TABLE_NAME", "type": "keyword" }, { "name": "COLUMN_NAME", "type": "keyword" }, { "name": + * "DATA_TYPE", "type": "integer" }, { "name": "TYPE_NAME", "type": "keyword" }, { "name": + * "COLUMN_SIZE", "type": "integer" }, { "name": "BUFFER_LENGTH", "type": "integer" }, { "name": + * "DECIMAL_DIGITS", "type": "integer" }, { "name": "NUM_PREC_RADIX", "type": "integer" }, { "name": + * "NULLABLE", "type": "integer" }, { "name": "REMARKS", "type": "keyword" }, { "name": + * "COLUMN_DEF", "type": "keyword" }, { "name": "SQL_DATA_TYPE", "type": "integer" }, { "name": + * "SQL_DATETIME_SUB", "type": "integer" }, { "name": "CHAR_OCTET_LENGTH", "type": "integer" }, { + * "name": "ORDINAL_POSITION", "type": "integer" }, { "name": "IS_NULLABLE", "type": "keyword" }, { + * "name": "SCOPE_CATALOG", "type": "keyword" }, { "name": "SCOPE_SCHEMA", "type": "keyword" }, { + * "name": "SCOPE_TABLE", "type": "keyword" }, { "name": "SOURCE_DATA_TYPE", "type": "short" }, { + * "name": "IS_AUTOINCREMENT", "type": "keyword" }, { "name": "IS_GENERATEDCOLUMN", "type": + * "keyword" } ] */ public class MetaDataQueriesIT extends SQLIntegTestCase { @@ -294,29 +175,27 @@ public void describeSingleIndex() throws IOException { @Ignore("Breaking change, the new engine will return alias instead of index name") @Test public void showSingleIndexAlias() throws IOException { - client().performRequest(new Request("PUT", - TestsConstants.TEST_INDEX_ACCOUNT + "/_alias/acc")); + client().performRequest(new Request("PUT", TestsConstants.TEST_INDEX_ACCOUNT + "/_alias/acc")); JSONObject expected = executeQuery("SHOW TABLES LIKE " + TestsConstants.TEST_INDEX_ACCOUNT); JSONObject actual = executeQuery("SHOW TABLES LIKE acc"); 
assertThat(getDataRows(actual).length(), equalTo(1)); - assertTrue(StringUtils.format("Expected: %s, actual: %s", expected, actual), - expected.similar(actual)); + assertTrue( + StringUtils.format("Expected: %s, actual: %s", expected, actual), expected.similar(actual)); } @Ignore("Breaking change, the new engine will return alias instead of index name") @Test public void describeSingleIndexAlias() throws IOException { - client().performRequest(new Request("PUT", - TestsConstants.TEST_INDEX_ACCOUNT + "/_alias/acc")); + client().performRequest(new Request("PUT", TestsConstants.TEST_INDEX_ACCOUNT + "/_alias/acc")); JSONObject expected = executeQuery("DESCRIBE TABLES LIKE " + TestsConstants.TEST_INDEX_ACCOUNT); JSONObject actual = executeQuery("DESCRIBE TABLES LIKE acc"); assertThat(getDataRows(actual).length(), greaterThan(0)); - assertTrue(StringUtils.format("Expected: %s, actual: %s", expected, actual), - expected.similar(actual)); + assertTrue( + StringUtils.format("Expected: %s, actual: %s", expected, actual), expected.similar(actual)); } @Test @@ -355,7 +234,8 @@ public void describeSingleIndexWithObjectFieldShouldPass() throws IOException { assertThat(dataRows.length(), greaterThan(0)); assertThat(dataRows.getJSONArray(0).length(), equalTo(DESCRIBE_FIELD_LENGTH)); - verifySome(dataRows, + verifySome( + dataRows, describeRow(TEST_INDEX_GAME_OF_THRONES, "nickname", "text"), describeRow(TEST_INDEX_GAME_OF_THRONES, "name", "object"), describeRow(TEST_INDEX_GAME_OF_THRONES, "name.firstname", "text"), @@ -402,8 +282,10 @@ public void describeWildcardIndex() throws IOException { @Test public void describeWildcardColumn() throws IOException { - JSONObject response = executeQuery(String.format("DESCRIBE TABLES LIKE %s COLUMNS LIKE %%name", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + "DESCRIBE TABLES LIKE %s COLUMNS LIKE %%name", TestsConstants.TEST_INDEX_ACCOUNT)); String pattern = ".*name"; JSONArray dataRows = 
getDataRows(response); @@ -418,8 +300,10 @@ public void describeWildcardColumn() throws IOException { @Test public void describeSingleCharacterWildcard() throws IOException { - JSONObject response = executeQuery(String.format("DESCRIBE TABLES LIKE %s COLUMNS LIKE %%na_e", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + "DESCRIBE TABLES LIKE %s COLUMNS LIKE %%na_e", TestsConstants.TEST_INDEX_ACCOUNT)); String pattern = ".*na.e"; JSONArray dataRows = getDataRows(response); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/MethodQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/MethodQueryIT.java index 027228a92b..0db2e921bf 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/MethodQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/MethodQueryIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.both; @@ -28,162 +27,136 @@ protected void init() throws Exception { } /** - * query - * "query" : { - * query_string" : { - * "query" : "address:880 Holmes Lane" - * } - * } + * query "query" : { query_string" : { "query" : "address:880 Holmes Lane" } } * * @throws IOException */ @Test public void queryTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "select address from %s where query('address:880 Holmes Lane') limit 3", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, - containsString("query_string\\\":{\\\"query\\\":\\\"address:880 Holmes Lane")); - + final String result = + explainQuery( + String.format( + Locale.ROOT, + "select address from %s where query('address:880 Holmes Lane') limit 3", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result, containsString("query_string\\\":{\\\"query\\\":\\\"address:880 Holmes Lane")); } /** - * matchQuery - * "query" : { - * "match" : { - * "address" : { - * 
"query" : "880 Holmes Lane", - * "type" : "boolean" - * } - * } - * } + * matchQuery "query" : { "match" : { "address" : { "query" : "880 Holmes Lane", "type" : + * "boolean" } } } * * @throws IOException */ @Test public void matchQueryTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "select address from %s where address= matchQuery('880 Holmes Lane') limit 3", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, + final String result = + explainQuery( + String.format( + Locale.ROOT, + "select address from %s where address= matchQuery('880 Holmes Lane') limit 3", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result, containsString("{\\\"match\\\":{\\\"address\\\":{\\\"query\\\":\\\"880 Holmes Lane\\\"")); } /** - * matchQuery - * { - * "query": { - * "bool": { - * "must": { - * "bool": { - * "should": [ - * { - * "constant_score": { - * "query": { - * "match": { - * "address": { - * "query": "Lane", - * "type": "boolean" - * } - * } - * }, - * "boost": 100 - * } - * }, - * { - * "constant_score": { - * "query": { - * "match": { - * "address": { - * "query": "Street", - * "type": "boolean" - * } - * } - * }, - * "boost": 0.5 - * } - * } - * ] - * } - * } - * } - * } - * } + * matchQuery { "query": { "bool": { "must": { "bool": { "should": [ { "constant_score": { + * "query": { "match": { "address": { "query": "Lane", "type": "boolean" } } }, "boost": 100 } }, + * { "constant_score": { "query": { "match": { "address": { "query": "Street", "type": "boolean" } + * } }, "boost": 0.5 } } ] } } } } } * * @throws IOException */ @Test - @Ignore("score query no longer maps to constant_score in the V2 engine - @see org.opensearch.sql.sql.ScoreQueryIT") + @Ignore( + "score query no longer maps to constant_score in the V2 engine - @see" + + " org.opensearch.sql.sql.ScoreQueryIT") public void scoreQueryTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "select 
address from %s " + - "where score(matchQuery(address, 'Lane'),100) " + - "or score(matchQuery(address,'Street'),0.5) order by _score desc limit 3", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, - both(containsString("{\"constant_score\":" + - "{\"filter\":{\"match\":{\"address\":{\"query\":\"Lane\"")).and( - containsString("{\"constant_score\":" + - "{\"filter\":{\"match\":{\"address\":{\"query\":\"Street\""))); + final String result = + explainQuery( + String.format( + Locale.ROOT, + "select address from %s " + + "where score(matchQuery(address, 'Lane'),100) " + + "or score(matchQuery(address,'Street'),0.5) order by _score desc limit 3", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result, + both(containsString( + "{\"constant_score\":" + "{\"filter\":{\"match\":{\"address\":{\"query\":\"Lane\"")) + .and( + containsString( + "{\"constant_score\":" + + "{\"filter\":{\"match\":{\"address\":{\"query\":\"Street\""))); } @Test public void regexpQueryTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "SELECT * FROM %s WHERE address=REGEXP_QUERY('.*')", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, - containsString("{\"bool\":{\"must\":[{\"regexp\":" - + "{\"address\":{\"value\":\".*\",\"flags_value\":255,\"max_determinized_states\":10000,\"boost\":1.0}}}")); + final String result = + explainQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE address=REGEXP_QUERY('.*')", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result, + containsString( + "{\"bool\":{\"must\":[{\"regexp\":" + + "{\"address\":{\"value\":\".*\",\"flags_value\":255,\"max_determinized_states\":10000,\"boost\":1.0}}}")); } @Test public void negativeRegexpQueryTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "SELECT * FROM %s WHERE NOT(address=REGEXP_QUERY('.*'))", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, 
- containsString("{\"bool\":{\"must_not\":[{\"regexp\":" - + "{\"address\":{\"value\":\".*\",\"flags_value\":255,\"max_determinized_states\":10000,\"boost\":1.0}}}")); + final String result = + explainQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE NOT(address=REGEXP_QUERY('.*'))", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result, + containsString( + "{\"bool\":{\"must_not\":[{\"regexp\":" + + "{\"address\":{\"value\":\".*\",\"flags_value\":255,\"max_determinized_states\":10000,\"boost\":1.0}}}")); } /** - * wildcardQuery - * l*e means leae ltae ... - * "wildcard": { - * "address" : { - * "wildcard" : "l*e" - * } - * } + * wildcardQuery l*e means leae ltae ... "wildcard": { "address" : { "wildcard" : "l*e" } } * * @throws IOException */ @Test public void wildcardQueryTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "select address from %s where address= wildcardQuery('l*e') order by _score desc limit 3", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, - containsString("{\"wildcard\":{\"address\":{\"wildcard\":\"l*e\"")); + final String result = + explainQuery( + String.format( + Locale.ROOT, + "select address from %s where address= wildcardQuery('l*e') order by _score desc" + + " limit 3", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat(result, containsString("{\"wildcard\":{\"address\":{\"wildcard\":\"l*e\"")); } /** - * matchPhraseQuery - * "address" : { - * "query" : "671 Bristol Street", - * "type" : "phrase" - * } + * matchPhraseQuery "address" : { "query" : "671 Bristol Street", "type" : "phrase" } * * @throws IOException */ @Test - @Ignore("score query no longer handled by legacy engine - @see org.opensearch.sql.sql.ScoreQueryIT") + @Ignore( + "score query no longer handled by legacy engine - @see org.opensearch.sql.sql.ScoreQueryIT") public void matchPhraseQueryTest() throws IOException { - final String result = 
explainQuery(String.format(Locale.ROOT, - "select address from %s " + - "where address= matchPhrase('671 Bristol Street') order by _score desc limit 3", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, - containsString("{\"match_phrase\":{\"address\":{\"query\":\"671 Bristol Street\"")); + final String result = + explainQuery( + String.format( + Locale.ROOT, + "select address from %s where address= matchPhrase('671 Bristol Street') order by" + + " _score desc limit 3", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result, containsString("{\"match_phrase\":{\"address\":{\"query\":\"671 Bristol Street\"")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/MetricsIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/MetricsIT.java index 3eeac66b97..238d3aeaff 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/MetricsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/MetricsIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.equalTo; @@ -47,9 +46,7 @@ private void multiQueries(int n) throws IOException { } private Request makeStatRequest() { - return new Request( - "GET", STATS_API_ENDPOINT - ); + return new Request("GET", STATS_API_ENDPOINT); } private String executeStatRequest(final Request request) throws IOException { @@ -69,5 +66,4 @@ private String executeStatRequest(final Request request) throws IOException { return sb.toString(); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/MultiQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/MultiQueryIT.java index d8d2b8875a..84750f8a27 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/MultiQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/MultiQueryIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static 
org.hamcrest.Matchers.equalTo; @@ -34,15 +33,17 @@ protected void init() throws Exception { @Test public void unionAllSameRequestOnlyOneRecordTwice() throws IOException { - String query = String.format("SELECT firstname " + - "FROM %s " + - "WHERE firstname = 'Amber' " + - "LIMIT 1 " + - "UNION ALL " + - "SELECT firstname " + - "FROM %s " + - "WHERE firstname = 'Amber'", - TestsConstants.TEST_INDEX_ACCOUNT, TestsConstants.TEST_INDEX_ACCOUNT); + String query = + String.format( + "SELECT firstname " + + "FROM %s " + + "WHERE firstname = 'Amber' " + + "LIMIT 1 " + + "UNION ALL " + + "SELECT firstname " + + "FROM %s " + + "WHERE firstname = 'Amber'", + TestsConstants.TEST_INDEX_ACCOUNT, TestsConstants.TEST_INDEX_ACCOUNT); JSONObject response = executeQuery(query); JSONArray hits = getHits(response); @@ -58,10 +59,12 @@ public void unionAllSameRequestOnlyOneRecordTwice() throws IOException { @Test public void unionAllOnlyOneRecordEachWithAlias() throws IOException { - String query = String.format("SELECT firstname FROM %s WHERE firstname = 'Amber' " + - "UNION ALL " + - "SELECT dog_name as firstname FROM %s WHERE dog_name = 'rex'", - TestsConstants.TEST_INDEX_ACCOUNT, TestsConstants.TEST_INDEX_DOG); + String query = + String.format( + "SELECT firstname FROM %s WHERE firstname = 'Amber' " + + "UNION ALL " + + "SELECT dog_name as firstname FROM %s WHERE dog_name = 'rex'", + TestsConstants.TEST_INDEX_ACCOUNT, TestsConstants.TEST_INDEX_DOG); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(2)); @@ -80,12 +83,14 @@ public void unionAllOnlyOneRecordEachWithAlias() throws IOException { @Test public void unionAllOnlyOneRecordEachWithComplexAlias() throws IOException { - String query = String.format("SELECT firstname FROM %s WHERE firstname = 'Amber' " + - "UNION ALL " + - "SELECT name.firstname as firstname " + - "FROM %s " + - "WHERE name.firstname = 'daenerys'", - TestsConstants.TEST_INDEX_ACCOUNT, 
TestsConstants.TEST_INDEX_GAME_OF_THRONES); + String query = + String.format( + "SELECT firstname FROM %s WHERE firstname = 'Amber' " + + "UNION ALL " + + "SELECT name.firstname as firstname " + + "FROM %s " + + "WHERE name.firstname = 'daenerys'", + TestsConstants.TEST_INDEX_ACCOUNT, TestsConstants.TEST_INDEX_GAME_OF_THRONES); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(2)); @@ -144,10 +149,12 @@ public void minusCMinusDTwoFieldsNoAliasWithScrolling() throws IOException { @Test public void minusCMinusDTwoFieldsAliasOnBothSecondTableFields() throws IOException { - String query = String.format("SELECT pk, letter FROM %s WHERE system_name = 'C' " + - "MINUS " + - "SELECT myId as pk, myLetter as letter FROM %s WHERE system_name = 'E'", - TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); + String query = + String.format( + "SELECT pk, letter FROM %s WHERE system_name = 'C' " + + "MINUS " + + "SELECT myId as pk, myLetter as letter FROM %s WHERE system_name = 'E'", + TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(1)); @@ -174,10 +181,12 @@ public void minusCMinusDTwoFieldsAliasOnBothTablesWithScrolling() throws IOExcep @Test public void minusCMinusCTwoFieldsOneAlias() throws IOException { - String query = String.format("SELECT pk as myId, letter FROM %s WHERE system_name = 'C' " + - "MINUS " + - "SELECT pk as myId, letter FROM %s WHERE system_name = 'C'", - TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); + String query = + String.format( + "SELECT pk as myId, letter FROM %s WHERE system_name = 'C' " + + "MINUS " + + "SELECT pk as myId, letter FROM %s WHERE system_name = 'C'", + TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(0)); @@ -185,10 +194,12 @@ public void 
minusCMinusCTwoFieldsOneAlias() throws IOException { @Test public void minusCMinusTNonExistentFieldTwoFields() throws IOException { - String query = String.format("SELECT pk, letter FROM %s WHERE system_name = 'C' " + - "MINUS " + - "SELECT pk, letter FROM %s WHERE system_name = 'T' ", - TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); + String query = + String.format( + "SELECT pk, letter FROM %s WHERE system_name = 'C' " + + "MINUS " + + "SELECT pk, letter FROM %s WHERE system_name = 'T' ", + TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(3)); @@ -229,20 +240,24 @@ public void minusTMinusCNonExistentFieldFirstQueryWithScrollingAndOptimization() } private void innerMinusAMinusANoAlias(String hint) throws IOException { - String query = String.format("SELECT %s pk FROM %s WHERE system_name = 'A' " + - "MINUS " + - "SELECT pk FROM %s WHERE system_name = 'A'", - hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); + String query = + String.format( + "SELECT %s pk FROM %s WHERE system_name = 'A' " + + "MINUS " + + "SELECT pk FROM %s WHERE system_name = 'A'", + hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(0)); } private void innerMinusAMinusBNoAlias(String hint) throws IOException { - String query = String.format("SELECT %s pk FROM %s WHERE system_name = 'A' " + - "MINUS " + - "SELECT pk FROM %s WHERE system_name = 'B'", - hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); + String query = + String.format( + "SELECT %s pk FROM %s WHERE system_name = 'A' " + + "MINUS " + + "SELECT pk FROM %s WHERE system_name = 'B'", + hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), 
equalTo(1)); @@ -255,10 +270,12 @@ private void innerMinusAMinusBNoAlias(String hint) throws IOException { } private void innerMinusCMinusDTwoFieldsNoAlias(String hint) throws IOException { - String query = String.format("SELECT %s pk, letter FROM %s WHERE system_name = 'C' " + - "MINUS " + - "SELECT pk, letter FROM %s WHERE system_name = 'D'", - hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); + String query = + String.format( + "SELECT %s pk, letter FROM %s WHERE system_name = 'C' " + + "MINUS " + + "SELECT pk, letter FROM %s WHERE system_name = 'D'", + hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(1)); @@ -274,10 +291,12 @@ private void innerMinusCMinusDTwoFieldsNoAlias(String hint) throws IOException { } private void innerMinusCMinusDTwoFieldsAliasOnBothTables(String hint) throws IOException { - String query = String.format("SELECT %s pk as myId, letter FROM %s WHERE system_name = 'C' " + - "MINUS " + - "SELECT myId, myLetter as letter FROM %s WHERE system_name = 'E'", - hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); + String query = + String.format( + "SELECT %s pk as myId, letter FROM %s WHERE system_name = 'C' " + + "MINUS " + + "SELECT myId, myLetter as letter FROM %s WHERE system_name = 'E'", + hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(1)); @@ -293,20 +312,24 @@ private void innerMinusCMinusDTwoFieldsAliasOnBothTables(String hint) throws IOE } private void innerMinusCMinusTNonExistentFieldOneField(String hint) throws IOException { - String query = String.format("SELECT %s letter FROM %s WHERE system_name = 'C' " + - "MINUS " + - "SELECT letter FROM %s WHERE system_name = 'T'", - hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); + String 
query = + String.format( + "SELECT %s letter FROM %s WHERE system_name = 'C' " + + "MINUS " + + "SELECT letter FROM %s WHERE system_name = 'T'", + hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(3)); } private void innerMinusTMinusCNonExistentFieldFirstQuery(String hint) throws IOException { - String query = String.format("SELECT %s letter FROM %s WHERE system_name = 'T' " + - "MINUS " + - "SELECT letter FROM %s WHERE system_name = 'C'", - hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); + String query = + String.format( + "SELECT %s letter FROM %s WHERE system_name = 'T' " + + "MINUS " + + "SELECT letter FROM %s WHERE system_name = 'C'", + hint, TestsConstants.TEST_INDEX_SYSTEM, TestsConstants.TEST_INDEX_SYSTEM); JSONObject response = executeQuery(query); assertThat(getHits(response).length(), equalTo(0)); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/NestedFieldQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/NestedFieldQueryIT.java index e568be0ae6..6e73a9366e 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/NestedFieldQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/NestedFieldQueryIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.allOf; @@ -34,35 +33,28 @@ import org.opensearch.client.ResponseException; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.common.xcontent.json.JsonXContentParser; +import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.core.rest.RestStatus; import org.opensearch.search.SearchHit; /** * Integration test cases for both rewriting and projection logic. - *

- * Test result: - * 1) SELECT * or any field or aggregate function on any field - * 2) WHERE single or multiple conditions on nested type - * 3) GROUP BY regular field and aggregate on any field - * 4) GROUP BY nested field and COUNT(*) - * 5) UNION/MINUS but only SELECT nested field - *

- * Does NOT support: - * 1) GROUP BY nested field and aggregate other than COUNT - * 2) UNION/MINUS and SELECT nested field (need to flatten during set computation) - * 3) JOIN (doesn't work if put alias before nested field name and may have similar problem as UNION/MINUS during computation) - * 4) Subquery - * 5) HAVING - * 6) Verification for conditions mixed with regular and nested fields + * + *

Test result: 1) SELECT * or any field or aggregate function on any field 2) WHERE single or + * multiple conditions on nested type 3) GROUP BY regular field and aggregate on any field 4) GROUP + * BY nested field and COUNT(*) 5) UNION/MINUS but only SELECT nested field + * + *

Does NOT support: 1) GROUP BY nested field and aggregate other than COUNT 2) UNION/MINUS and + * SELECT nested field (need to flatten during set computation) 3) JOIN (doesn't work if put alias + * before nested field name and may have similar problem as UNION/MINUS during computation) 4) + * Subquery 5) HAVING 6) Verification for conditions mixed with regular and nested fields */ public class NestedFieldQueryIT extends SQLIntegTestCase { private static final String FROM = "FROM " + TestsConstants.TEST_INDEX_NESTED_TYPE + " n, n.message m"; - @Override protected void init() throws Exception { loadIndex(Index.NESTED); @@ -83,188 +75,71 @@ private void queryAll(String sql) throws IOException { assertThat( query(sql), hits( - hit( - myNum(1), - someField("b"), - innerHits("message", - hit( - author("e"), - info("a") - ) - ) - ), - hit( - myNum(2), - someField("a"), - innerHits("message", - hit( - author("f"), - info("b") - ) - ) - ), - hit( - myNum(3), - someField("a"), - innerHits("message", - hit( - author("g"), - info("c") - ) - ) - ), + hit(myNum(1), someField("b"), innerHits("message", hit(author("e"), info("a")))), + hit(myNum(2), someField("a"), innerHits("message", hit(author("f"), info("b")))), + hit(myNum(3), someField("a"), innerHits("message", hit(author("g"), info("c")))), hit( myNum(4), someField("b"), - innerHits("message", - hit( - author("h"), - info("c") - ), - hit( - author("i"), - info("a") - ) - ) - ), + innerHits("message", hit(author("h"), info("c")), hit(author("i"), info("a")))), hit( myNum(new int[] {3, 4}), someField("a"), - innerHits("message", - hit( - author("zz"), - info("zz") - ) - ) - ) - ) - ); + innerHits("message", hit(author("zz"), info("zz")))))); } @Test public void singleCondition() throws IOException { assertThat( - query( - "SELECT myNum, m.author, m.info", - "WHERE m.info = 'c'" - ), + query("SELECT myNum, m.author, m.info", "WHERE m.info = 'c'"), hits( - hit( - myNum(3), - innerHits("message", - hit( - author("g"), - 
info("c") - ) - ) - ), - hit( - myNum(4), - innerHits("message", - hit( - author("h"), - info("c") - ) - ) - ) - ) - ); + hit(myNum(3), innerHits("message", hit(author("g"), info("c")))), + hit(myNum(4), innerHits("message", hit(author("h"), info("c")))))); } @Test public void multipleConditionsOfNestedField() throws IOException { assertThat( - query( - "SELECT someField, m.author, m.info", - "WHERE m.info = 'c' AND m.author = 'h'" - ), - hits( - hit( - someField("b"), - innerHits("message", - hit( - author("h"), - info("c") - ) - ) - ) - ) - ); + query("SELECT someField, m.author, m.info", "WHERE m.info = 'c' AND m.author = 'h'"), + hits(hit(someField("b"), innerHits("message", hit(author("h"), info("c")))))); } @Test public void multipleConditionsOfNestedFieldNoMatch() throws IOException { assertThat( - query( - "SELECT someField, m.author, m.info", - "WHERE m.info = 'c' AND m.author = 'i'" - ), - hits() - ); + query("SELECT someField, m.author, m.info", "WHERE m.info = 'c' AND m.author = 'i'"), + hits()); } @Test public void multipleConditionsOfRegularAndNestedField() throws IOException { assertThat( - query( - "SELECT myNum, m.author, m.info", - "WHERE myNum = 3 AND m.info = 'c'" - ), - hits( - hit( - myNum(3), - innerHits("message", - hit( - author("g"), - info("c") - ) - ) - ) - ) - ); + query("SELECT myNum, m.author, m.info", "WHERE myNum = 3 AND m.info = 'c'"), + hits(hit(myNum(3), innerHits("message", hit(author("g"), info("c")))))); } @Test public void multipleConditionsOfRegularOrNestedField() throws IOException { assertThat( - query( - "SELECT myNum, m.author, m.info", - "WHERE myNum = 2 OR m.info = 'c'" - ), + query("SELECT myNum, m.author, m.info", "WHERE myNum = 2 OR m.info = 'c'"), hits( - hit( - myNum(2) - ), // Note: no inner hit here because of no match in nested field - hit( - myNum(3), - innerHits("message", - hit( - author("g"), - info("c") - ) - ) - ), - hit( - myNum(4), - innerHits("message", - hit( - author("h"), - info("c") - ) - ) - ) - ) 
- ); + hit(myNum(2)), // Note: no inner hit here because of no match in nested field + hit(myNum(3), innerHits("message", hit(author("g"), info("c")))), + hit(myNum(4), innerHits("message", hit(author("h"), info("c")))))); } @Test public void leftJoinSelectAll() throws IOException { - String sql = "SELECT * " + - "FROM opensearch-sql_test_index_employee_nested e " + - "LEFT JOIN e.projects p"; + String sql = + "SELECT * " + + "FROM opensearch-sql_test_index_employee_nested e " + + "LEFT JOIN e.projects p"; String explain = explainQuery(sql); - assertThat(explain, containsString("{\"bool\":{\"must_not\":[{\"nested\":{\"query\":" + - "{\"exists\":{\"field\":\"projects\",\"boost\":1.0}},\"path\":\"projects\"")); + assertThat( + explain, + containsString( + "{\"bool\":{\"must_not\":[{\"nested\":{\"query\":" + + "{\"exists\":{\"field\":\"projects\",\"boost\":1.0}},\"path\":\"projects\"")); assertThat(explain, containsString("\"_source\":{\"includes\":[\"projects.*\"")); @@ -274,42 +149,50 @@ public void leftJoinSelectAll() throws IOException { @Test public void leftJoinSpecificFields() throws IOException { - String sql = "SELECT e.name, p.name, p.started_year " + - "FROM opensearch-sql_test_index_employee_nested e " + - "LEFT JOIN e.projects p"; + String sql = + "SELECT e.name, p.name, p.started_year " + + "FROM opensearch-sql_test_index_employee_nested e " + + "LEFT JOIN e.projects p"; String explain = explainQuery(sql); - assertThat(explain, containsString("{\"bool\":{\"must_not\":[{\"nested\":{\"query\":" + - "{\"exists\":{\"field\":\"projects\",\"boost\":1.0}},\"path\":\"projects\"")); + assertThat( + explain, + containsString( + "{\"bool\":{\"must_not\":[{\"nested\":{\"query\":" + + "{\"exists\":{\"field\":\"projects\",\"boost\":1.0}},\"path\":\"projects\"")); assertThat(explain, containsString("\"_source\":{\"includes\":[\"name\"],")); - assertThat(explain, + assertThat( + explain, 
containsString("\"_source\":{\"includes\":[\"projects.name\",\"projects.started_year\"]")); JSONObject results = executeQuery(sql); Assert.assertThat(getTotalHits(results), equalTo(4)); } - @Ignore("Comma join in left join won't pass syntax check in new ANTLR parser. " - + "Ignore for now and require to change grammar too when we want to support this case.") + @Ignore( + "Comma join in left join won't pass syntax check in new ANTLR parser. " + + "Ignore for now and require to change grammar too when we want to support this case.") @Test public void leftJoinExceptionOnExtraNestedFields() throws IOException { - String sql = "SELECT * " + - "FROM opensearch-sql_test_index_employee_nested e " + - "LEFT JOIN e.projects p, e.comments c"; + String sql = + "SELECT * " + + "FROM opensearch-sql_test_index_employee_nested e " + + "LEFT JOIN e.projects p, e.comments c"; try { String explain = explainQuery(sql); Assert.fail("Expected ResponseException, but none was thrown"); } catch (ResponseException e) { - assertThat(e.getResponse().getStatusLine().getStatusCode(), + assertThat( + e.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus())); final String entity = TestUtils.getResponseBody(e.getResponse()); - assertThat(entity, + assertThat( + entity, containsString("only single nested field is allowed as right table for LEFT JOIN")); assertThat(entity, containsString("\"type\":\"verification_exception\"")); } } - @Test public void aggregationWithoutGroupBy() throws IOException { String sql = "SELECT AVG(m.dayOfWeek) AS avgDay " + FROM; @@ -317,7 +200,9 @@ public void aggregationWithoutGroupBy() throws IOException { JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "message.dayOfWeek@NESTED"); - Assert.assertThat(((BigDecimal) aggregation.query("/avgDay/value")).doubleValue(), closeTo(3.166666666, 0.01)); + Assert.assertThat( + ((BigDecimal) aggregation.query("/avgDay/value")).doubleValue(), + 
closeTo(3.166666666, 0.01)); } @Test @@ -351,39 +236,36 @@ public void groupByRegularFieldAndSum() throws IOException { Assert.assertNotNull(msgInfoBuckets); Assert.assertThat(msgInfoBuckets.length(), equalTo(2)); Assert.assertThat(msgInfoBuckets.query("/0/key"), equalTo("a")); - Assert.assertThat(((BigDecimal) msgInfoBuckets.query("/0/message.dayOfWeek@NESTED/sumDay/value")).doubleValue(), + Assert.assertThat( + ((BigDecimal) msgInfoBuckets.query("/0/message.dayOfWeek@NESTED/sumDay/value")) + .doubleValue(), closeTo(9.0, 0.01)); Assert.assertThat(msgInfoBuckets.query("/1/key"), equalTo("b")); - Assert.assertThat(((BigDecimal) msgInfoBuckets.query("/1/message.dayOfWeek@NESTED/sumDay/value")).doubleValue(), + Assert.assertThat( + ((BigDecimal) msgInfoBuckets.query("/1/message.dayOfWeek@NESTED/sumDay/value")) + .doubleValue(), closeTo(10.0, 0.01)); } @Test public void nestedFiledIsNotNull() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested as e, e.projects as p " + - "WHERE p IS NOT NULL"; + String sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested as e, e.projects as p " + + "WHERE p IS NOT NULL"; assertThat( executeQuery(sql), hitAll( kvString("/_source/name", Is.is("Bob Smith")), - kvString("/_source/name", Is.is("Jane Smith")) - ) - ); + kvString("/_source/name", Is.is("Jane Smith")))); } // Doesn't support: aggregate function other than COUNT() @SuppressWarnings("unused") public void groupByNestedFieldAndAvg() throws IOException { - query( - "SELECT m.info, AVG(m.dayOfWeek)", - "GROUP BY m.info" - ); - query( - "SELECT m.info, AVG(myNum)", - "GROUP BY m.info" - ); + query("SELECT m.info, AVG(m.dayOfWeek)", "GROUP BY m.info"); + query("SELECT m.info, AVG(myNum)", "GROUP BY m.info"); } @Test @@ -418,10 +300,11 @@ public void groupByNestedAndRegularField() throws IOException { @Test public void countAggWithoutWhere() throws IOException { - String sql = "SELECT e.name, COUNT(p) as c " + 
- "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "GROUP BY e.name " + - "HAVING c > 1"; + String sql = + "SELECT e.name, COUNT(p) as c " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "GROUP BY e.name " + + "HAVING c > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -437,11 +320,12 @@ public void countAggWithoutWhere() throws IOException { @Test public void countAggWithWhereOnParent() throws IOException { - String sql = "SELECT e.name, COUNT(p) as c " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE e.name like '%smith%' " + - "GROUP BY e.name " + - "HAVING c > 1"; + String sql = + "SELECT e.name, COUNT(p) as c " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE e.name like '%smith%' " + + "GROUP BY e.name " + + "HAVING c > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -457,11 +341,12 @@ public void countAggWithWhereOnParent() throws IOException { @Test public void countAggWithWhereOnNested() throws IOException { - String sql = "SELECT e.name, COUNT(p) as c " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING c > 1"; + String sql = + "SELECT e.name, COUNT(p) as c " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING c > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -477,11 +362,12 @@ public void countAggWithWhereOnNested() throws IOException { @Test public void countAggWithWhereOnParentOrNested() throws IOException { - String sql = "SELECT e.name, COUNT(p) as c " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE 
e.name like '%smith%' or p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING c > 1"; + String sql = + "SELECT e.name, COUNT(p) as c " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE e.name like '%smith%' or p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING c > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -497,11 +383,12 @@ public void countAggWithWhereOnParentOrNested() throws IOException { @Test public void countAggWithWhereOnParentAndNested() throws IOException { - String sql = "SELECT e.name, COUNT(p) as c " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE e.name like '%smith%' AND p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING c > 1"; + String sql = + "SELECT e.name, COUNT(p) as c " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE e.name like '%smith%' AND p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING c > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -517,11 +404,12 @@ public void countAggWithWhereOnParentAndNested() throws IOException { @Test public void countAggWithWhereOnNestedAndNested() throws IOException { - String sql = "SELECT e.name, COUNT(p) as c " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.started_year > 2000 AND p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING c > 0"; + String sql = + "SELECT e.name, COUNT(p) as c " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.started_year > 2000 AND p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING c > 0"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -537,11 +425,12 @@ public void countAggWithWhereOnNestedAndNested() throws IOException { @Test 
public void countAggWithWhereOnNestedOrNested() throws IOException { - String sql = "SELECT e.name, COUNT(p) as c " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.started_year > 2000 OR p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING c > 1"; + String sql = + "SELECT e.name, COUNT(p) as c " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.started_year > 2000 OR p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING c > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -557,11 +446,12 @@ public void countAggWithWhereOnNestedOrNested() throws IOException { @Test public void countAggOnNestedInnerFieldWithoutWhere() throws IOException { - String sql = "SELECT e.name, COUNT(p.started_year) as count " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING count > 0"; + String sql = + "SELECT e.name, COUNT(p.started_year) as count " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING count > 0"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -581,10 +471,11 @@ public void countAggOnNestedInnerFieldWithoutWhere() throws IOException { @Test public void maxAggOnNestedInnerFieldWithoutWhere() throws IOException { - String sql = "SELECT e.name, MAX(p.started_year) as max " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.name LIKE '%security%' " + - "GROUP BY e.name"; + String sql = + "SELECT e.name, MAX(p.started_year) as max " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.name LIKE '%security%' " + + "GROUP BY e.name"; JSONObject result = executeQuery(sql); JSONObject aggregation = 
getAggregation(result, "name.keyword"); @@ -594,20 +485,27 @@ public void maxAggOnNestedInnerFieldWithoutWhere() throws IOException { Assert.assertThat(bucket.length(), equalTo(2)); Assert.assertThat(bucket.query("/0/key"), equalTo("Bob Smith")); Assert.assertThat( - ((BigDecimal) bucket.query("/0/projects.started_year@NESTED/projects.started_year@FILTER/max/value")).doubleValue(), + ((BigDecimal) + bucket.query( + "/0/projects.started_year@NESTED/projects.started_year@FILTER/max/value")) + .doubleValue(), closeTo(2015.0, 0.01)); Assert.assertThat(bucket.query("/1/key"), equalTo("Jane Smith")); Assert.assertThat( - ((BigDecimal) bucket.query("/1/projects.started_year@NESTED/projects.started_year@FILTER/max/value")).doubleValue(), + ((BigDecimal) + bucket.query( + "/1/projects.started_year@NESTED/projects.started_year@FILTER/max/value")) + .doubleValue(), closeTo(2015.0, 0.01)); } @Test public void havingCountAggWithoutWhere() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"; + String sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -623,11 +521,12 @@ public void havingCountAggWithoutWhere() throws IOException { @Test public void havingCountAggWithWhereOnParent() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE e.name like '%smith%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"; + String sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE e.name like '%smith%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = 
getAggregation(result, "name.keyword"); @@ -643,11 +542,12 @@ public void havingCountAggWithWhereOnParent() throws IOException { @Test public void havingCountAggWithWhereOnNested() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"; + String sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -663,11 +563,12 @@ public void havingCountAggWithWhereOnNested() throws IOException { @Test public void havingCountAggWithWhereOnParentOrNested() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE e.name like '%smith%' or p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"; + String sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE e.name like '%smith%' or p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -683,11 +584,12 @@ public void havingCountAggWithWhereOnParentOrNested() throws IOException { @Test public void havingCountAggWithWhereOnParentAndNested() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE e.name like '%smith%' AND p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"; + String sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE e.name like '%smith%' AND p.name LIKE '%security%' " + + 
"GROUP BY e.name " + + "HAVING COUNT(p) > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -703,11 +605,12 @@ public void havingCountAggWithWhereOnParentAndNested() throws IOException { @Test public void havingCountAggWithWhereOnNestedAndNested() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.started_year > 2000 AND p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 0"; + String sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.started_year > 2000 AND p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 0"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -723,11 +626,12 @@ public void havingCountAggWithWhereOnNestedAndNested() throws IOException { @Test public void havingCountAggWithWhereOnNestedOrNested() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.started_year > 2000 OR p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"; + String sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.started_year > 2000 OR p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -743,11 +647,12 @@ public void havingCountAggWithWhereOnNestedOrNested() throws IOException { @Test public void havingCountAggOnNestedInnerFieldWithoutWhere() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING COUNT(p.started_year) > 0"; 
+ String sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING COUNT(p.started_year) > 0"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -767,11 +672,12 @@ public void havingCountAggOnNestedInnerFieldWithoutWhere() throws IOException { @Test public void havingMaxAggOnNestedInnerFieldWithoutWhere() throws IOException { - String sql = "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + - "WHERE p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING MAX(p.started_year) > 1990"; + String sql = + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested AS e, e.projects AS p " + + "WHERE p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING MAX(p.started_year) > 1990"; JSONObject result = executeQuery(sql); JSONObject aggregation = getAggregation(result, "name.keyword"); @@ -781,22 +687,28 @@ public void havingMaxAggOnNestedInnerFieldWithoutWhere() throws IOException { Assert.assertThat(bucket.length(), equalTo(2)); Assert.assertThat(bucket.query("/0/key"), equalTo("Bob Smith")); Assert.assertThat( - ((BigDecimal) bucket.query("/0/projects.started_year@NESTED/projects.started_year@FILTER/max_0/value")).doubleValue(), + ((BigDecimal) + bucket.query( + "/0/projects.started_year@NESTED/projects.started_year@FILTER/max_0/value")) + .doubleValue(), closeTo(2015.0, 0.01)); Assert.assertThat(bucket.query("/1/key"), equalTo("Jane Smith")); Assert.assertThat( - ((BigDecimal) bucket.query("/1/projects.started_year@NESTED/projects.started_year@FILTER/max_0/value")).doubleValue(), + ((BigDecimal) + bucket.query( + "/1/projects.started_year@NESTED/projects.started_year@FILTER/max_0/value")) + .doubleValue(), closeTo(2015.0, 0.01)); } /*********************************************************** - Matchers for Non-Aggregation Testing + * 
Matchers for Non-Aggregation Testing ***********************************************************/ @SafeVarargs private final Matcher hits(Matcher... subMatchers) { - return featureValueOf("hits", arrayContainingInAnyOrder(subMatchers), - resp -> resp.getHits().getHits()); + return featureValueOf( + "hits", arrayContainingInAnyOrder(subMatchers), resp -> resp.getHits().getHits()); } @SafeVarargs @@ -834,8 +746,7 @@ public boolean matches(Object item) { } @Override - public void describeTo(Description description) { - } + public void describeTo(Description description) {} }; } @@ -860,16 +771,15 @@ private final Matcher innerHits(String path, Matcher... in return featureValueOf( "innerHits", arrayContainingInAnyOrder(innerHitMatchers), - hit -> hit.getInnerHits().get(path).getHits() - ); + hit -> hit.getInnerHits().get(path).getHits()); } /*********************************************************** - Matchers for Aggregation Testing + * Matchers for Aggregation Testing ***********************************************************/ - private FeatureMatcher featureValueOf(String name, Matcher subMatcher, - Function getter) { + private FeatureMatcher featureValueOf( + String name, Matcher subMatcher, Function getter) { return new FeatureMatcher(subMatcher, name, name) { @Override protected U featureValueOf(T actual) { @@ -879,7 +789,7 @@ protected U featureValueOf(T actual) { } /*********************************************************** - Query Utility to Fetch Response for SQL + * Query Utility to Fetch Response for SQL ***********************************************************/ private SearchResponse query(String select, String... statements) throws IOException { @@ -889,10 +799,11 @@ private SearchResponse query(String select, String... 
statements) throws IOExcep private SearchResponse execute(String sql) throws IOException { final JSONObject jsonObject = executeQuery(sql); - final XContentParser parser = new JsonXContentParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(jsonObject.toString())); + final XContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(jsonObject.toString())); return SearchResponse.fromXContent(parser); } @@ -904,5 +815,4 @@ private JSONObject getAggregation(final JSONObject queryResult, final String agg Assert.assertTrue(aggregations.has(aggregationName)); return aggregations.getJSONObject(aggregationName); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/ObjectFieldSelectIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/ObjectFieldSelectIT.java index ce781123d6..3a2f48d497 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/ObjectFieldSelectIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/ObjectFieldSelectIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DEEP_NESTED; @@ -18,9 +17,8 @@ import org.opensearch.sql.legacy.utils.StringUtils; /** - * Integration test for OpenSearch object field (and nested field). - * This class is focused on simple SELECT-FROM query to ensure right column - * number and value is returned. + * Integration test for OpenSearch object field (and nested field). This class is focused on simple + * SELECT-FROM query to ensure right column number and value is returned. 
*/ public class ObjectFieldSelectIT extends SQLIntegTestCase { @@ -36,33 +34,28 @@ public void testSelectObjectFieldItself() { verifySchema(response, schema("city", null, "object")); // Expect object field itself is returned in a single cell - verifyDataRows(response, - rows(new JSONObject( - "{\n" - + " \"name\": \"Seattle\",\n" - + " \"location\": {\"latitude\": 10.5}\n" - + "}") - ) - ); + verifyDataRows( + response, + rows( + new JSONObject( + "{\n" + + " \"name\": \"Seattle\",\n" + + " \"location\": {\"latitude\": 10.5}\n" + + "}"))); } @Test public void testSelectObjectInnerFields() { - JSONObject response = new JSONObject(query( - "SELECT city.location, city.location.latitude FROM %s")); + JSONObject response = + new JSONObject(query("SELECT city.location, city.location.latitude FROM %s")); - verifySchema(response, + verifySchema( + response, schema("city.location", null, "object"), - schema("city.location.latitude", null, "double") - ); + schema("city.location.latitude", null, "double")); // Expect inner regular or object field returned in its single cell - verifyDataRows(response, - rows( - new JSONObject("{\"latitude\": 10.5}"), - 10.5 - ) - ); + verifyDataRows(response, rows(new JSONObject("{\"latitude\": 10.5}"), 10.5)); } @Test @@ -72,15 +65,15 @@ public void testSelectNestedFieldItself() { verifySchema(response, schema("projects", null, "nested")); // Expect nested field itself is returned in a single cell - verifyDataRows(response, - rows(new JSONArray( - "[\n" - + " {\"name\": \"AWS Redshift Spectrum querying\"},\n" - + " {\"name\": \"AWS Redshift security\"},\n" - + " {\"name\": \"AWS Aurora security\"}\n" - + "]") - ) - ); + verifyDataRows( + response, + rows( + new JSONArray( + "[\n" + + " {\"name\": \"AWS Redshift Spectrum querying\"},\n" + + " {\"name\": \"AWS Redshift security\"},\n" + + " {\"name\": \"AWS Aurora security\"}\n" + + "]"))); } @Test @@ -100,10 +93,6 @@ public void testSelectObjectFieldOfArrayValuesInnerFields() { } private 
String query(String sql) { - return executeQuery( - StringUtils.format(sql, TEST_INDEX_DEEP_NESTED), - "jdbc" - ); + return executeQuery(StringUtils.format(sql, TEST_INDEX_DEEP_NESTED), "jdbc"); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/OpenSearchSQLRestTestCase.java b/integ-test/src/test/java/org/opensearch/sql/legacy/OpenSearchSQLRestTestCase.java index b5e21ad475..5e0d768e3b 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/OpenSearchSQLRestTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/OpenSearchSQLRestTestCase.java @@ -3,9 +3,10 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; +import static java.util.Collections.unmodifiableList; + import java.io.IOException; import java.util.ArrayList; import java.util.List; @@ -36,11 +37,10 @@ import org.opensearch.common.util.io.IOUtils; import org.opensearch.test.rest.OpenSearchRestTestCase; -import static java.util.Collections.unmodifiableList; - /** - * OpenSearch SQL integration test base class to support both security disabled and enabled OpenSearch cluster. - * Allows interaction with multiple external test clusters using OpenSearch's {@link RestClient}. + * OpenSearch SQL integration test base class to support both security disabled and enabled + * OpenSearch cluster. Allows interaction with multiple external test clusters using OpenSearch's + * {@link RestClient}. 
*/ public abstract class OpenSearchSQLRestTestCase extends OpenSearchRestTestCase { @@ -49,17 +49,20 @@ public abstract class OpenSearchSQLRestTestCase extends OpenSearchRestTestCase { public static final String MATCH_ALL_REMOTE_CLUSTER = "*"; private static RestClient remoteClient; + /** - * A client for the running remote OpenSearch cluster configured to take test administrative actions - * like remove all indexes after the test completes + * A client for the running remote OpenSearch cluster configured to take test administrative + * actions like remove all indexes after the test completes */ private static RestClient remoteAdminClient; protected boolean isHttps() { - boolean isHttps = Optional.ofNullable(System.getProperty("https")) - .map("true"::equalsIgnoreCase).orElse(false); + boolean isHttps = + Optional.ofNullable(System.getProperty("https")) + .map("true"::equalsIgnoreCase) + .orElse(false); if (isHttps) { - //currently only external cluster is supported for security enabled testing + // currently only external cluster is supported for security enabled testing if (!Optional.ofNullable(System.getProperty("tests.rest.cluster")).isPresent()) { throw new RuntimeException( "external cluster url should be provided for security enabled testing"); @@ -73,16 +76,14 @@ protected String getProtocol() { return isHttps() ? "https" : "http"; } - /** - * Get the client to remote cluster used for ordinary api calls while writing a test. - */ + /** Get the client to remote cluster used for ordinary api calls while writing a test. */ protected static RestClient remoteClient() { return remoteClient; } /** - * Get the client to remote cluster used for test administrative actions. - * Do not use this while writing a test. Only use it for cleaning up after tests. + * Get the client to remote cluster used for test administrative actions. Do not use this while + * writing a test. Only use it for cleaning up after tests. 
*/ protected static RestClient remoteAdminClient() { return remoteAdminClient; @@ -124,9 +125,7 @@ public void initRemoteClient() throws IOException { assert remoteAdminClient != null; } - /** - * Get a comma delimited list of [host:port] to which to send REST requests. - */ + /** Get a comma delimited list of [host:port] to which to send REST requests. */ protected String getTestRestCluster(String clusterName) { String cluster = System.getProperty("tests.rest." + clusterName + ".http_hosts"); if (cluster == null) { @@ -134,15 +133,12 @@ protected String getTestRestCluster(String clusterName) { "Must specify [tests.rest." + clusterName + ".http_hosts] system property with a comma delimited list of [host:port] " - + "to which to send REST requests" - ); + + "to which to send REST requests"); } return cluster; } - /** - * Get a comma delimited list of [host:port] for connections between clusters. - */ + /** Get a comma delimited list of [host:port] for connections between clusters. */ protected String getTestTransportCluster(String clusterName) { String cluster = System.getProperty("tests.rest." + clusterName + ".transport_hosts"); if (cluster == null) { @@ -150,8 +146,7 @@ protected String getTestTransportCluster(String clusterName) { "Must specify [tests.rest." + clusterName + ".transport_hosts] system property with a comma delimited list of [host:port] " - + "for connections between clusters" - ); + + "for connections between clusters"); } return cluster; } @@ -176,7 +171,8 @@ protected static void wipeAllOpenSearchIndices() throws IOException { protected static void wipeAllOpenSearchIndices(RestClient client) throws IOException { // include all the indices, included hidden indices. 
// https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-indices.html#cat-indices-api-query-params - Response response = client.performRequest(new Request("GET", "/_cat/indices?format=json&expand_wildcards=all")); + Response response = + client.performRequest(new Request("GET", "/_cat/indices?format=json&expand_wildcards=all")); JSONArray jsonArray = new JSONArray(EntityUtils.toString(response.getEntity(), "UTF-8")); for (Object object : jsonArray) { JSONObject jsonObject = (JSONObject) object; @@ -187,7 +183,8 @@ protected static void wipeAllOpenSearchIndices(RestClient client) throws IOExcep client.performRequest(new Request("DELETE", "/" + indexName)); } } catch (Exception e) { - // TODO: Ignore index delete error for now. Remove this if strict check on system index added above. + // TODO: Ignore index delete error for now. Remove this if strict check on system index + // added above. LOG.warn("Failed to delete index: " + indexName, e); } } @@ -202,30 +199,36 @@ protected static void configureHttpsClient(RestClientBuilder builder, Settings s defaultHeaders[i++] = new BasicHeader(entry.getKey(), entry.getValue()); } builder.setDefaultHeaders(defaultHeaders); - builder.setHttpClientConfigCallback(httpClientBuilder -> { - String userName = Optional.ofNullable(System.getProperty("user")) - .orElseThrow(() -> new RuntimeException("user name is missing")); - String password = Optional.ofNullable(System.getProperty("password")) - .orElseThrow(() -> new RuntimeException("password is missing")); - CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); - credentialsProvider - .setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(userName, password)); - try { - return httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider) - //disable the certificate since our testing cluster just uses the default security configuration - .setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE) - .setSSLContext(SSLContextBuilder.create() 
- .loadTrustMaterial(null, (chains, authType) -> true) - .build()); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); + builder.setHttpClientConfigCallback( + httpClientBuilder -> { + String userName = + Optional.ofNullable(System.getProperty("user")) + .orElseThrow(() -> new RuntimeException("user name is missing")); + String password = + Optional.ofNullable(System.getProperty("password")) + .orElseThrow(() -> new RuntimeException("password is missing")); + CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials( + AuthScope.ANY, new UsernamePasswordCredentials(userName, password)); + try { + return httpClientBuilder + .setDefaultCredentialsProvider(credentialsProvider) + // disable the certificate since our testing cluster just uses the default security + // configuration + .setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE) + .setSSLContext( + SSLContextBuilder.create() + .loadTrustMaterial(null, (chains, authType) -> true) + .build()); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); final String socketTimeoutString = settings.get(CLIENT_SOCKET_TIMEOUT); final TimeValue socketTimeout = - TimeValue.parseTimeValue(socketTimeoutString == null ? "60s" : socketTimeoutString, - CLIENT_SOCKET_TIMEOUT); + TimeValue.parseTimeValue( + socketTimeoutString == null ? "60s" : socketTimeoutString, CLIENT_SOCKET_TIMEOUT); builder.setRequestConfigCallback( conf -> conf.setSocketTimeout(Math.toIntExact(socketTimeout.getMillis()))); if (settings.hasValue(CLIENT_PATH_PREFIX)) { @@ -234,18 +237,19 @@ protected static void configureHttpsClient(RestClientBuilder builder, Settings s } /** - * Initialize rest client to remote cluster, - * and create a connection to it from the coordinating cluster. + * Initialize rest client to remote cluster, and create a connection to it from the coordinating + * cluster. 
*/ public void configureMultiClusters() throws IOException { initRemoteClient(); Request connectionRequest = new Request("PUT", "_cluster/settings"); - String connectionSetting = "{\"persistent\": {\"cluster\": {\"remote\": {\"" - + REMOTE_CLUSTER - + "\": {\"seeds\": [\"" - + getTestTransportCluster(REMOTE_CLUSTER).split(",")[0] - + "\"]}}}}}"; + String connectionSetting = + "{\"persistent\": {\"cluster\": {\"remote\": {\"" + + REMOTE_CLUSTER + + "\": {\"seeds\": [\"" + + getTestTransportCluster(REMOTE_CLUSTER).split(",")[0] + + "\"]}}}}}"; connectionRequest.setJsonEntity(connectionSetting); adminClient().performRequest(connectionRequest); } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/OrderIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/OrderIT.java index c8b4b87f69..3e0191c009 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/OrderIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/OrderIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.equalTo; @@ -76,17 +75,17 @@ public void orderByIsNull() throws IOException { assertThat(query(hits, "/0/_source/id"), equalTo("5")); // Another equivalent syntax - assertThat(explainQuery("SELECT * FROM opensearch-sql_test_index_order " + - "ORDER BY id IS NULL, id DESC"), - equalTo(explainQuery("SELECT * FROM opensearch-sql_test_index_order " + - "ORDER BY id IS NULL DESC")) - ); + assertThat( + explainQuery( + "SELECT * FROM opensearch-sql_test_index_order " + "ORDER BY id IS NULL, id DESC"), + equalTo( + explainQuery( + "SELECT * FROM opensearch-sql_test_index_order " + "ORDER BY id IS NULL DESC"))); } @Test public void orderByIsNotNull() throws IOException { - String query = - "SELECT id, name FROM opensearch-sql_test_index_order ORDER BY name IS NOT NULL"; + String query = "SELECT id, name FROM opensearch-sql_test_index_order ORDER BY name IS NOT NULL"; JSONArray result = 
getSortExplain(query); assertThat(1, equalTo(result.length())); assertThat(query(result, "/0/name.keyword/order"), equalTo("asc")); @@ -95,21 +94,24 @@ public void orderByIsNotNull() throws IOException { JSONObject response = executeQuery(query); JSONArray hits = getHits(response); assertFalse(hits.getJSONObject(0).getJSONObject("_source").has("name")); - assertThat(hits.getJSONObject(hits.length() - 1).query("/_source/name").toString(), - equalTo("f")); + assertThat( + hits.getJSONObject(hits.length() - 1).query("/_source/name").toString(), equalTo("f")); // Another equivalent syntax - assertThat(explainQuery("SELECT id, name FROM opensearch-sql_test_index_order " + - "ORDER BY name IS NOT NULL"), - equalTo(explainQuery("SELECT id, name FROM opensearch-sql_test_index_order " + - "ORDER BY name IS NOT NULL ASC")) - ); + assertThat( + explainQuery( + "SELECT id, name FROM opensearch-sql_test_index_order " + "ORDER BY name IS NOT NULL"), + equalTo( + explainQuery( + "SELECT id, name FROM opensearch-sql_test_index_order " + + "ORDER BY name IS NOT NULL ASC"))); } @Test public void multipleOrderByWithNulls() throws IOException { String query = - "SELECT id, name FROM opensearch-sql_test_index_order ORDER BY id IS NULL, name IS NOT NULL"; + "SELECT id, name FROM opensearch-sql_test_index_order ORDER BY id IS NULL, name IS NOT" + + " NULL"; JSONArray result = getSortExplain(query); assertThat(result.length(), equalTo(2)); assertThat(query(result, "/0/id/missing"), equalTo("_last")); @@ -118,8 +120,9 @@ public void multipleOrderByWithNulls() throws IOException { @Test public void testOrderByMergeForSameField() throws IOException { - String query = "SELECT * FROM opensearch-sql_test_index_order " + - "ORDER BY id IS NULL, name DESC, id DESC, id IS NOT NULL, name IS NULL"; + String query = + "SELECT * FROM opensearch-sql_test_index_order " + + "ORDER BY id IS NULL, name DESC, id DESC, id IS NOT NULL, name IS NULL"; JSONArray result = getSortExplain(query); assertThat(2, 
equalTo(result.length())); assertThat(query(result, "/0/id/order"), equalTo("asc")); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/OrdinalAliasRewriterIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/OrdinalAliasRewriterIT.java index ecec5844be..caea2aa7c6 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/OrdinalAliasRewriterIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/OrdinalAliasRewriterIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.equalTo; @@ -23,125 +22,191 @@ protected void init() throws Exception { // tests query results with jdbc output @Test public void simpleGroupByOrdinal() { - String expected = executeQuery(StringUtils.format( - "SELECT lastname FROM %s AS b GROUP BY lastname LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); - String actual = executeQuery(StringUtils.format( - "SELECT lastname FROM %s AS b GROUP BY 1 LIMIT 3", TestsConstants.TEST_INDEX_ACCOUNT), - "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT lastname FROM %s AS b GROUP BY lastname LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); + String actual = + executeQuery( + StringUtils.format( + "SELECT lastname FROM %s AS b GROUP BY 1 LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); assertThat(actual, equalTo(expected)); } @Test public void multipleGroupByOrdinal() { - String expected = executeQuery(StringUtils.format( - "SELECT lastname, firstname, age FROM %s AS b GROUP BY firstname, age, lastname LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); - String actual = executeQuery(StringUtils.format( - "SELECT lastname, firstname, age FROM %s AS b GROUP BY 2, 3, 1 LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT lastname, firstname, age FROM %s AS b GROUP BY firstname, age, lastname" + + " LIMIT 3", + 
TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); + String actual = + executeQuery( + StringUtils.format( + "SELECT lastname, firstname, age FROM %s AS b GROUP BY 2, 3, 1 LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); assertThat(actual, equalTo(expected)); } @Test public void selectFieldiWithBacticksGroupByOrdinal() { - String expected = executeQuery(StringUtils.format( - "SELECT `lastname` FROM %s AS b GROUP BY `lastname` LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); - String actual = executeQuery(StringUtils.format( - "SELECT `lastname` FROM %s AS b GROUP BY 1 LIMIT 3", TestsConstants.TEST_INDEX_ACCOUNT), - "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT `lastname` FROM %s AS b GROUP BY `lastname` LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); + String actual = + executeQuery( + StringUtils.format( + "SELECT `lastname` FROM %s AS b GROUP BY 1 LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); assertThat(actual, equalTo(expected)); } @Test public void selectFieldiWithBacticksAndTableAliasGroupByOrdinal() { - String expected = executeQuery(StringUtils.format( - "SELECT `b`.`lastname`, `age`, firstname FROM %s AS b GROUP BY `age`, `b`.`lastname` , firstname LIMIT 10", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); - String actual = executeQuery(StringUtils.format( - "SELECT `b`.`lastname`, `age`, firstname FROM %s AS b GROUP BY 2, 1, 3 LIMIT 10", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT `b`.`lastname`, `age`, firstname FROM %s AS b GROUP BY `age`," + + " `b`.`lastname` , firstname LIMIT 10", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); + String actual = + executeQuery( + StringUtils.format( + "SELECT `b`.`lastname`, `age`, firstname FROM %s AS b GROUP BY 2, 1, 3 LIMIT 10", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); assertThat(actual, equalTo(expected)); } @Test public void simpleOrderByOrdinal() { - String expected 
= executeQuery(StringUtils.format( - "SELECT lastname FROM %s AS b ORDER BY lastname LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); - String actual = executeQuery(StringUtils.format( - "SELECT lastname FROM %s AS b ORDER BY 1 LIMIT 3", TestsConstants.TEST_INDEX_ACCOUNT), - "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT lastname FROM %s AS b ORDER BY lastname LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); + String actual = + executeQuery( + StringUtils.format( + "SELECT lastname FROM %s AS b ORDER BY 1 LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); assertThat(actual, equalTo(expected)); } @Test public void multipleOrderByOrdinal() { - String expected = executeQuery(StringUtils.format( - "SELECT lastname, firstname, age FROM %s AS b ORDER BY firstname, age, lastname LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); - String actual = executeQuery(StringUtils.format( - "SELECT lastname, firstname, age FROM %s AS b ORDER BY 2, 3, 1 LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT lastname, firstname, age FROM %s AS b ORDER BY firstname, age, lastname" + + " LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); + String actual = + executeQuery( + StringUtils.format( + "SELECT lastname, firstname, age FROM %s AS b ORDER BY 2, 3, 1 LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); assertThat(actual, equalTo(expected)); } @Test public void selectFieldiWithBacticksOrderByOrdinal() { - String expected = executeQuery(StringUtils.format( - "SELECT `lastname` FROM %s AS b ORDER BY `lastname` LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); - String actual = executeQuery(StringUtils.format( - "SELECT `lastname` FROM %s AS b ORDER BY 1 LIMIT 3", TestsConstants.TEST_INDEX_ACCOUNT), - "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT `lastname` FROM %s AS b ORDER BY `lastname` LIMIT 3", + 
TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); + String actual = + executeQuery( + StringUtils.format( + "SELECT `lastname` FROM %s AS b ORDER BY 1 LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); assertThat(actual, equalTo(expected)); } @Test public void selectFieldiWithBacticksAndTableAliasOrderByOrdinal() { - String expected = executeQuery(StringUtils.format( - "SELECT `b`.`lastname` FROM %s AS b ORDER BY `b`.`lastname` LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); - String actual = executeQuery(StringUtils.format( - "SELECT `b`.`lastname` FROM %s AS b ORDER BY 1 LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT `b`.`lastname` FROM %s AS b ORDER BY `b`.`lastname` LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); + String actual = + executeQuery( + StringUtils.format( + "SELECT `b`.`lastname` FROM %s AS b ORDER BY 1 LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); assertThat(actual, equalTo(expected)); } // ORDER BY IS NULL/NOT NULL @Test public void selectFieldiWithBacticksAndTableAliasOrderByOrdinalAndNull() { - String expected = executeQuery(StringUtils.format( - "SELECT `b`.`lastname`, age FROM %s AS b ORDER BY `b`.`lastname` IS NOT NULL DESC, age is NULL LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); - String actual = executeQuery(StringUtils.format( - "SELECT `b`.`lastname`, age FROM %s AS b ORDER BY 1 IS NOT NULL DESC, 2 IS NULL LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT `b`.`lastname`, age FROM %s AS b ORDER BY `b`.`lastname` IS NOT NULL DESC," + + " age is NULL LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); + String actual = + executeQuery( + StringUtils.format( + "SELECT `b`.`lastname`, age FROM %s AS b ORDER BY 1 IS NOT NULL DESC, 2 IS NULL" + + " LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc"); assertThat(actual, 
equalTo(expected)); } - // explain @Test public void explainSelectFieldiWithBacticksAndTableAliasGroupByOrdinal() throws IOException { - String expected = explainQuery(StringUtils.format( - "SELECT `b`.`lastname` FROM %s AS b GROUP BY `b`.`lastname` LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT)); - String actual = explainQuery(StringUtils.format( - "SELECT `b`.`lastname` FROM %s AS b GROUP BY 1 LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT)); + String expected = + explainQuery( + StringUtils.format( + "SELECT `b`.`lastname` FROM %s AS b GROUP BY `b`.`lastname` LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT)); + String actual = + explainQuery( + StringUtils.format( + "SELECT `b`.`lastname` FROM %s AS b GROUP BY 1 LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT)); assertThat(actual, equalTo(expected)); } @Test public void explainSelectFieldiWithBacticksAndTableAliasOrderByOrdinal() throws IOException { - String expected = explainQuery(StringUtils.format( - "SELECT `b`.`lastname` FROM %s AS b ORDER BY `b`.`lastname` LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT)); - String actual = explainQuery(StringUtils.format( - "SELECT `b`.`lastname` FROM %s AS b ORDER BY 1 LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT)); + String expected = + explainQuery( + StringUtils.format( + "SELECT `b`.`lastname` FROM %s AS b ORDER BY `b`.`lastname` LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT)); + String actual = + explainQuery( + StringUtils.format( + "SELECT `b`.`lastname` FROM %s AS b ORDER BY 1 LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT)); assertThat(actual, equalTo(expected)); } @@ -149,12 +214,18 @@ public void explainSelectFieldiWithBacticksAndTableAliasOrderByOrdinal() throws @Ignore("only work for legacy engine") public void explainSelectFieldiWithBacticksAndTableAliasOrderByOrdinalAndNull() throws IOException { - String expected = explainQuery(StringUtils.format( - "SELECT `b`.`lastname`, age FROM %s AS b ORDER BY `b`.`lastname` IS NOT NULL DESC, age is NULL LIMIT 3", - 
TestsConstants.TEST_INDEX_ACCOUNT)); - String actual = explainQuery(StringUtils.format( - "SELECT `b`.`lastname`, age FROM %s AS b ORDER BY 1 IS NOT NULL DESC, 2 IS NULL LIMIT 3", - TestsConstants.TEST_INDEX_ACCOUNT)); + String expected = + explainQuery( + StringUtils.format( + "SELECT `b`.`lastname`, age FROM %s AS b ORDER BY `b`.`lastname` IS NOT NULL DESC," + + " age is NULL LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT)); + String actual = + explainQuery( + StringUtils.format( + "SELECT `b`.`lastname`, age FROM %s AS b ORDER BY 1 IS NOT NULL DESC, 2 IS NULL" + + " LIMIT 3", + TestsConstants.TEST_INDEX_ACCOUNT)); assertThat(actual, equalTo(expected)); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/PluginIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/PluginIT.java index a0032e7e6a..0ea749a5bb 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/PluginIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/PluginIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.equalTo; @@ -33,8 +32,9 @@ protected void init() throws Exception { public void sqlEnableSettingsTest() throws IOException { loadIndex(Index.ACCOUNT); updateClusterSettings(new ClusterSetting(PERSISTENT, "plugins.sql.enabled", "true")); - String query = String - .format(Locale.ROOT, "SELECT firstname FROM %s WHERE account_number=1", TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, "SELECT firstname FROM %s WHERE account_number=1", TEST_INDEX_ACCOUNT); JSONObject queryResult = executeQuery(query); assertThat(getHits(queryResult).length(), equalTo(1)); @@ -50,16 +50,19 @@ public void sqlEnableSettingsTest() throws IOException { assertThat(queryResult.getInt("status"), equalTo(400)); JSONObject error = queryResult.getJSONObject("error"); assertThat(error.getString("reason"), equalTo("Invalid SQL query")); - assertThat(error.getString("details"), 
equalTo( - "Either plugins.sql.enabled or rest.action.multi.allow_explicit_index setting is false")); + assertThat( + error.getString("details"), + equalTo( + "Either plugins.sql.enabled or rest.action.multi.allow_explicit_index setting is" + + " false")); assertThat(error.getString("type"), equalTo("SQLFeatureDisabledException")); wipeAllClusterSettings(); } @Test public void sqlDeleteSettingsTest() throws IOException { - updateClusterSettings(new ClusterSetting(PERSISTENT, - Settings.Key.SQL_DELETE_ENABLED.getKeyValue(), "false")); + updateClusterSettings( + new ClusterSetting(PERSISTENT, Settings.Key.SQL_DELETE_ENABLED.getKeyValue(), "false")); String deleteQuery = StringUtils.format("DELETE FROM %s", TestsConstants.TEST_INDEX_ACCOUNT); final ResponseException exception = @@ -70,8 +73,8 @@ public void sqlDeleteSettingsTest() throws IOException { "{\n" + " \"error\": {\n" + " \"reason\": \"Invalid SQL query\",\n" - + " \"details\": \"DELETE clause is disabled by default and will be deprecated. Using " - + "the plugins.sql.delete.enabled setting to enable it\",\n" + + " \"details\": \"DELETE clause is disabled by default and will be deprecated." 
+ + " Using the plugins.sql.delete.enabled setting to enable it\",\n" + " \"type\": \"SQLFeatureDisabledException\"\n" + " },\n" + " \"status\": 400\n" @@ -84,329 +87,355 @@ public void sqlDeleteSettingsTest() throws IOException { @Test public void sqlTransientOnlySettingTest() throws IOException { // (1) compact form - String settings = "{" + - " \"transient\": {" + - " \"plugins.query.metrics.rolling_interval\": \"80\"" + - " }" + - "}"; + String settings = + "{" + + " \"transient\": {" + + " \"plugins.query.metrics.rolling_interval\": \"80\"" + + " }" + + "}"; JSONObject actual = updateViaSQLSettingsAPI(settings); - JSONObject expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"persistent\" : { }," + - " \"transient\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_interval\" : \"80\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + JSONObject expected = + new JSONObject( + "{" + + " \"acknowledged\" : true," + + " \"persistent\" : { }," + + " \"transient\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_interval\" : \"80\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); // (2) partial expanded form - settings = "{" + - " \"transient\": {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics.rolling_interval\": \"75\"" + - " }" + - " }" + - " }" + - "}"; + settings = + "{" + + " \"transient\": {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics.rolling_interval\": \"75\"" + + " }" + + " }" + + " }" + + "}"; actual = updateViaSQLSettingsAPI(settings); - expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"persistent\" : { }," + - " \"transient\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_interval\" : \"75\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + expected = + new JSONObject( + "{" + + " \"acknowledged\" : true," + + " \"persistent\" : { }," + 
+ " \"transient\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_interval\" : \"75\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); - // (3) full expanded form - settings = "{" + - " \"transient\": {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\": {" + - " \"rolling_interval\": \"65\"" + - " }" + - " }" + - " }" + - " }" + - "}"; + settings = + "{" + + " \"transient\": {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\": {" + + " \"rolling_interval\": \"65\"" + + " }" + + " }" + + " }" + + " }" + + "}"; actual = updateViaSQLSettingsAPI(settings); - expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"persistent\" : { }," + - " \"transient\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_interval\" : \"65\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + expected = + new JSONObject( + "{" + + " \"acknowledged\" : true," + + " \"persistent\" : { }," + + " \"transient\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_interval\" : \"65\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); } @Test public void sqlPersistentOnlySettingTest() throws IOException { // (1) compact form - String settings = "{" + - " \"persistent\": {" + - " \"plugins.query.metrics.rolling_interval\": \"80\"" + - " }" + - "}"; + String settings = + "{" + + " \"persistent\": {" + + " \"plugins.query.metrics.rolling_interval\": \"80\"" + + " }" + + "}"; JSONObject actual = updateViaSQLSettingsAPI(settings); - JSONObject expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"transient\" : { }," + - " \"persistent\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_interval\" : \"80\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + JSONObject expected = + new JSONObject( + "{" + + " \"acknowledged\" : true," + 
+ " \"transient\" : { }," + + " \"persistent\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_interval\" : \"80\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); // (2) partial expanded form - settings = "{" + - " \"persistent\": {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics.rolling_interval\": \"75\"" + - " }" + - " }" + - " }" + - "}"; + settings = + "{" + + " \"persistent\": {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics.rolling_interval\": \"75\"" + + " }" + + " }" + + " }" + + "}"; actual = updateViaSQLSettingsAPI(settings); - expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"transient\" : { }," + - " \"persistent\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_interval\" : \"75\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + expected = + new JSONObject( + "{" + + " \"acknowledged\" : true," + + " \"transient\" : { }," + + " \"persistent\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_interval\" : \"75\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); - // (3) full expanded form - settings = "{" + - " \"persistent\": {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\": {" + - " \"rolling_interval\": \"65\"" + - " }" + - " }" + - " }" + - " }" + - "}"; + settings = + "{" + + " \"persistent\": {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\": {" + + " \"rolling_interval\": \"65\"" + + " }" + + " }" + + " }" + + " }" + + "}"; actual = updateViaSQLSettingsAPI(settings); - expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"transient\" : { }," + - " \"persistent\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_interval\" : \"65\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + expected = + new JSONObject( + "{" + + " 
\"acknowledged\" : true," + + " \"transient\" : { }," + + " \"persistent\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_interval\" : \"65\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); } /** - * Both transient and persistent settings are applied for same settings. - * This is similar to _cluster/settings behavior + * Both transient and persistent settings are applied for same settings. This is similar to + * _cluster/settings behavior */ @Test public void sqlCombinedSettingTest() throws IOException { - String settings = "{" + - " \"transient\": {" + - " \"plugins.query.metrics.rolling_window\": \"3700\"" + - " }," + - " \"persistent\": {" + - " \"plugins.sql.slowlog\" : \"2\"" + - " }" + - "}"; + String settings = + "{" + + " \"transient\": {" + + " \"plugins.query.metrics.rolling_window\": \"3700\"" + + " }," + + " \"persistent\": {" + + " \"plugins.sql.slowlog\" : \"2\"" + + " }" + + "}"; JSONObject actual = updateViaSQLSettingsAPI(settings); - JSONObject expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"persistent\" : {" + - " \"plugins\" : {" + - " \"sql\" : {" + - " \"slowlog\" : \"2\"" + - " }" + - " }" + - " }," + - " \"transient\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_window\" : \"3700\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + JSONObject expected = + new JSONObject( + "{" + + " \"acknowledged\" : true," + + " \"persistent\" : {" + + " \"plugins\" : {" + + " \"sql\" : {" + + " \"slowlog\" : \"2\"" + + " }" + + " }" + + " }," + + " \"transient\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_window\" : \"3700\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); } - /** - * Ignore all non plugins.sql settings. - * Only settings starting with plugins.sql. are affected - */ + /** Ignore all non plugins.sql settings. 
Only settings starting with plugins.sql. are affected */ @Test public void ignoreNonSQLSettingsTest() throws IOException { - String settings = "{" + - " \"transient\": {" + - " \"plugins.query.metrics.rolling_window\": \"3700\"," + - " \"plugins.alerting.metrics.rolling_window\": \"3700\"," + - " \"search.max_buckets\": \"10000\"," + - " \"search.max_keep_alive\": \"24h\"" + - " }," + - " \"persistent\": {" + - " \"plugins.sql.slowlog\": \"2\"," + - " \"plugins.alerting.metrics.rolling_window\": \"3700\"," + - " \"thread_pool.analyze.queue_size\": \"16\"" + - " }" + - "}"; + String settings = + "{" + + " \"transient\": {" + + " \"plugins.query.metrics.rolling_window\": \"3700\"," + + " \"plugins.alerting.metrics.rolling_window\": \"3700\"," + + " \"search.max_buckets\": \"10000\"," + + " \"search.max_keep_alive\": \"24h\"" + + " }," + + " \"persistent\": {" + + " \"plugins.sql.slowlog\": \"2\"," + + " \"plugins.alerting.metrics.rolling_window\": \"3700\"," + + " \"thread_pool.analyze.queue_size\": \"16\"" + + " }" + + "}"; JSONObject actual = updateViaSQLSettingsAPI(settings); - JSONObject expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"persistent\" : {" + - " \"plugins\" : {" + - " \"sql\" : {" + - " \"slowlog\" : \"2\"" + - " }" + - " }" + - " }," + - " \"transient\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_window\" : \"3700\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + JSONObject expected = + new JSONObject( + "{" + + " \"acknowledged\" : true," + + " \"persistent\" : {" + + " \"plugins\" : {" + + " \"sql\" : {" + + " \"slowlog\" : \"2\"" + + " }" + + " }" + + " }," + + " \"transient\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_window\" : \"3700\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); } @Test public void ignoreNonTransientNonPersistentSettingsTest() throws IOException { - String settings = "{" + - " 
\"transient\": {" + - " \"plugins.query.metrics.rolling_window\": \"3700\"" + - " }," + - " \"persistent\": {" + - " \"plugins.sql.slowlog\": \"2\"" + - " }," + - " \"hello\": {" + - " \"world\" : {" + - " \"name\" : \"John Doe\"" + - " }" + - " }" + - "}"; + String settings = + "{" + + " \"transient\": {" + + " \"plugins.query.metrics.rolling_window\": \"3700\"" + + " }," + + " \"persistent\": {" + + " \"plugins.sql.slowlog\": \"2\"" + + " }," + + " \"hello\": {" + + " \"world\" : {" + + " \"name\" : \"John Doe\"" + + " }" + + " }" + + "}"; JSONObject actual = updateViaSQLSettingsAPI(settings); - JSONObject expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"persistent\" : {" + - " \"plugins\" : {" + - " \"sql\" : {" + - " \"slowlog\" : \"2\"" + - " }" + - " }" + - " }," + - " \"transient\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_window\" : \"3700\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + JSONObject expected = + new JSONObject( + "{" + + " \"acknowledged\" : true," + + " \"persistent\" : {" + + " \"plugins\" : {" + + " \"sql\" : {" + + " \"slowlog\" : \"2\"" + + " }" + + " }" + + " }," + + " \"transient\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_window\" : \"3700\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); } @Test public void sqlCombinedMixedSettingTest() throws IOException { - String settings = "{" + - " \"transient\": {" + - " \"plugins.query.metrics.rolling_window\": \"3700\"" + - " }," + - " \"persistent\": {" + - " \"plugins\": {" + - " \"sql\": {" + - " \"slowlog\": \"1\"" + - " }" + - " }" + - " }," + - " \"hello\": {" + - " \"world\": {" + - " \"city\": \"Seattle\"" + - " }" + - " }" + - "}"; + String settings = + "{" + + " \"transient\": {" + + " \"plugins.query.metrics.rolling_window\": \"3700\"" + + " }," + + " \"persistent\": {" + + " \"plugins\": {" + + " \"sql\": {" + + " \"slowlog\": 
\"1\"" + + " }" + + " }" + + " }," + + " \"hello\": {" + + " \"world\": {" + + " \"city\": \"Seattle\"" + + " }" + + " }" + + "}"; JSONObject actual = updateViaSQLSettingsAPI(settings); - JSONObject expected = new JSONObject("{" + - " \"acknowledged\" : true," + - " \"persistent\" : {" + - " \"plugins\" : {" + - " \"sql\" : {" + - " \"slowlog\" : \"1\"" + - " }" + - " }" + - " }," + - " \"transient\" : {" + - " \"plugins\" : {" + - " \"query\" : {" + - " \"metrics\" : {" + - " \"rolling_window\" : \"3700\"" + - " }" + - " }" + - " }" + - " }" + - "}"); + JSONObject expected = + new JSONObject( + "{" + + " \"acknowledged\" : true," + + " \"persistent\" : {" + + " \"plugins\" : {" + + " \"sql\" : {" + + " \"slowlog\" : \"1\"" + + " }" + + " }" + + " }," + + " \"transient\" : {" + + " \"plugins\" : {" + + " \"query\" : {" + + " \"metrics\" : {" + + " \"rolling_window\" : \"3700\"" + + " }" + + " }" + + " }" + + " }" + + "}"); assertTrue(actual.similar(expected)); } @Test public void nonRegisteredSQLSettingsThrowException() throws IOException { - String settings = "{" + - " \"transient\": {" + - " \"plugins.sql.query.state.city\": \"Seattle\"" + - " }" + - "}"; + String settings = + "{" + + " \"transient\": {" + + " \"plugins.sql.query.state.city\": \"Seattle\"" + + " }" + + "}"; JSONObject actual; Response response = null; @@ -421,8 +450,7 @@ public void nonRegisteredSQLSettingsThrowException() throws IOException { assertThat(actual.query("/error/type"), equalTo("illegal_argument_exception")); assertThat( actual.query("/error/reason"), - equalTo("transient setting [plugins.sql.query.state.city], not recognized") - ); + equalTo("transient setting [plugins.sql.query.state.city], not recognized")); } protected static JSONObject updateViaSQLSettingsAPI(String body) throws IOException { diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/PreparedStatementIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/PreparedStatementIT.java index 
88f72d1907..dd177ec1f1 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/PreparedStatementIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/PreparedStatementIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import java.io.IOException; @@ -25,27 +24,34 @@ protected void init() throws Exception { public void testPreparedStatement() throws IOException { int ageToCompare = 35; - JSONObject response = executeRequest(String.format("{\n" + - " \"query\": \"SELECT * FROM %s WHERE age > ? AND state in (?, ?) LIMIT ?\",\n" + - " \"parameters\": [\n" + - " {\n" + - " \"type\": \"integer\",\n" + - " \"value\": \"" + ageToCompare + "\"\n" + - " },\n" + - " {\n" + - " \"type\": \"string\",\n" + - " \"value\": \"TN\"\n" + - " },\n" + - " {\n" + - " \"type\": \"string\",\n" + - " \"value\": \"UT\"\n" + - " },\n" + - " {\n" + - " \"type\": \"integer\",\n" + - " \"value\": \"20\"\n" + - " }\n" + - " ]\n" + - "}", TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeRequest( + String.format( + "{\n" + + " \"query\": \"SELECT * FROM %s WHERE age > ? AND state in (?, ?) 
LIMIT" + + " ?\",\n" + + " \"parameters\": [\n" + + " {\n" + + " \"type\": \"integer\",\n" + + " \"value\": \"" + + ageToCompare + + "\"\n" + + " },\n" + + " {\n" + + " \"type\": \"string\",\n" + + " \"value\": \"TN\"\n" + + " },\n" + + " {\n" + + " \"type\": \"string\",\n" + + " \"value\": \"UT\"\n" + + " },\n" + + " {\n" + + " \"type\": \"integer\",\n" + + " \"value\": \"20\"\n" + + " }\n" + + " ]\n" + + "}", + TestsConstants.TEST_INDEX_ACCOUNT)); Assert.assertTrue(response.has("hits")); Assert.assertTrue(response.getJSONObject("hits").has("hits")); @@ -58,23 +64,23 @@ public void testPreparedStatement() throws IOException { } } - /* currently the integ test case will fail if run using Intellj, have to run using gradle command - * because the integ test cluster created by IntellJ has http diabled, need to spend some time later to - * figure out how to configure the integ test cluster properly. Related online resources: - * https://discuss.elastic.co/t/http-enabled-with-OpenSearchIntegTestCase/102032 - * https://discuss.elastic.co/t/help-with-OpenSearchIntegTestCase/105245 - @Override - protected Collection> nodePlugins() { - return Arrays.asList(MockTcpTransportPlugin.class); - } + /* currently the integ test case will fail if run using Intellj, have to run using gradle command + * because the integ test cluster created by IntellJ has http diabled, need to spend some time later to + * figure out how to configure the integ test cluster properly. 
Related online resources: + * https://discuss.elastic.co/t/http-enabled-with-OpenSearchIntegTestCase/102032 + * https://discuss.elastic.co/t/help-with-OpenSearchIntegTestCase/105245 + @Override + protected Collection> nodePlugins() { + return Arrays.asList(MockTcpTransportPlugin.class); + } - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.builder().put(super.nodeSettings(nodeOrdinal)) - // .put("node.mode", "network") - .put("http.enabled", true) - //.put("http.type", "netty4") - .build(); - } - */ + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder().put(super.nodeSettings(nodeOrdinal)) + // .put("node.mode", "network") + .put("http.enabled", true) + //.put("http.type", "netty4") + .build(); + } + */ } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/PrettyFormatResponseIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/PrettyFormatResponseIT.java index ef80098df6..3247975a67 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/PrettyFormatResponseIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/PrettyFormatResponseIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static java.util.stream.Collectors.toSet; @@ -30,31 +29,39 @@ import org.opensearch.client.Request; /** - * PrettyFormatResponseIT will likely be excluding some of the tests written in PrettyFormatResponseTest since - * those tests were asserting on class objects directly. These updated tests will only be making assertions based - * on the REST response. - *

- * Any integ tests from PrettyFormatResponseTest that were excluded can perhaps later be changed and moved over - * to be unit tests. - *

- * Tests from original integ tests excluded: - * - noIndexType() - * - withIndexType() + * PrettyFormatResponseIT will likely be excluding some of the tests written in + * PrettyFormatResponseTest since those tests were asserting on class objects directly. These + * updated tests will only be making assertions based on the REST response. + * + *

Any integ tests from PrettyFormatResponseTest that were excluded can perhaps later be changed + * and moved over to be unit tests. + * + *

Tests from original integ tests excluded: - noIndexType() - withIndexType() */ public class PrettyFormatResponseIT extends SQLIntegTestCase { - private static final Set allAccountFields = Sets.newHashSet( - "account_number", "balance", "firstname", "lastname", "age", "gender", "address", "employer", - "email", "city", "state" - ); + private static final Set allAccountFields = + Sets.newHashSet( + "account_number", + "balance", + "firstname", + "lastname", + "age", + "gender", + "address", + "employer", + "email", + "city", + "state"); private static final Set regularFields = Sets.newHashSet("someField", "myNum"); - private static final Set messageFields = Sets.newHashSet( - "message.dayOfWeek", "message.info", "message.author"); + private static final Set messageFields = + Sets.newHashSet("message.dayOfWeek", "message.info", "message.author"); - private static final Set messageFieldsWithNestedFunction = Sets.newHashSet( - "nested(message.dayOfWeek)", "nested(message.info)", "nested(message.author)"); + private static final Set messageFieldsWithNestedFunction = + Sets.newHashSet( + "nested(message.dayOfWeek)", "nested(message.info)", "nested(message.author)"); private static final Set commentFields = Sets.newHashSet("comment.data", "comment.likes"); @@ -83,19 +90,20 @@ protected Request getSqlRequest(String request, boolean explain) { public void wrongIndexType() throws IOException { String type = "wrongType"; try { - executeQuery(String.format(Locale.ROOT, "SELECT * FROM %s/%s", - TestsConstants.TEST_INDEX_ACCOUNT, type)); + executeQuery( + String.format( + Locale.ROOT, "SELECT * FROM %s/%s", TestsConstants.TEST_INDEX_ACCOUNT, type)); } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), - is(String.format(Locale.ROOT, "Index type %s does not exist", type))); + assertThat( + e.getMessage(), is(String.format(Locale.ROOT, "Index type %s does not exist", type))); } } @Test public void selectAll() throws IOException { - JSONObject response = executeQuery( 
- String.format(Locale.ROOT, "SELECT * FROM %s", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format(Locale.ROOT, "SELECT * FROM %s", TestsConstants.TEST_INDEX_ACCOUNT)); // This also tests that .keyword fields are ignored when SELECT * is called assertContainsColumnsInAnyOrder(getSchema(response), allAccountFields); @@ -104,9 +112,12 @@ public void selectAll() throws IOException { @Test public void selectNames() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT firstname, lastname FROM %s", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT firstname, lastname FROM %s", + TestsConstants.TEST_INDEX_ACCOUNT)); assertContainsColumns(getSchema(response), nameFields); assertContainsData(getDataRows(response), nameFields); @@ -115,13 +126,15 @@ public void selectNames() throws IOException { @Ignore("Semantic analysis takes care of this") @Test public void selectWrongField() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT wrongField FROM %s", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, "SELECT wrongField FROM %s", TestsConstants.TEST_INDEX_ACCOUNT)); assertThat(getSchema(response).length(), equalTo(0)); - // DataRows object will still get populated with SearchHits but since wrongField is not available in the Map + // DataRows object will still get populated with SearchHits but since wrongField is not + // available in the Map // each row in the response will be empty // TODO Perhaps a code change should be made to format logic to ensure a // 'datarows' length of 0 in response for this case @@ -131,9 +144,12 @@ public void selectWrongField() throws IOException { @Test @Ignore("_score tested in V2 engine - @see org.opensearch.sql.sql.ScoreQueryIT") public void selectScore() throws IOException { - 
JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT _score FROM %s WHERE SCORE(match_phrase(phrase, 'brown fox'))", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT _score FROM %s WHERE SCORE(match_phrase(phrase, 'brown fox'))", + TestsConstants.TEST_INDEX_PHRASE)); List fields = Collections.singletonList("_score"); assertContainsColumns(getSchema(response), fields); @@ -142,14 +158,14 @@ public void selectScore() throws IOException { @Test public void selectAllFromNestedWithoutFieldInFrom() throws IOException { - assertNestedFieldQueryResultContainsColumnsAndData("SELECT * FROM %s", - regularFields, fields("message", "comment")); + assertNestedFieldQueryResultContainsColumnsAndData( + "SELECT * FROM %s", regularFields, fields("message", "comment")); } @Test public void selectAllFromNestedWithFieldInFrom() throws IOException { - assertNestedFieldQueryResultContainsColumnsAndData("SELECT * FROM %s e, e.message m", - regularFields, messageFields); + assertNestedFieldQueryResultContainsColumnsAndData( + "SELECT * FROM %s e, e.message m", regularFields, messageFields); } @Test @@ -161,29 +177,27 @@ public void selectAllFromNestedWithMultipleFieldsInFrom() throws IOException { @Test public void selectAllNestedFromNestedWithFieldInFrom() throws IOException { - assertNestedFieldQueryResultContainsColumnsAndData("SELECT m.* FROM %s e, e.message m", - messageFields); + assertNestedFieldQueryResultContainsColumnsAndData( + "SELECT m.* FROM %s e, e.message m", messageFields); } @Test public void selectSpecificRegularFieldAndAllFromNestedWithFieldInFrom() throws IOException { assertNestedFieldQueryResultContainsColumnsAndData( - "SELECT e.someField, m.* FROM %s e, e.message m", - fields("someField"), messageFields); + "SELECT e.someField, m.* FROM %s e, e.message m", fields("someField"), messageFields); } /** - * Execute the query against index with nested fields and assert result 
contains columns and data as expected. + * Execute the query against index with nested fields and assert result contains columns and data + * as expected. */ @SafeVarargs - private final void assertNestedFieldQueryResultContainsColumnsAndData(String query, - Set... expectedFieldNames) - throws IOException { + private final void assertNestedFieldQueryResultContainsColumnsAndData( + String query, Set... expectedFieldNames) throws IOException { JSONObject response = executeQuery(String.format(Locale.ROOT, query, TestsConstants.TEST_INDEX_NESTED_TYPE)); - Set allExpectedFieldNames = Stream.of(expectedFieldNames). - flatMap(Set::stream). - collect(toSet()); + Set allExpectedFieldNames = + Stream.of(expectedFieldNames).flatMap(Set::stream).collect(toSet()); assertContainsColumnsInAnyOrder(getSchema(response), allExpectedFieldNames); assertContainsData(getDataRows(response), allExpectedFieldNames); @@ -195,24 +209,31 @@ private Set fields(String... fieldNames) { @Test public void selectNestedFields() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT nested(message.info), someField FROM %s", - TestsConstants.TEST_INDEX_NESTED_TYPE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT nested(message.info), someField FROM %s", + TestsConstants.TEST_INDEX_NESTED_TYPE)); List fields = Arrays.asList("nested(message.info)", "someField"); assertContainsColumns(getSchema(response), fields); assertContainsData(getDataRows(response), fields); - // The nested test index being used contains 5 entries but one of them has an array of 2 message objects, so + // The nested test index being used contains 5 entries but one of them has an array of 2 message + // objects, so // we check to see if the amount of data rows is 6 since that is the result after flattening assertThat(getDataRows(response).length(), equalTo(6)); } @Test public void selectNestedFieldWithWildcard() throws IOException { - JSONObject response = 
executeQuery( - String.format(Locale.ROOT, "SELECT nested(message.*) FROM %s", - TestsConstants.TEST_INDEX_NESTED_TYPE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT nested(message.*) FROM %s", + TestsConstants.TEST_INDEX_NESTED_TYPE)); assertContainsColumnsInAnyOrder(getSchema(response), messageFieldsWithNestedFunction); assertContainsData(getDataRows(response), messageFields); @@ -221,11 +242,13 @@ public void selectNestedFieldWithWildcard() throws IOException { @Test public void selectWithWhere() throws IOException { int balanceToCompare = 30000; - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT balance " + - "FROM %s " + - "WHERE balance > %d", - TestsConstants.TEST_INDEX_ACCOUNT, balanceToCompare)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT balance " + "FROM %s " + "WHERE balance > %d", + TestsConstants.TEST_INDEX_ACCOUNT, + balanceToCompare)); /* * Previously the DataRows map was used to check specific fields but the JDBC response for "datarows" is a @@ -243,9 +266,10 @@ public void selectWithWhere() throws IOException { @Test public void groupBySingleField() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s GROUP BY age", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, "SELECT * FROM %s GROUP BY age", TestsConstants.TEST_INDEX_ACCOUNT)); List fields = Collections.singletonList("age"); assertContainsColumns(getSchema(response), fields); @@ -254,9 +278,12 @@ public void groupBySingleField() throws IOException { @Test public void groupByMultipleFields() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s GROUP BY age, balance", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s GROUP BY age, balance", + 
TestsConstants.TEST_INDEX_ACCOUNT)); List fields = Arrays.asList("age", "balance"); assertContainsColumns(getSchema(response), fields); @@ -265,35 +292,42 @@ public void groupByMultipleFields() throws IOException { @Ignore("only work for legacy engine") public void testSizeAndTotal() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE balance > 30000 " + - "LIMIT 5", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + "FROM %s " + "WHERE balance > 30000 " + "LIMIT 5", + TestsConstants.TEST_INDEX_ACCOUNT)); JSONArray dataRows = getDataRows(response); assertThat(dataRows.length(), equalTo(5)); - // The value to compare to here was obtained by running the query in the plugin and looking at the SearchHits + // The value to compare to here was obtained by running the query in the plugin and looking at + // the SearchHits int totalHits = response.getInt("total"); assertThat(totalHits, equalTo(402)); } @Test public void testSizeWithGroupBy() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s GROUP BY age LIMIT 5", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s GROUP BY age LIMIT 5", + TestsConstants.TEST_INDEX_ACCOUNT)); assertThat(getDataRows(response).length(), equalTo(5)); } @Test public void aggregationFunctionInSelect() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT COUNT(*) FROM %s GROUP BY age", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT COUNT(*) FROM %s GROUP BY age", + TestsConstants.TEST_INDEX_ACCOUNT)); List fields = Arrays.asList("COUNT(*)"); assertContainsColumns(getSchema(response), fields); @@ -310,9 +344,12 @@ public void 
aggregationFunctionInSelect() throws IOException { @Ignore("In MySQL and our new engine, the original text in SELECT is used as final column name") @Test public void aggregationFunctionInSelectCaseCheck() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT count(*) FROM %s GROUP BY age", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT count(*) FROM %s GROUP BY age", + TestsConstants.TEST_INDEX_ACCOUNT)); List fields = Arrays.asList("COUNT(*)"); assertContainsColumns(getSchema(response), fields); @@ -328,9 +365,12 @@ public void aggregationFunctionInSelectCaseCheck() throws IOException { @Ignore("only work for legacy engine") public void aggregationFunctionInSelectWithAlias() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT COUNT(*) AS total FROM %s GROUP BY age", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT COUNT(*) AS total FROM %s GROUP BY age", + TestsConstants.TEST_INDEX_ACCOUNT)); List fields = Arrays.asList("total"); assertContainsColumns(getSchema(response), fields); @@ -346,8 +386,10 @@ public void aggregationFunctionInSelectWithAlias() throws IOException { @Test public void aggregationFunctionInSelectNoGroupBy() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, "SELECT SUM(age) FROM %s", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, "SELECT SUM(age) FROM %s", TestsConstants.TEST_INDEX_ACCOUNT)); String ageSum = "SUM(age)"; assertContainsColumns(getSchema(response), Collections.singletonList(ageSum)); @@ -363,9 +405,12 @@ public void aggregationFunctionInSelectNoGroupBy() throws IOException { @Test public void multipleAggregationFunctionsInSelect() throws IOException { - JSONObject response = executeQuery( - 
String.format(Locale.ROOT, "SELECT COUNT(*), AVG(age) FROM %s GROUP BY age", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT COUNT(*), AVG(age) FROM %s GROUP BY age", + TestsConstants.TEST_INDEX_ACCOUNT)); List fields = Arrays.asList("COUNT(*)", "AVG(age)"); assertContainsColumns(getSchema(response), fields); @@ -374,12 +419,12 @@ public void multipleAggregationFunctionsInSelect() throws IOException { @Test public void aggregationFunctionInHaving() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, - "SELECT gender " + - "FROM %s " + - "GROUP BY gender " + - "HAVING count(*) > 500", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT gender " + "FROM %s " + "GROUP BY gender " + "HAVING count(*) > 500", + TestsConstants.TEST_INDEX_ACCOUNT)); String ageSum = "gender"; assertContainsColumns(getSchema(response), Collections.singletonList(ageSum)); @@ -390,20 +435,24 @@ public void aggregationFunctionInHaving() throws IOException { } /** - * This case doesn't seem to be supported by the plugin at the moment. - * Looks like the painless script of the inner function is put inside the aggregation function but - * this syntax may not be correct since it returns 0 which is the default value (since 0 is returned in - * cases like COUNT(wrongField) as well). + * This case doesn't seem to be supported by the plugin at the moment. Looks like the painless + * script of the inner function is put inside the aggregation function but this syntax may not be + * correct since it returns 0 which is the default value (since 0 is returned in cases like + * COUNT(wrongField) as well). 
*/ -// @Test -// public void nestedAggregationFunctionInSelect() { -// String query = String.format(Locale.ROOT, "SELECT SUM(SQRT(age)) FROM age GROUP BY age", TEST_INDEX_ACCOUNT); -// } + // @Test + // public void nestedAggregationFunctionInSelect() { + // String query = String.format(Locale.ROOT, "SELECT SUM(SQRT(age)) FROM age GROUP BY age", + // TEST_INDEX_ACCOUNT); + // } @Test public void fieldsWithAlias() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT firstname AS first, age AS a FROM %s", - TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT firstname AS first, age AS a FROM %s", + TestsConstants.TEST_INDEX_ACCOUNT)); Map aliases = new HashMap<>(); aliases.put("firstname", "first"); @@ -414,25 +463,32 @@ public void fieldsWithAlias() throws IOException { @Test public void indexWithMissingFields() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT phrase, insert_time2 " + - "FROM %s " + - "WHERE match_phrase(phrase, 'brown fox')", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT phrase, insert_time2 " + + "FROM %s " + + "WHERE match_phrase(phrase, 'brown fox')", + TestsConstants.TEST_INDEX_PHRASE)); JSONArray dataRowEntry = getDataRows(response).getJSONArray(0); assertThat(dataRowEntry.length(), equalTo(2)); assertThat(dataRowEntry.get(0), equalTo("brown fox")); - assertThat(dataRowEntry.get(1), - equalTo(JSONObject.NULL)); // TODO See if this null check is failing + assertThat( + dataRowEntry.get(1), equalTo(JSONObject.NULL)); // TODO See if this null check is failing } @Test public void joinQuery() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT b1.balance, b1.age, b2.firstname " + - "FROM %s b1 JOIN %s b2 ON b1.age = b2.age", - TestsConstants.TEST_INDEX_ACCOUNT, 
TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT b1.balance, b1.age, b2.firstname " + + "FROM %s b1 JOIN %s b2 ON b1.age = b2.age", + TestsConstants.TEST_INDEX_ACCOUNT, + TestsConstants.TEST_INDEX_ACCOUNT)); List fields = Arrays.asList("b1.balance", "b1.age", "b2.firstname"); assertContainsColumns(getSchema(response), fields); @@ -441,9 +497,14 @@ public void joinQuery() throws IOException { @Test public void joinQueryWithAlias() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, "SELECT b1.balance AS bal, " + - " b1.age AS age, b2.firstname AS name FROM %s b1 JOIN %s b2 ON b1.age = b2.age", - TestsConstants.TEST_INDEX_ACCOUNT, TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT b1.balance AS bal, b1.age AS age, b2.firstname AS name FROM %s b1 JOIN %s" + + " b2 ON b1.age = b2.age", + TestsConstants.TEST_INDEX_ACCOUNT, + TestsConstants.TEST_INDEX_ACCOUNT)); Map aliases = new HashMap<>(); aliases.put("b1.balance", "bal"); @@ -456,16 +517,20 @@ public void joinQueryWithAlias() throws IOException { @Test public void joinQueryWithObjectFieldInSelect() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT c.name.firstname, d.name.lastname " + - "FROM %s c JOIN %s d ON d.hname = c.house", - TestsConstants.TEST_INDEX_GAME_OF_THRONES, - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT c.name.firstname, d.name.lastname " + + "FROM %s c JOIN %s d ON d.hname = c.house", + TestsConstants.TEST_INDEX_GAME_OF_THRONES, + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); List fields = Arrays.asList("c.name.firstname", "d.name.lastname"); assertContainsColumns(getSchema(response), fields); - // d.name.lastname is null here since entries with hname don't have a name.lastname entry, so only length is + // 
d.name.lastname is null here since entries with hname don't have a name.lastname entry, so + // only length is // checked JSONArray dataRows = getDataRows(response); assertThat(dataRows.length(), greaterThan(0)); @@ -476,10 +541,13 @@ public void joinQueryWithObjectFieldInSelect() throws IOException { @Test public void joinQuerySelectOnlyOnOneTable() throws Exception { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT b1.age " + - "FROM %s b1 JOIN %s b2 ON b1.firstname = b2.firstname", - TestsConstants.TEST_INDEX_ACCOUNT, TestsConstants.TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT b1.age " + "FROM %s b1 JOIN %s b2 ON b1.firstname = b2.firstname", + TestsConstants.TEST_INDEX_ACCOUNT, + TestsConstants.TEST_INDEX_ACCOUNT)); List fields = Collections.singletonList("b1.age"); assertContainsColumns(getSchema(response), fields); @@ -508,8 +576,12 @@ private void testFieldOrder(final String[] expectedFields, final Object[] expect throws IOException { final String fields = String.join(", ", expectedFields); - final String query = String.format(Locale.ROOT, "SELECT %s FROM %s " + - "WHERE email='amberduke@pyrami.com'", fields, TestsConstants.TEST_INDEX_ACCOUNT); + final String query = + String.format( + Locale.ROOT, + "SELECT %s FROM %s " + "WHERE email='amberduke@pyrami.com'", + fields, + TestsConstants.TEST_INDEX_ACCOUNT); final JSONObject result = executeQuery(query); for (int i = 0; i < expectedFields.length; ++i) { diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/PrettyFormatterIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/PrettyFormatterIT.java index 463a0bc6db..c81839a6e5 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/PrettyFormatterIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/PrettyFormatterIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static 
org.hamcrest.Matchers.equalTo; @@ -29,16 +28,18 @@ protected void init() throws Exception { public void assertExplainPrettyFormatted() throws IOException { String query = StringUtils.format("SELECT firstname FROM %s", TEST_INDEX_ACCOUNT); - String notPrettyExplainOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/explainIT_format_not_pretty.json"); + String notPrettyExplainOutputFilePath = + TestUtils.getResourceFilePath( + "src/test/resources/expectedOutput/explainIT_format_not_pretty.json"); String notPrettyExplainOutput = Files.toString(new File(notPrettyExplainOutputFilePath), StandardCharsets.UTF_8); assertThat(executeExplainRequest(query, ""), equalTo(notPrettyExplainOutput)); assertThat(executeExplainRequest(query, "pretty=false"), equalTo(notPrettyExplainOutput)); - String prettyExplainOutputFilePath = TestUtils.getResourceFilePath( - "src/test/resources/expectedOutput/explainIT_format_pretty.json"); + String prettyExplainOutputFilePath = + TestUtils.getResourceFilePath( + "src/test/resources/expectedOutput/explainIT_format_pretty.json"); String prettyExplainOutput = Files.toString(new File(prettyExplainOutputFilePath), StandardCharsets.UTF_8); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/QueryAnalysisIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/QueryAnalysisIT.java index 3a58b7ffc0..62a87d3bff 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/QueryAnalysisIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/QueryAnalysisIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.containsString; @@ -24,9 +23,7 @@ import org.opensearch.sql.legacy.exception.SqlFeatureNotImplementedException; import org.opensearch.sql.legacy.utils.StringUtils; -/** - * Integration test for syntax and semantic analysis against query by new ANTLR parser. 
- */ +/** Integration test for syntax and semantic analysis against query by new ANTLR parser. */ public class QueryAnalysisIT extends SQLIntegTestCase { @Override @@ -41,9 +38,7 @@ public void missingFromClauseShouldThrowSyntaxException() { @Test public void unsupportedOperatorShouldThrowSyntaxException() { - queryShouldThrowSyntaxException( - "SELECT * FROM opensearch-sql_test_index_bank WHERE age <=> 1" - ); + queryShouldThrowSyntaxException("SELECT * FROM opensearch-sql_test_index_bank WHERE age <=> 1"); } @Test @@ -51,8 +46,8 @@ public void nonExistingFieldNameShouldThrowSemanticException() { queryShouldThrowSemanticException( "SELECT * FROM opensearch-sql_test_index_bank WHERE balance1 = 1000", "Field [balance1] cannot be found or used here." - //"Did you mean [balance]?" - ); + // "Did you mean [balance]?" + ); } @Test @@ -60,16 +55,15 @@ public void nonExistingIndexAliasShouldThrowSemanticException() { queryShouldThrowSemanticException( "SELECT * FROM opensearch-sql_test_index_bank b WHERE a.balance = 1000", "Field [a.balance] cannot be found or used here." - //"Did you mean [b.balance]?" - ); + // "Did you mean [b.balance]?" + ); } @Test public void indexJoinNonNestedFieldShouldThrowSemanticException() { queryShouldThrowSemanticException( "SELECT * FROM opensearch-sql_test_index_bank b1, b1.firstname f1", - "Operator [JOIN] cannot work with [INDEX, KEYWORD]." - ); + "Operator [JOIN] cannot work with [INDEX, KEYWORD]."); } @Test @@ -77,8 +71,7 @@ public void scalarFunctionCallWithTypoInNameShouldThrowSemanticException() { queryShouldThrowSemanticException( "SELECT * FROM opensearch-sql_test_index_bank WHERE ABSa(age) = 1", "Function [ABSA] cannot be found or used here.", - "Did you mean [ABS]?" 
- ); + "Did you mean [ABS]?"); } @Test @@ -86,17 +79,16 @@ public void scalarFunctionCallWithWrongTypeArgumentShouldThrowSemanticException( queryShouldThrowSemanticException( "SELECT * FROM opensearch-sql_test_index_bank WHERE LOG(lastname) = 1", "Function [LOG] cannot work with [KEYWORD].", - "Usage: LOG(NUMBER T) -> DOUBLE or LOG(NUMBER T, NUMBER) -> DOUBLE" - ); + "Usage: LOG(NUMBER T) -> DOUBLE or LOG(NUMBER T, NUMBER) -> DOUBLE"); } @Test public void aggregateFunctionCallWithWrongNumberOfArgumentShouldThrowSemanticException() { queryShouldThrowSemanticException( - "SELECT city FROM opensearch-sql_test_index_bank GROUP BY city HAVING MAX(age, birthdate) > 1", + "SELECT city FROM opensearch-sql_test_index_bank GROUP BY city HAVING MAX(age, birthdate) >" + + " 1", "Function [MAX] cannot work with [INTEGER, DATE].", - "Usage: MAX(NUMBER T) -> T" - ); + "Usage: MAX(NUMBER T) -> T"); } @Test @@ -104,8 +96,7 @@ public void compareIntegerFieldWithBooleanShouldThrowSemanticException() { queryShouldThrowSemanticException( "SELECT * FROM opensearch-sql_test_index_bank b WHERE b.age IS FALSE", "Operator [IS] cannot work with [INTEGER, BOOLEAN].", - "Usage: Please use compatible types from each side." - ); + "Usage: Please use compatible types from each side."); } @Test @@ -113,8 +104,7 @@ public void compareNumberFieldWithStringShouldThrowSemanticException() { queryShouldThrowSemanticException( "SELECT * FROM opensearch-sql_test_index_bank b WHERE b.age >= 'test'", "Operator [>=] cannot work with [INTEGER, STRING].", - "Usage: Please use compatible types from each side." - ); + "Usage: Please use compatible types from each side."); } @Test @@ -122,43 +112,38 @@ public void compareLogFunctionCallWithNumberFieldWithStringShouldThrowSemanticEx queryShouldThrowSemanticException( "SELECT * FROM opensearch-sql_test_index_bank b WHERE LOG(b.balance) != 'test'", "Operator [!=] cannot work with [DOUBLE, STRING].", - "Usage: Please use compatible types from each side." 
- ); + "Usage: Please use compatible types from each side."); } @Test public void unionNumberFieldWithStringShouldThrowSemanticException() { queryShouldThrowSemanticException( - "SELECT age FROM opensearch-sql_test_index_bank" + - " UNION SELECT address FROM opensearch-sql_test_index_bank", - "Operator [UNION] cannot work with [INTEGER, TEXT]." - ); + "SELECT age FROM opensearch-sql_test_index_bank" + + " UNION SELECT address FROM opensearch-sql_test_index_bank", + "Operator [UNION] cannot work with [INTEGER, TEXT]."); } @Test public void minusBooleanFieldWithDateShouldThrowSemanticException() { queryShouldThrowSemanticException( - "SELECT male FROM opensearch-sql_test_index_bank" + - " MINUS SELECT birthdate FROM opensearch-sql_test_index_bank", - "Operator [MINUS] cannot work with [BOOLEAN, DATE]." - ); + "SELECT male FROM opensearch-sql_test_index_bank" + + " MINUS SELECT birthdate FROM opensearch-sql_test_index_bank", + "Operator [MINUS] cannot work with [BOOLEAN, DATE]."); } @Test public void useInClauseWithIncompatibleFieldTypesShouldFail() { queryShouldThrowSemanticException( - "SELECT * FROM opensearch-sql_test_index_bank WHERE male " + - " IN (SELECT 1 FROM opensearch-sql_test_index_bank)", - "Operator [IN] cannot work with [BOOLEAN, INTEGER]." 
- ); + "SELECT * FROM opensearch-sql_test_index_bank WHERE male " + + " IN (SELECT 1 FROM opensearch-sql_test_index_bank)", + "Operator [IN] cannot work with [BOOLEAN, INTEGER]."); } @Test public void queryWithNestedFunctionShouldFail() { queryShouldThrowFeatureNotImplementedException( "SELECT abs(log(balance)) FROM opensearch-sql_test_index_bank", - "Nested function calls like [abs(log(balance))] are not supported yet" - ); + "Nested function calls like [abs(log(balance))] are not supported yet"); } @Test @@ -170,29 +155,24 @@ public void nestedFunctionWithMathConstantAsInnerFunctionShouldPass() { public void aggregateWithFunctionAggregatorShouldFail() { queryShouldThrowFeatureNotImplementedException( "SELECT max(log(age)) FROM opensearch-sql_test_index_bank", - "Aggregation calls with function aggregator like [max(log(age))] are not supported yet" - ); + "Aggregation calls with function aggregator like [max(log(age))] are not supported yet"); } @Test public void queryWithUnsupportedFunctionShouldFail() { queryShouldThrowFeatureNotImplementedException( "SELECT balance DIV age FROM opensearch-sql_test_index_bank", - "Operator [DIV] is not supported yet" - ); + "Operator [DIV] is not supported yet"); } @Test public void useNegativeNumberConstantShouldPass() { queryShouldPassAnalysis( - "SELECT * FROM opensearch-sql_test_index_bank " + - "WHERE age > -1 AND balance < -123.456789" - ); + "SELECT * FROM opensearch-sql_test_index_bank " + + "WHERE age > -1 AND balance < -123.456789"); } - /** - * Run the query with cluster setting changed and cleaned after complete - */ + /** Run the query with cluster setting changed and cleaned after complete */ private void runWithClusterSetting(ClusterSetting setting, Runnable query) { try { updateClusterSettings(setting); @@ -201,7 +181,8 @@ private void runWithClusterSetting(ClusterSetting setting, Runnable query) { throw new IllegalStateException( StringUtils.format("Exception raised when running with cluster setting [%s]", 
setting)); } finally { - // Clean up or OpenSearch will throw java.lang.AssertionError: test leaves persistent cluster metadata behind + // Clean up or OpenSearch will throw java.lang.AssertionError: test leaves persistent cluster + // metadata behind try { updateClusterSettings(setting.nullify()); } catch (IOException e) { @@ -218,20 +199,19 @@ private void queryShouldThrowSemanticException(String query, String... expectedM queryShouldThrowException(query, SemanticAnalysisException.class, expectedMsgs); } - private void queryShouldThrowFeatureNotImplementedException(String query, - String... expectedMsgs) { - queryShouldThrowExceptionWithRestStatus(query, SqlFeatureNotImplementedException.class, - SERVICE_UNAVAILABLE, expectedMsgs); + private void queryShouldThrowFeatureNotImplementedException( + String query, String... expectedMsgs) { + queryShouldThrowExceptionWithRestStatus( + query, SqlFeatureNotImplementedException.class, SERVICE_UNAVAILABLE, expectedMsgs); } - private void queryShouldThrowException(String query, Class exceptionType, - String... expectedMsgs) { + private void queryShouldThrowException( + String query, Class exceptionType, String... expectedMsgs) { queryShouldThrowExceptionWithRestStatus(query, exceptionType, BAD_REQUEST, expectedMsgs); } - private void queryShouldThrowExceptionWithRestStatus(String query, Class exceptionType, - RestStatus status, - String... expectedMsgs) { + private void queryShouldThrowExceptionWithRestStatus( + String query, Class exceptionType, RestStatus status, String... 
expectedMsgs) { try { executeQuery(query); Assert.fail("Expected ResponseException, but none was thrown for query: " + query); @@ -244,8 +224,8 @@ private void queryShouldThrowExceptionWithRestStatus(String query, Class } } catch (IOException e) { throw new IllegalStateException( - "Unexpected IOException raised rather than expected AnalysisException for query: " + - query); + "Unexpected IOException raised rather than expected AnalysisException for query: " + + query); } } @@ -285,5 +265,4 @@ void assertBodyContains(String content) { assertThat(body, containsString(content)); } } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/QueryFunctionsIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/QueryFunctionsIT.java index c538db830f..0a22fbc988 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/QueryFunctionsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/QueryFunctionsIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.anyOf; @@ -44,10 +43,10 @@ public class QueryFunctionsIT extends SQLIntegTestCase { private static final String FROM_PHRASE = "FROM " + TEST_INDEX_PHRASE; /** - * TODO Looks like Math/Date Functions test all use the same query() and execute() functions - * TODO execute/featureValueOf/hits functions are the same as used in NestedFieldQueryIT, should refactor into util + * TODO Looks like Math/Date Functions test all use the same query() and execute() functions TODO + * execute/featureValueOf/hits functions are the same as used in NestedFieldQueryIT, should + * refactor into util */ - @Override protected void init() throws Exception { loadIndex(Index.ACCOUNT); @@ -58,63 +57,39 @@ protected void init() throws Exception { @Test public void query() throws IOException { assertThat( - query( - "SELECT state", - FROM_ACCOUNTS, - "WHERE QUERY('CA')" - ), - hits( - hasValueForFields("CA", "state") - ) - ); + 
query("SELECT state", FROM_ACCOUNTS, "WHERE QUERY('CA')"), + hits(hasValueForFields("CA", "state"))); } @Test public void matchQueryRegularField() throws IOException { assertThat( - query( - "SELECT firstname", - FROM_ACCOUNTS, - "WHERE MATCH_QUERY(firstname, 'Ayers')" - ), - hits( - hasValueForFields("Ayers", "firstname") - ) - ); + query("SELECT firstname", FROM_ACCOUNTS, "WHERE MATCH_QUERY(firstname, 'Ayers')"), + hits(hasValueForFields("Ayers", "firstname"))); } @Test public void matchQueryNestedField() throws IOException { SearchHit[] hits = query("SELECT comment.data", FROM_NESTED, "WHERE MATCH_QUERY(NESTED(comment.data), 'aa')") - .getHits().getHits(); + .getHits() + .getHits(); Map source = hits[0].getSourceAsMap(); // SearchHits innerHits = hits[0].getInnerHits().get("comment"); assertThat( - query( - "SELECT comment.data", - FROM_NESTED, - "WHERE MATCH_QUERY(NESTED(comment.data), 'aa')" - ), + query("SELECT comment.data", FROM_NESTED, "WHERE MATCH_QUERY(NESTED(comment.data), 'aa')"), hits( - anyOf(hasNestedField("comment", "data", "aa"), - hasNestedArrayField("comment", "data", "aa")) - ) - ); + anyOf( + hasNestedField("comment", "data", "aa"), + hasNestedArrayField("comment", "data", "aa")))); } @Test public void scoreQuery() throws IOException { assertThat( query( - "SELECT firstname", - FROM_ACCOUNTS, - "WHERE SCORE(MATCH_QUERY(firstname, 'Ayers'), 10)" - ), - hits( - hasValueForFields("Ayers", "firstname") - ) - ); + "SELECT firstname", FROM_ACCOUNTS, "WHERE SCORE(MATCH_QUERY(firstname, 'Ayers'), 10)"), + hits(hasValueForFields("Ayers", "firstname"))); } @Test @@ -123,42 +98,24 @@ public void scoreQueryWithNestedField() throws IOException { query( "SELECT comment.data", FROM_NESTED, - "WHERE SCORE(MATCH_QUERY(NESTED(comment.data), 'ab'), 10)" - ), + "WHERE SCORE(MATCH_QUERY(NESTED(comment.data), 'ab'), 10)"), hits( - //hasValueForFields("ab", "comment.data") - hasNestedField("comment", - "data", "ab") - ) - ); + // hasValueForFields("ab", 
"comment.data") + hasNestedField("comment", "data", "ab"))); } @Test public void wildcardQuery() throws IOException { assertThat( - query( - "SELECT city", - FROM_ACCOUNTS, - "WHERE WILDCARD_QUERY(city.keyword, 'B*')" - ), - hits( - hasFieldWithPrefix("city", "B") - ) - ); + query("SELECT city", FROM_ACCOUNTS, "WHERE WILDCARD_QUERY(city.keyword, 'B*')"), + hits(hasFieldWithPrefix("city", "B"))); } @Test public void matchPhraseQuery() throws IOException { assertThat( - query( - "SELECT phrase", - FROM_PHRASE, - "WHERE MATCH_PHRASE(phrase, 'brown fox')" - ), - hits( - hasValueForFields("brown fox", "phrase") - ) - ); + query("SELECT phrase", FROM_PHRASE, "WHERE MATCH_PHRASE(phrase, 'brown fox')"), + hits(hasValueForFields("brown fox", "phrase"))); } @Test @@ -167,12 +124,8 @@ public void multiMatchQuerySingleField() throws IOException { query( "SELECT firstname", FROM_ACCOUNTS, - "WHERE MULTI_MATCH('query'='Ayers', 'fields'='firstname')" - ), - hits( - hasValueForFields("Ayers", "firstname") - ) - ); + "WHERE MULTI_MATCH('query'='Ayers', 'fields'='firstname')"), + hits(hasValueForFields("Ayers", "firstname"))); } @Test @@ -181,36 +134,30 @@ public void multiMatchQueryWildcardField() throws IOException { query( "SELECT firstname, lastname", FROM_ACCOUNTS, - "WHERE MULTI_MATCH('query'='Bradshaw', 'fields'='*name')" - ), - hits( - hasValueForFields("Bradshaw", "firstname", "lastname") - ) - ); + "WHERE MULTI_MATCH('query'='Bradshaw', 'fields'='*name')"), + hits(hasValueForFields("Bradshaw", "firstname", "lastname"))); } @Test public void numberLiteralInSelectField() { assertTrue( - executeQuery(StringUtils.format("SELECT 234234 AS number from %s", TEST_INDEX_ACCOUNT), - "jdbc") - .contains("234234") - ); + executeQuery( + StringUtils.format("SELECT 234234 AS number from %s", TEST_INDEX_ACCOUNT), "jdbc") + .contains("234234")); assertTrue( - executeQuery(StringUtils.format("SELECT 2.34234 AS number FROM %s", TEST_INDEX_ACCOUNT), - "jdbc") - .contains("2.34234") - ); + 
executeQuery( + StringUtils.format("SELECT 2.34234 AS number FROM %s", TEST_INDEX_ACCOUNT), "jdbc") + .contains("2.34234")); } private final Matcher hits(Matcher subMatcher) { - return featureValueOf("hits", everyItem(subMatcher), - resp -> Arrays.asList(resp.getHits().getHits())); + return featureValueOf( + "hits", everyItem(subMatcher), resp -> Arrays.asList(resp.getHits().getHits())); } - private FeatureMatcher featureValueOf(String name, Matcher subMatcher, - Function getter) { + private FeatureMatcher featureValueOf( + String name, Matcher subMatcher, Function getter) { return new FeatureMatcher(subMatcher, name, name) { @Override protected U featureValueOf(T actual) { @@ -220,44 +167,39 @@ protected U featureValueOf(T actual) { } /** - * Create Matchers for each field and its value - * Only one of the Matchers need to match (per hit) - *

- * Ex. If a query with wildcard field is made: - * multi_match(query="Ayers", fields="*name") - *

- * Then the value "Ayers" can be found in either the firstname or lastname field. Only one of these fields - * need to satisfy the query value to be evaluated as correct expected output. + * Create Matchers for each field and its value Only one of the Matchers need to match (per hit) + * + *

Ex. If a query with wildcard field is made: multi_match(query="Ayers", fields="*name") * - * @param value The value to match for a field in the sourceMap + *

Then the value "Ayers" can be found in either the firstname or lastname field. Only one of + * these fields need to satisfy the query value to be evaluated as correct expected output. + * + * @param value The value to match for a field in the sourceMap * @param fields A list of fields to match */ @SafeVarargs private final Matcher hasValueForFields(String value, String... fields) { return anyOf( - Arrays.asList(fields). - stream(). - map(field -> kv(field, is(value))). - collect(Collectors.toList())); + Arrays.asList(fields).stream() + .map(field -> kv(field, is(value))) + .collect(Collectors.toList())); } private final Matcher hasFieldWithPrefix(String field, String prefix) { - return featureValueOf(field, startsWith(prefix), - hit -> (String) hit.getSourceAsMap().get(field)); + return featureValueOf( + field, startsWith(prefix), hit -> (String) hit.getSourceAsMap().get(field)); } private final Matcher hasNestedField(String path, String field, String value) { - return featureValueOf(field, is(value), - hit -> ((HashMap) hit.getSourceAsMap().get(path)).get(field)); + return featureValueOf( + field, is(value), hit -> ((HashMap) hit.getSourceAsMap().get(path)).get(field)); } private final Matcher hasNestedArrayField(String path, String field, String value) { return new BaseMatcher() { @Override - public void describeTo(Description description) { - - } + public void describeTo(Description description) {} @Override public boolean matches(Object item) { @@ -275,7 +217,7 @@ private Matcher kv(String key, Matcher valMatcher) { } /*********************************************************** - Query Utility to Fetch Response for SQL + * Query Utility to Fetch Response for SQL ***********************************************************/ private SearchResponse query(String select, String from, String... statements) @@ -286,10 +228,11 @@ private SearchResponse query(String select, String from, String... 
statements) private SearchResponse execute(String sql) throws IOException { final JSONObject jsonObject = executeQuery(sql); - final XContentParser parser = new JsonXContentParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(jsonObject.toString())); + final XContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(jsonObject.toString())); return SearchResponse.fromXContent(parser); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/QueryIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/QueryIT.java index f99285a90b..880a91c76b 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/QueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/QueryIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.allOf; @@ -45,22 +44,17 @@ public class QueryIT extends SQLIntegTestCase { /** * Currently commenting out tests related to JoinType index since there is an issue with mapping. - *

- * Also ignoring the following tests as they are failing, will require investigation: - * - idsQuerySubQueryIds - * - escapedCharactersCheck - * - fieldCollapsingTest - * - idsQueryOneId - * - idsQueryMultipleId - * - multipleIndicesOneNotExistWithoutHint - *

- * The following tests are being ignored because subquery is still running in OpenSearch transport thread: - * - twoSubQueriesTest() - * - inTermsSubQueryTest() + * + *

Also ignoring the following tests as they are failing, will require investigation: - + * idsQuerySubQueryIds - escapedCharactersCheck - fieldCollapsingTest - idsQueryOneId - + * idsQueryMultipleId - multipleIndicesOneNotExistWithoutHint + * + *

The following tests are being ignored because subquery is still running in OpenSearch + * transport thread: - twoSubQueriesTest() - inTermsSubQueryTest() */ + static final int BANK_INDEX_MALE_TRUE = 4; - final static int BANK_INDEX_MALE_TRUE = 4; - final static int BANK_INDEX_MALE_FALSE = 3; + static final int BANK_INDEX_MALE_FALSE = 3; @Override protected void init() throws Exception { @@ -87,92 +81,87 @@ public void queryEndWithSemiColonTest() { @Test public void searchTypeTest() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, "SELECT * FROM %s LIMIT 1000", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, "SELECT * FROM %s LIMIT 1000", TestsConstants.TEST_INDEX_PHRASE)); Assert.assertTrue(response.has("hits")); Assert.assertEquals(6, getTotalHits(response)); } @Test public void multipleFromTest() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, - "SELECT * FROM %s, %s LIMIT 2000", - TestsConstants.TEST_INDEX_BANK, TestsConstants.TEST_INDEX_BANK_TWO)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s, %s LIMIT 2000", + TestsConstants.TEST_INDEX_BANK, + TestsConstants.TEST_INDEX_BANK_TWO)); Assert.assertTrue(response.has("hits")); Assert.assertEquals(14, getTotalHits(response)); } @Test public void selectAllWithFieldReturnsAll() throws IOException { - JSONObject response = executeQuery(StringUtils.format( - "SELECT *, age " + - "FROM %s " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK - )); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT *, age " + "FROM %s " + "LIMIT 5", TestsConstants.TEST_INDEX_BANK)); checkSelectAllAndFieldResponseSize(response); } @Test public void selectAllWithFieldReverseOrder() throws IOException { - JSONObject response = executeQuery(StringUtils.format( - "SELECT *, age " + - "FROM %s " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK - )); + 
JSONObject response = + executeQuery( + StringUtils.format( + "SELECT *, age " + "FROM %s " + "LIMIT 5", TestsConstants.TEST_INDEX_BANK)); checkSelectAllAndFieldResponseSize(response); } @Test public void selectAllWithMultipleFields() throws IOException { - JSONObject response = executeQuery(StringUtils.format( - "SELECT *, age, address " + - "FROM %s " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK - )); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT *, age, address " + "FROM %s " + "LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkSelectAllAndFieldResponseSize(response); } @Test public void selectAllWithFieldAndOrderBy() throws IOException { - JSONObject response = executeQuery(StringUtils.format( - "SELECT *, age " + - "FROM %s " + - "ORDER BY age " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK - )); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT *, age " + "FROM %s " + "ORDER BY age " + "LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkSelectAllAndFieldResponseSize(response); } @Test public void selectAllWithFieldAndGroupBy() throws IOException { - JSONObject response = executeQuery(StringUtils.format( - "SELECT *, age " + - "FROM %s " + - "GROUP BY age " + - "LIMIT 10", - TestsConstants.TEST_INDEX_BANK - )); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT *, age " + "FROM %s " + "GROUP BY age " + "LIMIT 10", + TestsConstants.TEST_INDEX_BANK)); checkSelectAllAndFieldAggregationResponseSize(response, "age"); } @Test public void selectAllWithFieldAndGroupByReverseOrder() throws IOException { - JSONObject response = executeQuery(StringUtils.format( - "SELECT *, age " + - "FROM %s " + - "GROUP BY age " + - "LIMIT 10", - TestsConstants.TEST_INDEX_BANK - )); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT *, age " + "FROM %s " + "GROUP BY age " + "LIMIT 10", + TestsConstants.TEST_INDEX_BANK)); checkSelectAllAndFieldAggregationResponseSize(response, "age"); } @@ 
-180,14 +169,16 @@ public void selectAllWithFieldAndGroupByReverseOrder() throws IOException { @Test public void selectFieldWithAliasAndGroupBy() { String response = - executeQuery("SELECT lastname AS name FROM " + TEST_INDEX_ACCOUNT + " GROUP BY name", - "jdbc"); + executeQuery( + "SELECT lastname AS name FROM " + TEST_INDEX_ACCOUNT + " GROUP BY name", "jdbc"); assertThat(response, containsString("\"alias\": \"name\"")); } public void indexWithWildcardTest() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, "SELECT * FROM %s* LIMIT 1000", - TestsConstants.TEST_INDEX_BANK)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, "SELECT * FROM %s* LIMIT 1000", TestsConstants.TEST_INDEX_BANK)); Assert.assertTrue(response.has("hits")); assertThat(getTotalHits(response), greaterThan(0)); } @@ -198,8 +189,8 @@ public void selectSpecificFields() throws IOException { Set expectedSource = new HashSet<>(Arrays.asList(arr)); JSONObject response = - executeQuery(String.format(Locale.ROOT, "SELECT age, account_number FROM %s", - TEST_INDEX_ACCOUNT)); + executeQuery( + String.format(Locale.ROOT, "SELECT age, account_number FROM %s", TEST_INDEX_ACCOUNT)); assertResponseForSelectSpecificFields(response, expectedSource); } @@ -209,8 +200,9 @@ public void selectSpecificFieldsUsingTableAlias() throws IOException { Set expectedSource = new HashSet<>(Arrays.asList(arr)); JSONObject response = - executeQuery(String.format(Locale.ROOT, "SELECT a.age, a.account_number FROM %s a", - TEST_INDEX_ACCOUNT)); + executeQuery( + String.format( + Locale.ROOT, "SELECT a.age, a.account_number FROM %s a", TEST_INDEX_ACCOUNT)); assertResponseForSelectSpecificFields(response, expectedSource); } @@ -219,15 +211,18 @@ public void selectSpecificFieldsUsingTableNamePrefix() throws IOException { String[] arr = new String[] {"age", "account_number"}; Set expectedSource = new HashSet<>(Arrays.asList(arr)); - JSONObject response = 
executeQuery(String.format(Locale.ROOT, - "SELECT opensearch-sql_test_index_account.age, opensearch-sql_test_index_account.account_number" + - " FROM %s", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT opensearch-sql_test_index_account.age," + + " opensearch-sql_test_index_account.account_number FROM %s", + TEST_INDEX_ACCOUNT)); assertResponseForSelectSpecificFields(response, expectedSource); } - private void assertResponseForSelectSpecificFields(JSONObject response, - Set expectedSource) { + private void assertResponseForSelectSpecificFields( + JSONObject response, Set expectedSource) { JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { JSONObject hit = hits.getJSONObject(i); @@ -240,9 +235,12 @@ public void selectFieldWithSpace() throws IOException { String[] arr = new String[] {"test field"}; Set expectedSource = new HashSet<>(Arrays.asList(arr)); - JSONObject response = executeQuery(String.format(Locale.ROOT, "SELECT ['test field'] FROM %s " + - "WHERE ['test field'] IS NOT null", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT ['test field'] FROM %s " + "WHERE ['test field'] IS NOT null", + TestsConstants.TEST_INDEX_PHRASE)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -259,19 +257,28 @@ public void selectAliases() throws IOException { String[] arr = new String[] {"myage", "myaccount_number"}; Set expectedSource = new HashSet<>(Arrays.asList(arr)); - JSONObject result = executeQuery(String.format(Locale.ROOT, - "SELECT age AS myage, account_number AS myaccount_number FROM %s", TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + String.format( + Locale.ROOT, + "SELECT age AS myage, account_number AS myaccount_number FROM %s", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(result); - hits.forEach(hitObj -> { - JSONObject hit = (JSONObject) hitObj; - 
Assert.assertEquals(expectedSource, hit.getJSONObject("_source").keySet()); - }); + hits.forEach( + hitObj -> { + JSONObject hit = (JSONObject) hitObj; + Assert.assertEquals(expectedSource, hit.getJSONObject("_source").keySet()); + }); } @Test public void useTableAliasInWhereClauseTest() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, - "SELECT * FROM %s a WHERE a.city = 'Nogal' LIMIT 1000", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s a WHERE a.city = 'Nogal' LIMIT 1000", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -280,8 +287,12 @@ public void useTableAliasInWhereClauseTest() throws IOException { @Test public void notUseTableAliasInWhereClauseTest() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, - "SELECT * FROM %s a WHERE city = 'Nogal' LIMIT 1000", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s a WHERE city = 'Nogal' LIMIT 1000", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -290,10 +301,13 @@ public void notUseTableAliasInWhereClauseTest() throws IOException { @Test public void useTableNamePrefixInWhereClauseTest() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, - "SELECT * FROM %s WHERE opensearch-sql_test_index_account.city = 'Nogal' LIMIT 1000", - TEST_INDEX_ACCOUNT - )); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE opensearch-sql_test_index_account.city = 'Nogal' LIMIT" + + " 1000", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -302,8 +316,12 @@ public void useTableNamePrefixInWhereClauseTest() throws IOException { @Test public void equalityTest() throws IOException { 
- JSONObject response = executeQuery(String.format(Locale.ROOT, - "SELECT * FROM %s WHERE city = 'Nogal' LIMIT 1000", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE city = 'Nogal' LIMIT 1000", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -312,9 +330,12 @@ public void equalityTest() throws IOException { @Test public void equalityTestPhrase() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, "SELECT * FROM %s WHERE " + - "match_phrase(phrase, 'quick fox here') LIMIT 1000", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE " + "match_phrase(phrase, 'quick fox here') LIMIT 1000", + TestsConstants.TEST_INDEX_PHRASE)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -324,10 +345,13 @@ public void equalityTestPhrase() throws IOException { @Test public void greaterThanTest() throws IOException { int someAge = 25; - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE age > %s LIMIT 1000", - TestsConstants.TEST_INDEX_PEOPLE, - someAge)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE age > %s LIMIT 1000", + TestsConstants.TEST_INDEX_PEOPLE, + someAge)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -340,10 +364,13 @@ public void greaterThanTest() throws IOException { @Test public void greaterThanOrEqualTest() throws IOException { int someAge = 25; - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE age >= %s LIMIT 1000", - TEST_INDEX_ACCOUNT, - someAge)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE age >= %s LIMIT 1000", + TEST_INDEX_ACCOUNT, + someAge)); boolean 
isEqualFound = false; JSONArray hits = getHits(response); @@ -352,24 +379,27 @@ public void greaterThanOrEqualTest() throws IOException { int age = getSource(hit).getInt("age"); assertThat(age, greaterThanOrEqualTo(someAge)); - if (age == someAge) { - isEqualFound = true; - } + if (age == someAge) { + isEqualFound = true; + } } Assert.assertTrue( - String.format(Locale.ROOT, "At least one of the documents need to contains age equal to %s", - someAge), + String.format( + Locale.ROOT, "At least one of the documents need to contains age equal to %s", someAge), isEqualFound); } @Test public void lessThanTest() throws IOException { int someAge = 25; - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE age < %s LIMIT 1000", - TestsConstants.TEST_INDEX_PEOPLE, - someAge)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE age < %s LIMIT 1000", + TestsConstants.TEST_INDEX_PEOPLE, + someAge)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -382,10 +412,13 @@ public void lessThanTest() throws IOException { @Test public void lessThanOrEqualTest() throws IOException { int someAge = 25; - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE age <= %s LIMIT 1000", - TEST_INDEX_ACCOUNT, - someAge)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE age <= %s LIMIT 1000", + TEST_INDEX_ACCOUNT, + someAge)); boolean isEqualFound = false; JSONArray hits = getHits(response); @@ -394,32 +427,39 @@ public void lessThanOrEqualTest() throws IOException { int age = getSource(hit).getInt("age"); assertThat(age, lessThanOrEqualTo(someAge)); - if (age == someAge) { - isEqualFound = true; - } + if (age == someAge) { + isEqualFound = true; + } } Assert.assertTrue( - String.format(Locale.ROOT, "At least one of the documents need to contains age equal to %s", - someAge), + String.format( + 
Locale.ROOT, "At least one of the documents need to contains age equal to %s", someAge), isEqualFound); } @Test public void orTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE match_phrase(gender, 'F') OR match_phrase(gender, 'M') " + - "LIMIT 1000", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE match_phrase(gender, 'F') OR match_phrase(gender, 'M') " + + "LIMIT 1000", + TEST_INDEX_ACCOUNT)); Assert.assertEquals(1000, getTotalHits(response)); } @Test public void andTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE age=32 AND gender='M' LIMIT 1000", - TestsConstants.TEST_INDEX_PEOPLE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE age=32 AND gender='M' LIMIT 1000", + TestsConstants.TEST_INDEX_PEOPLE)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -431,9 +471,12 @@ public void andTest() throws IOException { @Test public void likeTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE firstname LIKE 'amb%%' LIMIT 1000", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE firstname LIKE 'amb%%' LIMIT 1000", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -442,9 +485,12 @@ public void likeTest() throws IOException { @Test public void notLikeTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE firstname NOT LIKE 'amb%%'", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE firstname NOT LIKE 'amb%%'", + 
TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertNotEquals(0, getTotalHits(response)); @@ -456,11 +502,13 @@ public void notLikeTest() throws IOException { @Test public void regexQueryTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE dog_name = REGEXP_QUERY('sn.*', 'INTERSECTION|COMPLEMENT|EMPTY', 10000)", - TestsConstants.TEST_INDEX_DOG)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE dog_name = REGEXP_QUERY('sn.*'," + + " 'INTERSECTION|COMPLEMENT|EMPTY', 10000)", + TestsConstants.TEST_INDEX_DOG)); JSONArray hits = getHits(response); Assert.assertEquals(1, hits.length()); @@ -473,11 +521,13 @@ public void regexQueryTest() throws IOException { @Test public void negativeRegexQueryTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE NOT(dog_name = REGEXP_QUERY('sn.*', 'INTERSECTION|COMPLEMENT|EMPTY', 10000))", - TestsConstants.TEST_INDEX_DOG)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE NOT(dog_name = REGEXP_QUERY('sn.*'," + + " 'INTERSECTION|COMPLEMENT|EMPTY', 10000))", + TestsConstants.TEST_INDEX_DOG)); JSONArray hits = getHits(response); Assert.assertEquals(1, hits.length()); @@ -489,28 +539,36 @@ public void negativeRegexQueryTest() throws IOException { @Test public void doubleNotTest() throws IOException { - JSONObject response1 = executeQuery( - String.format(Locale.ROOT, - "SELECT * FROM %s WHERE NOT gender LIKE 'm' AND NOT gender LIKE 'f'", - TEST_INDEX_ACCOUNT)); + JSONObject response1 = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE NOT gender LIKE 'm' AND NOT gender LIKE 'f'", + TEST_INDEX_ACCOUNT)); Assert.assertEquals(0, getTotalHits(response1)); - JSONObject response2 = executeQuery( - String.format(Locale.ROOT, - "SELECT * FROM %s WHERE 
NOT gender LIKE 'm' AND gender NOT LIKE 'f'", - TEST_INDEX_ACCOUNT)); + JSONObject response2 = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE NOT gender LIKE 'm' AND gender NOT LIKE 'f'", + TEST_INDEX_ACCOUNT)); Assert.assertEquals(0, getTotalHits(response2)); - JSONObject response3 = executeQuery( - String.format(Locale.ROOT, - "SELECT * FROM %s WHERE gender NOT LIKE 'm' AND gender NOT LIKE 'f'", - TEST_INDEX_ACCOUNT)); + JSONObject response3 = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE gender NOT LIKE 'm' AND gender NOT LIKE 'f'", + TEST_INDEX_ACCOUNT)); Assert.assertEquals(0, getTotalHits(response3)); - JSONObject response4 = executeQuery( - String.format(Locale.ROOT, - "SELECT * FROM %s WHERE gender LIKE 'm' AND NOT gender LIKE 'f'", - TEST_INDEX_ACCOUNT)); + JSONObject response4 = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE gender LIKE 'm' AND NOT gender LIKE 'f'", + TEST_INDEX_ACCOUNT)); // Assert there are results and they all have gender 'm' Assert.assertNotEquals(0, getTotalHits(response4)); JSONArray hits = getHits(response4); @@ -519,16 +577,19 @@ public void doubleNotTest() throws IOException { Assert.assertEquals("m", getSource(hit).getString("gender").toLowerCase()); } - JSONObject response5 = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE NOT (gender = 'm' OR gender = 'f')", - TEST_INDEX_ACCOUNT)); + JSONObject response5 = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE NOT (gender = 'm' OR gender = 'f')", + TEST_INDEX_ACCOUNT)); Assert.assertEquals(0, getTotalHits(response5)); } @Test public void limitTest() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, "SELECT * FROM %s LIMIT 30", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery(String.format(Locale.ROOT, "SELECT * FROM %s LIMIT 30", TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertEquals(30, 
hits.length()); @@ -538,9 +599,14 @@ public void limitTest() throws IOException { public void betweenTest() throws IOException { int min = 27; int max = 30; - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE age BETWEEN %s AND %s LIMIT 1000", - TestsConstants.TEST_INDEX_PEOPLE, min, max)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE age BETWEEN %s AND %s LIMIT 1000", + TestsConstants.TEST_INDEX_PEOPLE, + min, + max)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -556,9 +622,14 @@ public void betweenTest() throws IOException { public void notBetweenTest() throws IOException { int min = 20; int max = 37; - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE age NOT BETWEEN %s AND %s LIMIT 1000", - TestsConstants.TEST_INDEX_PEOPLE, min, max)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE age NOT BETWEEN %s AND %s LIMIT 1000", + TestsConstants.TEST_INDEX_PEOPLE, + min, + max)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -575,9 +646,12 @@ public void notBetweenTest() throws IOException { @Test public void inTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT age FROM %s WHERE age IN (20, 22) LIMIT 1000", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT age FROM %s WHERE age IN (20, 22) LIMIT 1000", + TestsConstants.TEST_INDEX_PHRASE)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -589,10 +663,12 @@ public void inTest() throws IOException { @Test public void inTestWithStrings() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, - "SELECT phrase FROM %s WHERE phrase IN ('quick', 'fox') LIMIT 1000", - 
TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT phrase FROM %s WHERE phrase IN ('quick', 'fox') LIMIT 1000", + TestsConstants.TEST_INDEX_PHRASE)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -604,12 +680,15 @@ public void inTestWithStrings() throws IOException { @Test public void inTermsTestWithIdentifiersTreatedLikeStrings() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT name " + - "FROM %s " + - "WHERE name.firstname = IN_TERMS('daenerys','eddard') " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT name " + + "FROM %s " + + "WHERE name.firstname = IN_TERMS('daenerys','eddard') " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(2, getTotalHits(response)); @@ -622,12 +701,15 @@ public void inTermsTestWithIdentifiersTreatedLikeStrings() throws IOException { @Test public void inTermsTestWithStrings() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT name " + - "FROM %s " + - "WHERE name.firstname = IN_TERMS('daenerys','eddard') " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT name " + + "FROM %s " + + "WHERE name.firstname = IN_TERMS('daenerys','eddard') " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(2, getTotalHits(response)); @@ -640,12 +722,15 @@ public void inTermsTestWithStrings() throws IOException { @Test public void inTermsWithNumbers() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT name " + - "FROM %s " + - "WHERE name.ofHisName = IN_TERMS(4,2) " + - "LIMIT 1000", - 
TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT name " + + "FROM %s " + + "WHERE name.ofHisName = IN_TERMS(4,2) " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -657,10 +742,12 @@ public void inTermsWithNumbers() throws IOException { @Test public void termQueryWithNumber() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, - "SELECT name FROM %s WHERE name.ofHisName = term(4) LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT name FROM %s WHERE name.ofHisName = term(4) LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -672,12 +759,15 @@ public void termQueryWithNumber() throws IOException { @Test public void termQueryWithStringIdentifier() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT name " + - "FROM %s " + - "WHERE name.firstname = term('brandon') " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT name " + + "FROM %s " + + "WHERE name.firstname = term('brandon') " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -689,12 +779,15 @@ public void termQueryWithStringIdentifier() throws IOException { @Test public void termQueryWithStringLiteral() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT name " + - "FROM %s " + - "WHERE name.firstname = term('brandon') " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + 
String.format( + Locale.ROOT, + "SELECT name " + + "FROM %s " + + "WHERE name.firstname = term('brandon') " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -708,9 +801,12 @@ public void termQueryWithStringLiteral() throws IOException { // are returned as well. This may be incorrect behavior. @Test public void notInTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT age FROM %s WHERE age NOT IN (20, 22) LIMIT 1000", - TestsConstants.TEST_INDEX_PEOPLE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT age FROM %s WHERE age NOT IN (20, 22) LIMIT 1000", + TestsConstants.TEST_INDEX_PEOPLE)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -730,9 +826,12 @@ public void dateSearch() throws IOException { DateTimeFormatter formatter = DateTimeFormat.forPattern(TestsConstants.DATE_FORMAT); DateTime dateToCompare = new DateTime(2014, 8, 18, 0, 0, 0); - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT insert_time FROM %s WHERE insert_time < '2014-08-18'", - TestsConstants.TEST_INDEX_ONLINE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT insert_time FROM %s WHERE insert_time < '2014-08-18'", + TestsConstants.TEST_INDEX_ONLINE)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { JSONObject hit = hits.getJSONObject(i); @@ -740,8 +839,8 @@ public void dateSearch() throws IOException { DateTime insertTime = formatter.parseDateTime(source.getString("insert_time")); String errorMessage = - String.format(Locale.ROOT, "insert_time must be before 2014-08-18. Found: %s", - insertTime); + String.format( + Locale.ROOT, "insert_time must be before 2014-08-18. 
Found: %s", insertTime); Assert.assertTrue(errorMessage, insertTime.isBefore(dateToCompare)); } } @@ -751,10 +850,12 @@ public void dateSearchBraces() throws IOException { DateTimeFormatter formatter = DateTimeFormat.forPattern(TestsConstants.TS_DATE_FORMAT); DateTime dateToCompare = new DateTime(2015, 3, 15, 0, 0, 0); - JSONObject response = executeQuery( - String.format(Locale.ROOT, - "SELECT odbc_time FROM %s WHERE odbc_time < {ts '2015-03-15 00:00:00.000'}", - TestsConstants.TEST_INDEX_ODBC)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT odbc_time FROM %s WHERE odbc_time < {ts '2015-03-15 00:00:00.000'}", + TestsConstants.TEST_INDEX_ODBC)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { JSONObject hit = hits.getJSONObject(i); @@ -764,8 +865,8 @@ public void dateSearchBraces() throws IOException { DateTime insertTime = formatter.parseDateTime(insertTimeStr); String errorMessage = - String.format(Locale.ROOT, "insert_time must be before 2015-03-15. Found: %s", - insertTime); + String.format( + Locale.ROOT, "insert_time must be before 2015-03-15. 
Found: %s", insertTime); Assert.assertTrue(errorMessage, insertTime.isBefore(dateToCompare)); } } @@ -777,20 +878,24 @@ public void dateBetweenSearch() throws IOException { DateTime dateLimit1 = new DateTime(2014, 8, 18, 0, 0, 0); DateTime dateLimit2 = new DateTime(2014, 8, 21, 0, 0, 0); - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT insert_time " + - "FROM %s " + - "WHERE insert_time BETWEEN '2014-08-18' AND '2014-08-21' " + - "LIMIT 3", - TestsConstants.TEST_INDEX_ONLINE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT insert_time " + + "FROM %s " + + "WHERE insert_time BETWEEN '2014-08-18' AND '2014-08-21' " + + "LIMIT 3", + TestsConstants.TEST_INDEX_ONLINE)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { JSONObject hit = hits.getJSONObject(i); JSONObject source = getSource(hit); DateTime insertTime = formatter.parseDateTime(source.getString("insert_time")); - boolean isBetween = (insertTime.isAfter(dateLimit1) || insertTime.isEqual(dateLimit1)) && - (insertTime.isBefore(dateLimit2) || insertTime.isEqual(dateLimit2)); + boolean isBetween = + (insertTime.isAfter(dateLimit1) || insertTime.isEqual(dateLimit1)) + && (insertTime.isBefore(dateLimit2) || insertTime.isEqual(dateLimit2)); Assert.assertTrue("insert_time must be between 2014-08-18 and 2014-08-21", isBetween); } @@ -798,9 +903,12 @@ public void dateBetweenSearch() throws IOException { @Test public void missFilterSearch() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE insert_time2 IS missing", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE insert_time2 IS missing", + TestsConstants.TEST_INDEX_PHRASE)); JSONArray hits = getHits(response); Assert.assertEquals(4, getTotalHits(response)); @@ -814,9 +922,12 @@ public void missFilterSearch() throws IOException { @Test 
public void notMissFilterSearch() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE insert_time2 IS NOT missing", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE insert_time2 IS NOT missing", + TestsConstants.TEST_INDEX_PHRASE)); JSONArray hits = getHits(response); Assert.assertEquals(2, getTotalHits(response)); @@ -830,15 +941,19 @@ public void notMissFilterSearch() throws IOException { @Test public void complexConditionQuery() throws IOException { - String errorMessage = "Result does not exist to the condition " + - "(gender='m' AND (age> 25 OR account_number>5)) OR (gender='f' AND (age>30 OR account_number < 8)"; + String errorMessage = + "Result does not exist to the condition (gender='m' AND (age> 25 OR account_number>5)) OR" + + " (gender='f' AND (age>30 OR account_number < 8)"; - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE (gender='m' AND (age> 25 OR account_number>5)) " + - "OR (gender='f' AND (age>30 OR account_number < 8))", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE (gender='m' AND (age> 25 OR account_number>5)) " + + "OR (gender='f' AND (age>30 OR account_number < 8))", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -849,7 +964,8 @@ public void complexConditionQuery() throws IOException { int age = source.getInt("age"); int accountNumber = source.getInt("account_number"); - Assert.assertTrue(errorMessage, + Assert.assertTrue( + errorMessage, (gender.equals("m") && (age > 25 || accountNumber > 5)) || (gender.equals("f") && (age > 30 || accountNumber < 8))); } @@ -857,16 +973,20 @@ public void complexConditionQuery() throws IOException { @Test public void complexNotConditionQuery() throws IOException { 
- String errorMessage = "Result does not exist to the condition " + - "NOT (gender='m' AND NOT (age > 25 OR account_number > 5)) " + - "OR (NOT gender='f' AND NOT (age > 30 OR account_number < 8))"; + String errorMessage = + "Result does not exist to the condition " + + "NOT (gender='m' AND NOT (age > 25 OR account_number > 5)) " + + "OR (NOT gender='f' AND NOT (age > 30 OR account_number < 8))"; - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE NOT (gender='m' AND NOT (age > 25 OR account_number > 5)) " + - "OR (NOT gender='f' AND NOT (age > 30 OR account_number < 8))", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE NOT (gender='m' AND NOT (age > 25 OR account_number > 5)) " + + "OR (NOT gender='f' AND NOT (age > 30 OR account_number < 8))", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertNotEquals(0, hits.length()); @@ -878,7 +998,8 @@ public void complexNotConditionQuery() throws IOException { int age = source.getInt("age"); int accountNumber = source.getInt("account_number"); - Assert.assertTrue(errorMessage, + Assert.assertTrue( + errorMessage, !(gender.equals("m") && !(age > 25 || accountNumber > 5)) || (!gender.equals("f") && !(age > 30 || accountNumber < 8))); } @@ -887,9 +1008,10 @@ public void complexNotConditionQuery() throws IOException { @Test @SuppressWarnings("unchecked") public void orderByAscTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT age FROM %s ORDER BY age ASC LIMIT 1000", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, "SELECT age FROM %s ORDER BY age ASC LIMIT 1000", TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); ArrayList ages = new ArrayList<>(); @@ -907,17 +1029,23 @@ public void orderByAscTest() throws IOException { @Test public void orderByDescTest() throws 
IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT age FROM %s ORDER BY age DESC LIMIT 1000", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT age FROM %s ORDER BY age DESC LIMIT 1000", + TEST_INDEX_ACCOUNT)); assertResponseForOrderByTest(response); } @Test public void orderByDescUsingTableAliasTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT a.age FROM %s a ORDER BY a.age DESC LIMIT 1000", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT a.age FROM %s a ORDER BY a.age DESC LIMIT 1000", + TEST_INDEX_ACCOUNT)); assertResponseForOrderByTest(response); } @@ -940,13 +1068,16 @@ private void assertResponseForOrderByTest(JSONObject response) { @Test @SuppressWarnings("unchecked") public void orderByAscFieldWithSpaceTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE `test field` IS NOT null " + - "ORDER BY `test field` ASC " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE `test field` IS NOT null " + + "ORDER BY `test field` ASC " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_PHRASE)); JSONArray hits = getHits(response); ArrayList testFields = new ArrayList<>(); @@ -964,195 +1095,177 @@ public void orderByAscFieldWithSpaceTest() throws IOException { @Test public void testWhereWithBoolEqualsTrue() throws IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male = true " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * " + "FROM %s " + "WHERE male = true " + "LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkResponseSize(response, 
BANK_INDEX_MALE_TRUE); } @Test public void testWhereWithBoolEqualsTrueAndGroupBy() throws IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male = true " + - "GROUP BY balance " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * " + "FROM %s " + "WHERE male = true " + "GROUP BY balance " + "LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkAggregationResponseSize(response, BANK_INDEX_MALE_TRUE); } @Test public void testWhereWithBoolEqualsTrueAndOrderBy() throws IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male = true " + - "ORDER BY age " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * " + "FROM %s " + "WHERE male = true " + "ORDER BY age " + "LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkResponseSize(response, BANK_INDEX_MALE_TRUE); } @Test public void testWhereWithBoolIsTrue() throws IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male IS true " + - "GROUP BY balance " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * " + "FROM %s " + "WHERE male IS true " + "GROUP BY balance " + "LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkAggregationResponseSize(response, BANK_INDEX_MALE_TRUE); } @Test public void testWhereWithBoolIsNotTrue() throws IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male IS NOT true " + - "GROUP BY balance " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * " + + "FROM %s " + + "WHERE male IS NOT true " + + "GROUP BY balance " + + "LIMIT 5", + 
TestsConstants.TEST_INDEX_BANK)); checkAggregationResponseSize(response, BANK_INDEX_MALE_FALSE); } @Test public void testWhereWithBoolEqualsFalse() throws IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male = false " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * " + "FROM %s " + "WHERE male = false " + "LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkResponseSize(response, BANK_INDEX_MALE_FALSE); } @Test public void testWhereWithBoolEqualsFalseAndGroupBy() throws IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male = false " + - "GROUP BY balance " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * " + "FROM %s " + "WHERE male = false " + "GROUP BY balance " + "LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkAggregationResponseSize(response, BANK_INDEX_MALE_FALSE); } @Test public void testWhereWithBoolEqualsFalseAndOrderBy() throws IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male = false " + - "ORDER BY age " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * " + "FROM %s " + "WHERE male = false " + "ORDER BY age " + "LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkResponseSize(response, BANK_INDEX_MALE_FALSE); } @Test public void testWhereWithBoolIsFalse() throws IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male IS false " + - "GROUP BY balance " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * " + "FROM %s " + "WHERE male IS false " + "GROUP BY balance " + "LIMIT 5", + 
TestsConstants.TEST_INDEX_BANK)); checkAggregationResponseSize(response, BANK_INDEX_MALE_FALSE); } @Test public void testWhereWithBoolIsNotFalse() throws IOException { - JSONObject response = executeQuery( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male IS NOT false " + - "GROUP BY balance " + - "LIMIT 5", - TestsConstants.TEST_INDEX_BANK) - ); + JSONObject response = + executeQuery( + StringUtils.format( + "SELECT * " + + "FROM %s " + + "WHERE male IS NOT false " + + "GROUP BY balance " + + "LIMIT 5", + TestsConstants.TEST_INDEX_BANK)); checkAggregationResponseSize(response, BANK_INDEX_MALE_TRUE); } @Test public void testMultiPartWhere() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE (firstname LIKE 'opal' OR firstname LIKE 'rodriquez') " + - "AND (state like 'oh' OR state like 'hi')", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE (firstname LIKE 'opal' OR firstname LIKE 'rodriquez') " + + "AND (state like 'oh' OR state like 'hi')", + TEST_INDEX_ACCOUNT)); Assert.assertEquals(2, getTotalHits(response)); } @Test public void testMultiPartWhere2() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE ((account_number > 200 AND account_number < 300) OR gender LIKE 'm') " + - "AND (state LIKE 'hi' OR address LIKE 'avenue')", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE ((account_number > 200 AND account_number < 300) OR gender LIKE 'm') " + + "AND (state LIKE 'hi' OR address LIKE 'avenue')", + TEST_INDEX_ACCOUNT)); Assert.assertEquals(127, getTotalHits(response)); } @Test public void testMultiPartWhere3() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - 
"WHERE ((account_number > 25 AND account_number < 75) AND age >35 ) " + - "AND (state LIKE 'md' OR (address LIKE 'avenue' OR address LIKE 'street'))", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE ((account_number > 25 AND account_number < 75) AND age >35 ) " + + "AND (state LIKE 'md' OR (address LIKE 'avenue' OR address LIKE 'street'))", + TEST_INDEX_ACCOUNT)); Assert.assertEquals(7, getTotalHits(response)); } @Test public void filterPolygonTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE GEO_INTERSECTS(place,'POLYGON ((102 2, 103 2, 103 3, 102 3, 102 2))')", - TestsConstants.TEST_INDEX_LOCATION)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE GEO_INTERSECTS(place,'POLYGON ((102 2, 103 2, 103 3, 102 3, 102 2))')", + TestsConstants.TEST_INDEX_LOCATION)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -1163,10 +1276,12 @@ public void filterPolygonTest() throws IOException { @Test public void boundingBox() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, - "SELECT * FROM %s WHERE GEO_BOUNDING_BOX(center, 100.0, 1.0, 101, 0.0)", - TestsConstants.TEST_INDEX_LOCATION)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE GEO_BOUNDING_BOX(center, 100.0, 1.0, 101, 0.0)", + TestsConstants.TEST_INDEX_LOCATION)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -1177,10 +1292,12 @@ public void boundingBox() throws IOException { @Test public void geoDistance() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, - "SELECT * FROM %s WHERE GEO_DISTANCE(center, '1km', 100.5, 0.500001)", - TestsConstants.TEST_INDEX_LOCATION)); + JSONObject response = 
+ executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE GEO_DISTANCE(center, '1km', 100.5, 0.500001)", + TestsConstants.TEST_INDEX_LOCATION)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -1191,10 +1308,12 @@ public void geoDistance() throws IOException { @Test public void geoPolygon() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, - "SELECT * FROM %s WHERE GEO_POLYGON(center, 100,0, 100.5, 2, 101.0,0)", - TestsConstants.TEST_INDEX_LOCATION)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE GEO_POLYGON(center, 100,0, 100.5, 2, 101.0,0)", + TestsConstants.TEST_INDEX_LOCATION)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -1206,36 +1325,45 @@ public void geoPolygon() throws IOException { @Ignore @Test public void escapedCharactersCheck() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE MATCH_PHRASE(nickname, 'Daenerys \"Stormborn\"') " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE MATCH_PHRASE(nickname, 'Daenerys \"Stormborn\"') " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); Assert.assertEquals(1, getTotalHits(response)); } @Test public void complexObjectSearch() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE MATCH_PHRASE(name.firstname, 'Jaime') " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE MATCH_PHRASE(name.firstname, 'Jaime') " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); Assert.assertEquals(1, getTotalHits(response)); } 
@Test public void complexObjectReturnField() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT parents.father " + - "FROM %s " + - "WHERE MATCH_PHRASE(name.firstname, 'Brandon') " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT parents.father " + + "FROM %s " + + "WHERE MATCH_PHRASE(name.firstname, 'Brandon') " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(1, getTotalHits(response)); @@ -1246,14 +1374,18 @@ public void complexObjectReturnField() throws IOException { /** * TODO: Fields prefixed with @ gets converted to SQLVariantRefExpr instead of SQLIdentifierExpr - * Either change SQLVariantRefExpr to SQLIdentifierExpr - * Or handle the special case for SQLVariantRefExpr + * Either change SQLVariantRefExpr to SQLIdentifierExpr Or handle the special case for + * SQLVariantRefExpr */ @Ignore @Test public void queryWithAtFieldOnWhere() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, - "SELECT * FROM %s where @wolf = 'Summer' LIMIT 1000", TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s where @wolf = 'Summer' LIMIT 1000", + TEST_INDEX_GAME_OF_THRONES)); Assert.assertEquals(1, getTotalHits(response)); JSONObject hit = getHits(response).getJSONObject(0); Assert.assertEquals("Summer", hit.get("@wolf")); @@ -1265,19 +1397,22 @@ public void queryWithDotAtStartOfIndexName() throws Exception { TestUtils.createHiddenIndexByRestClient(client(), ".bank", null); TestUtils.loadDataByRestClient(client(), ".bank", "/src/test/resources/.bank.json"); - String response = executeQuery("SELECT education FROM .bank WHERE account_number = 12345", - "jdbc"); + String response = + executeQuery("SELECT education FROM .bank WHERE account_number = 12345", "jdbc"); 
Assert.assertTrue(response.contains("PhD")); } @Test public void notLikeTests() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT name " + - "FROM %s " + - "WHERE name.firstname NOT LIKE 'd%%' AND name IS NOT NULL " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT name " + + "FROM %s " + + "WHERE name.firstname NOT LIKE 'd%%' AND name IS NOT NULL " + + "LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(3, getTotalHits(response)); @@ -1286,45 +1421,49 @@ public void notLikeTests() throws IOException { JSONObject source = getSource(hit); String name = source.getJSONObject("name").getString("firstname"); - Assert - .assertFalse(String.format(Locale.ROOT, "Name [%s] should not match pattern [d%%]", name), - name.startsWith("d")); + Assert.assertFalse( + String.format(Locale.ROOT, "Name [%s] should not match pattern [d%%]", name), + name.startsWith("d")); } } @Test public void isNullTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT name " + - "FROM %s " + - "WHERE nickname IS NULL " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT name " + "FROM %s " + "WHERE nickname IS NULL " + "LIMIT 1000", + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); Assert.assertEquals(6, getTotalHits(response)); } @Test public void isNotNullTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT name " + - "FROM %s " + - "WHERE nickname IS NOT NULL " + - "LIMIT 1000", - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT name " + "FROM %s " + "WHERE nickname IS NOT NULL " + "LIMIT 1000", + 
TestsConstants.TEST_INDEX_GAME_OF_THRONES)); Assert.assertEquals(1, getTotalHits(response)); } @Test public void innerQueryTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s D " + - "WHERE holdersName IN (SELECT firstname " + - "FROM %s " + - "WHERE firstname = 'Hattie')", - TestsConstants.TEST_INDEX_DOG, TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s D " + + "WHERE holdersName IN (SELECT firstname " + + "FROM %s " + + "WHERE firstname = 'Hattie')", + TestsConstants.TEST_INDEX_DOG, + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertEquals(1, hits.length()); @@ -1339,19 +1478,22 @@ public void innerQueryTest() throws IOException { @Ignore @Test public void twoSubQueriesTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE holdersName IN (SELECT firstname " + - "FROM %s " + - "WHERE firstname = 'Hattie') " + - "AND age IN (SELECT name.ofHisName " + - "FROM %s " + - "WHERE name.firstname <> 'Daenerys' " + - "AND name.ofHisName IS NOT NULL) ", - TestsConstants.TEST_INDEX_DOG, - TEST_INDEX_ACCOUNT, - TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE holdersName IN (SELECT firstname " + + "FROM %s " + + "WHERE firstname = 'Hattie') " + + "AND age IN (SELECT name.ofHisName " + + "FROM %s " + + "WHERE name.firstname <> 'Daenerys' " + + "AND name.ofHisName IS NOT NULL) ", + TestsConstants.TEST_INDEX_DOG, + TEST_INDEX_ACCOUNT, + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(1, hits.length()); @@ -1366,14 +1508,18 @@ public void twoSubQueriesTest() throws IOException { @Ignore @Test public void inTermsSubQueryTest() throws IOException { - JSONObject response = executeQuery( - 
String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE age = IN_TERMS (SELECT name.ofHisName " + - "FROM %s " + - "WHERE name.firstname <> 'Daenerys' " + - "AND name.ofHisName IS NOT NULL)", - TestsConstants.TEST_INDEX_DOG, TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE age = IN_TERMS (SELECT name.ofHisName " + + "FROM %s " + + "WHERE name.firstname <> 'Daenerys' " + + "AND name.ofHisName IS NOT NULL)", + TestsConstants.TEST_INDEX_DOG, + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(1, hits.length()); @@ -1388,9 +1534,12 @@ public void inTermsSubQueryTest() throws IOException { @Ignore @Test public void idsQueryOneId() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE _id = IDS_QUERY(dog, 1)", - TestsConstants.TEST_INDEX_DOG)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE _id = IDS_QUERY(dog, 1)", + TestsConstants.TEST_INDEX_DOG)); JSONArray hits = getHits(response); Assert.assertEquals(1, hits.length()); @@ -1405,9 +1554,12 @@ public void idsQueryOneId() throws IOException { @Ignore @Test public void idsQueryMultipleId() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE _id = IDS_QUERY(dog, 1, 2, 3)", - TestsConstants.TEST_INDEX_DOG)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE _id = IDS_QUERY(dog, 1, 2, 3)", + TestsConstants.TEST_INDEX_DOG)); JSONArray hits = getHits(response); Assert.assertEquals(1, hits.length()); @@ -1422,14 +1574,18 @@ public void idsQueryMultipleId() throws IOException { @Ignore @Test public void idsQuerySubQueryIds() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE 
_id = IDS_QUERY(dog, (SELECT name.ofHisName " + - "FROM %s " + - "WHERE name.firstname <> 'Daenerys' " + - "AND name.ofHisName IS NOT NULL))", - TestsConstants.TEST_INDEX_DOG, TestsConstants.TEST_INDEX_GAME_OF_THRONES)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE _id = IDS_QUERY(dog, (SELECT name.ofHisName " + + "FROM %s " + + "WHERE name.firstname <> 'Daenerys' " + + "AND name.ofHisName IS NOT NULL))", + TestsConstants.TEST_INDEX_DOG, + TestsConstants.TEST_INDEX_GAME_OF_THRONES)); JSONArray hits = getHits(response); Assert.assertEquals(1, hits.length()); @@ -1443,18 +1599,24 @@ public void idsQuerySubQueryIds() throws IOException { @Test public void nestedEqualsTestFieldNormalField() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE nested(message.info)='b'", - TestsConstants.TEST_INDEX_NESTED_TYPE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE nested(message.info)='b'", + TestsConstants.TEST_INDEX_NESTED_TYPE)); Assert.assertEquals(1, getTotalHits(response)); } @Test public void nestedEqualsTestFieldInsideArrays() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * FROM %s WHERE nested(message.info) = 'a'", - TestsConstants.TEST_INDEX_NESTED_TYPE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s WHERE nested(message.info) = 'a'", + TestsConstants.TEST_INDEX_NESTED_TYPE)); Assert.assertEquals(2, getTotalHits(response)); } @@ -1462,106 +1624,124 @@ public void nestedEqualsTestFieldInsideArrays() throws IOException { @Ignore // Seems like we don't support nested with IN, throwing IllegalArgumentException @Test public void nestedOnInQuery() throws IOException { - JSONObject response = executeQuery(String.format(Locale.ROOT, - "SELECT * FROM %s where nested(message.info) IN ('a','b')", 
TEST_INDEX_NESTED_TYPE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * FROM %s where nested(message.info) IN ('a','b')", + TEST_INDEX_NESTED_TYPE)); Assert.assertEquals(3, getTotalHits(response)); } @Test public void complexNestedQueryBothOnSameObject() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE nested('message', message.info = 'a' AND message.author ='i')", - TestsConstants.TEST_INDEX_NESTED_TYPE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE nested('message', message.info = 'a' AND message.author ='i')", + TestsConstants.TEST_INDEX_NESTED_TYPE)); Assert.assertEquals(1, getTotalHits(response)); } @Test public void complexNestedQueryNotBothOnSameObject() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE nested('message', message.info = 'a' AND message.author ='h')", - TestsConstants.TEST_INDEX_NESTED_TYPE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s " + + "WHERE nested('message', message.info = 'a' AND message.author ='h')", + TestsConstants.TEST_INDEX_NESTED_TYPE)); Assert.assertEquals(0, getTotalHits(response)); } @Test public void nestedOnInTermsQuery() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT * " + - "FROM %s " + - "WHERE nested(message.info) = IN_TERMS('a', 'b')", - TestsConstants.TEST_INDEX_NESTED_TYPE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT * " + "FROM %s " + "WHERE nested(message.info) = IN_TERMS('a', 'b')", + TestsConstants.TEST_INDEX_NESTED_TYPE)); Assert.assertEquals(3, getTotalHits(response)); } // TODO Uncomment these after problem with loading join index is resolved -// @Test -// public void childrenEqualsTestFieldNormalField() throws 
IOException { -// JSONObject response = executeQuery( -// String.format(Locale.ROOT, "SELECT * " + -// "FROM %s/joinType " + -// "WHERE children(childrenType, info) = 'b'", TestsConstants.TEST_INDEX_JOIN_TYPE)); -// -// Assert.assertEquals(1, getTotalHits(response)); -// } -// -// @Test -// public void childrenOnInQuery() throws IOException { -// JSONObject response = executeQuery( -// String.format(Locale.ROOT, "SELECT * " + -// "FROM %s/joinType " + -// "WHERE children(childrenType, info) IN ('a', 'b')", -// TestsConstants.TEST_INDEX_JOIN_TYPE)); -// -// Assert.assertEquals(2, getTotalHits(response)); -// } -// -// @Test -// public void complexChildrenQueryBothOnSameObject() throws IOException { -// JSONObject response = executeQuery( -// String.format(Locale.ROOT, "SELECT * " + -// "FROM %s/joinType " + -// "WHERE children(childrenType, info = 'a' AND author ='e')", -// TestsConstants.TEST_INDEX_JOIN_TYPE)); -// -// Assert.assertEquals(1, getTotalHits(response)); -// } -// -// @Test -// public void complexChildrenQueryNotOnSameObject() throws IOException { -// JSONObject response = executeQuery( -// String.format(Locale.ROOT, "SELECT * " + -// "FROM %s/joinType " + -// "WHERE children(childrenType, info = 'a' AND author ='j')", -// TestsConstants.TEST_INDEX_JOIN_TYPE)); -// -// Assert.assertEquals(0, getTotalHits(response)); -// } -// -// @Test -// public void childrenOnInTermsQuery() throws IOException { -// JSONObject response = executeQuery( -// String.format(Locale.ROOT, "SELECT * " + -// "FROM %s/joinType " + -// "WHERE children(childrenType, info) = IN_TERMS(a, b)", -// TestsConstants.TEST_INDEX_JOIN_TYPE)); -// -// Assert.assertEquals(2, getTotalHits(response)); -// } + // @Test + // public void childrenEqualsTestFieldNormalField() throws IOException { + // JSONObject response = executeQuery( + // String.format(Locale.ROOT, "SELECT * " + + // "FROM %s/joinType " + + // "WHERE children(childrenType, info) = 'b'", + // TestsConstants.TEST_INDEX_JOIN_TYPE)); 
+ // + // Assert.assertEquals(1, getTotalHits(response)); + // } + // + // @Test + // public void childrenOnInQuery() throws IOException { + // JSONObject response = executeQuery( + // String.format(Locale.ROOT, "SELECT * " + + // "FROM %s/joinType " + + // "WHERE children(childrenType, info) IN ('a', 'b')", + // TestsConstants.TEST_INDEX_JOIN_TYPE)); + // + // Assert.assertEquals(2, getTotalHits(response)); + // } + // + // @Test + // public void complexChildrenQueryBothOnSameObject() throws IOException { + // JSONObject response = executeQuery( + // String.format(Locale.ROOT, "SELECT * " + + // "FROM %s/joinType " + + // "WHERE children(childrenType, info = 'a' AND author + // ='e')", + // TestsConstants.TEST_INDEX_JOIN_TYPE)); + // + // Assert.assertEquals(1, getTotalHits(response)); + // } + // + // @Test + // public void complexChildrenQueryNotOnSameObject() throws IOException { + // JSONObject response = executeQuery( + // String.format(Locale.ROOT, "SELECT * " + + // "FROM %s/joinType " + + // "WHERE children(childrenType, info = 'a' AND author + // ='j')", + // TestsConstants.TEST_INDEX_JOIN_TYPE)); + // + // Assert.assertEquals(0, getTotalHits(response)); + // } + // + // @Test + // public void childrenOnInTermsQuery() throws IOException { + // JSONObject response = executeQuery( + // String.format(Locale.ROOT, "SELECT * " + + // "FROM %s/joinType " + + // "WHERE children(childrenType, info) = IN_TERMS(a, b)", + // TestsConstants.TEST_INDEX_JOIN_TYPE)); + // + // Assert.assertEquals(2, getTotalHits(response)); + // } @Ignore // the hint does not really work, NoSuchIndexException is thrown @Test public void multipleIndicesOneNotExistWithHint() throws IOException { - JSONObject response = executeQuery(String - .format(Locale.ROOT, "SELECT /*! IGNORE_UNAVAILABLE */ * FROM %s,%s ", TEST_INDEX_ACCOUNT, - "badindex")); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT /*! 
IGNORE_UNAVAILABLE */ * FROM %s,%s ", + TEST_INDEX_ACCOUNT, + "badindex")); Assert.assertTrue(getTotalHits(response) > 0); } @@ -1573,8 +1753,8 @@ public void multipleIndicesOneNotExistWithoutHint() throws IOException { String.format(Locale.ROOT, "SELECT * FROM %s, %s", TEST_INDEX_ACCOUNT, "badindex")); Assert.fail("Expected exception, but call succeeded"); } catch (ResponseException e) { - Assert.assertEquals(RestStatus.BAD_REQUEST.getStatus(), - e.getResponse().getStatusLine().getStatusCode()); + Assert.assertEquals( + RestStatus.BAD_REQUEST.getStatus(), e.getResponse().getStatusLine().getStatusCode()); final String entity = TestUtils.getResponseBody(e.getResponse()); Assert.assertThat(entity, containsString("\"type\": \"IndexNotFoundException\"")); } @@ -1582,29 +1762,36 @@ public void multipleIndicesOneNotExistWithoutHint() throws IOException { // TODO Find way to check routing() without SearchRequestBuilder // to properly update these tests to OpenSearchIntegTestCase format -// @Test -// public void routingRequestOneRounting() throws IOException { -// SqlElasticSearchRequestBuilder request = getRequestBuilder(String.format(Locale.ROOT, -// "SELECT /*! ROUTINGS(hey) */ * FROM %s ", TEST_INDEX_ACCOUNT)); -// SearchRequestBuilder searchRequestBuilder = (SearchRequestBuilder) request.getBuilder(); -// Assert.assertEquals("hey",searchRequestBuilder.request().routing()); -// } -// -// @Test -// public void routingRequestMultipleRountings() throws IOException { -// SqlElasticSearchRequestBuilder request = getRequestBuilder(String.format(Locale.ROOT, -// "SELECT /*! 
ROUTINGS(hey,bye) */ * FROM %s ", TEST_INDEX_ACCOUNT)); -// SearchRequestBuilder searchRequestBuilder = (SearchRequestBuilder) request.getBuilder(); -// Assert.assertEquals("hey,bye",searchRequestBuilder.request().routing()); -// } + // @Test + // public void routingRequestOneRounting() throws IOException { + // SqlElasticSearchRequestBuilder request = getRequestBuilder(String.format(Locale.ROOT, + // "SELECT /*! ROUTINGS(hey) */ * FROM %s ", + // TEST_INDEX_ACCOUNT)); + // SearchRequestBuilder searchRequestBuilder = (SearchRequestBuilder) request.getBuilder(); + // Assert.assertEquals("hey",searchRequestBuilder.request().routing()); + // } + // + // @Test + // public void routingRequestMultipleRountings() throws IOException { + // SqlElasticSearchRequestBuilder request = getRequestBuilder(String.format(Locale.ROOT, + // "SELECT /*! ROUTINGS(hey,bye) */ * FROM %s ", + // TEST_INDEX_ACCOUNT)); + // SearchRequestBuilder searchRequestBuilder = (SearchRequestBuilder) request.getBuilder(); + // Assert.assertEquals("hey,bye",searchRequestBuilder.request().routing()); + // } @Ignore // Getting parser error: syntax error, expect RPAREN, actual IDENTIFIER insert_time @Test public void scriptFilterNoParams() throws IOException { - JSONObject result = executeQuery(String.format(Locale.ROOT, - "SELECT insert_time FROM %s where script('doc[\\'insert_time\''].date.hourOfDay==16') " + - "and insert_time <'2014-08-21T00:00:00.000Z'", TEST_INDEX_ONLINE)); + JSONObject result = + executeQuery( + String.format( + Locale.ROOT, + "SELECT insert_time FROM %s where" + + " script('doc[\\'insert_time\''].date.hourOfDay==16') and insert_time" + + " <'2014-08-21T00:00:00.000Z'", + TEST_INDEX_ONLINE)); Assert.assertEquals(237, getTotalHits(result)); } @@ -1612,20 +1799,28 @@ public void scriptFilterNoParams() throws IOException { @Test public void scriptFilterWithParams() throws IOException { - JSONObject result = executeQuery(String.format(Locale.ROOT, - "SELECT insert_time FROM %s where 
script('doc[\\'insert_time\''].date.hourOfDay==x','x'=16) " + - "and insert_time <'2014-08-21T00:00:00.000Z'", TEST_INDEX_ONLINE)); + JSONObject result = + executeQuery( + String.format( + Locale.ROOT, + "SELECT insert_time FROM %s where" + + " script('doc[\\'insert_time\''].date.hourOfDay==x','x'=16) and insert_time" + + " <'2014-08-21T00:00:00.000Z'", + TEST_INDEX_ONLINE)); Assert.assertEquals(237, getTotalHits(result)); } @Test public void highlightPreTagsAndPostTags() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, - "SELECT /*! HIGHLIGHT(phrase, pre_tags : [''], post_tags : ['']) */ " + - "* FROM %s " + - "WHERE phrase LIKE 'fox' " + - "ORDER BY _score", TestsConstants.TEST_INDEX_PHRASE)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT /*! HIGHLIGHT(phrase, pre_tags : [''], post_tags : ['']) */ " + + "* FROM %s " + + "WHERE phrase LIKE 'fox' " + + "ORDER BY _score", + TestsConstants.TEST_INDEX_PHRASE)); JSONArray hits = getHits(response); for (int i = 0; i < hits.length(); i++) { @@ -1640,13 +1835,17 @@ public void highlightPreTagsAndPostTags() throws IOException { @Ignore @Test public void fieldCollapsingTest() throws IOException { - JSONObject response = executeQuery( - String.format(Locale.ROOT, "SELECT /*! COLLAPSE({\"field\":\"age\"," + - "\"inner_hits\":{\"name\": \"account\"," + - "\"size\":1," + - "\"sort\":[{\"age\":\"asc\"}]}," + - "\"max_concurrent_group_searches\": 4}) */ " + - "* FROM %s", TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery( + String.format( + Locale.ROOT, + "SELECT /*! 
COLLAPSE({\"field\":\"age\"," + + "\"inner_hits\":{\"name\": \"account\"," + + "\"size\":1," + + "\"sort\":[{\"age\":\"asc\"}]}," + + "\"max_concurrent_group_searches\": 4}) */ " + + "* FROM %s", + TEST_INDEX_ACCOUNT)); JSONArray hits = getHits(response); Assert.assertEquals(21, hits.length()); @@ -1656,8 +1855,8 @@ public void fieldCollapsingTest() throws IOException { @Test public void backticksQuotedIndexNameTest() throws Exception { TestUtils.createIndexByRestClient(client(), "bank_unquote", null); - TestUtils - .loadDataByRestClient(client(), "bank", "/src/test/resources/bank_for_unquote_test.json"); + TestUtils.loadDataByRestClient( + client(), "bank", "/src/test/resources/bank_for_unquote_test.json"); JSONArray hits = getHits(executeQuery("SELECT lastname FROM `bank`")); Object responseIndex = ((JSONObject) hits.get(0)).query("/_index"); @@ -1665,39 +1864,57 @@ public void backticksQuotedIndexNameTest() throws Exception { assertEquals( executeQuery("SELECT lastname FROM bank", "jdbc"), - executeQuery("SELECT `bank`.`lastname` FROM `bank`", "jdbc") - ); + executeQuery("SELECT `bank`.`lastname` FROM `bank`", "jdbc")); assertEquals( executeQuery( - "SELECT `b`.`age` AS `AGE`, AVG(`b`.`balance`) FROM `bank` AS `b` " + - "WHERE ABS(`b`.`age`) > 20 GROUP BY `b`.`age` ORDER BY `b`.`age`", + "SELECT `b`.`age` AS `AGE`, AVG(`b`.`balance`) FROM `bank` AS `b` " + + "WHERE ABS(`b`.`age`) > 20 GROUP BY `b`.`age` ORDER BY `b`.`age`", "jdbc"), - executeQuery("SELECT b.age AS AGE, AVG(balance) FROM bank AS b " + - "WHERE ABS(age) > 20 GROUP BY b.age ORDER BY b.age", - "jdbc") - ); + executeQuery( + "SELECT b.age AS AGE, AVG(balance) FROM bank AS b " + + "WHERE ABS(age) > 20 GROUP BY b.age ORDER BY b.age", + "jdbc")); } @Test public void backticksQuotedFieldNamesTest() { - String expected = executeQuery(StringUtils.format("SELECT b.lastname FROM %s " + - "AS b ORDER BY age LIMIT 3", TestsConstants.TEST_INDEX_BANK), "jdbc"); - String quotedFieldResult = 
executeQuery(StringUtils.format("SELECT b.`lastname` FROM %s " + - "AS b ORDER BY age LIMIT 3", TestsConstants.TEST_INDEX_BANK), "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT b.lastname FROM %s " + "AS b ORDER BY age LIMIT 3", + TestsConstants.TEST_INDEX_BANK), + "jdbc"); + String quotedFieldResult = + executeQuery( + StringUtils.format( + "SELECT b.`lastname` FROM %s " + "AS b ORDER BY age LIMIT 3", + TestsConstants.TEST_INDEX_BANK), + "jdbc"); assertEquals(expected, quotedFieldResult); } @Test public void backticksQuotedAliasTest() { - String expected = executeQuery(StringUtils.format("SELECT b.lastname FROM %s " + - "AS b ORDER BY age LIMIT 3", TestsConstants.TEST_INDEX_BANK), "jdbc"); - String quotedAliasResult = executeQuery(StringUtils.format("SELECT `b`.lastname FROM %s" + - " AS `b` ORDER BY age LIMIT 3", TestsConstants.TEST_INDEX_BANK), "jdbc"); + String expected = + executeQuery( + StringUtils.format( + "SELECT b.lastname FROM %s " + "AS b ORDER BY age LIMIT 3", + TestsConstants.TEST_INDEX_BANK), + "jdbc"); + String quotedAliasResult = + executeQuery( + StringUtils.format( + "SELECT `b`.lastname FROM %s" + " AS `b` ORDER BY age LIMIT 3", + TestsConstants.TEST_INDEX_BANK), + "jdbc"); String quotedAliasAndFieldResult = - executeQuery(StringUtils.format("SELECT `b`.`lastname` FROM %s " + - "AS `b` ORDER BY age LIMIT 3", TestsConstants.TEST_INDEX_BANK), "jdbc"); + executeQuery( + StringUtils.format( + "SELECT `b`.`lastname` FROM %s " + "AS `b` ORDER BY age LIMIT 3", + TestsConstants.TEST_INDEX_BANK), + "jdbc"); assertEquals(expected, quotedAliasResult); assertEquals(expected, quotedAliasAndFieldResult); @@ -1705,19 +1922,28 @@ public void backticksQuotedAliasTest() { @Test public void backticksQuotedAliasWithSpecialCharactersTest() { - String expected = executeQuery(StringUtils.format("SELECT b.lastname FROM %s " + - "AS b ORDER BY age LIMIT 3", TestsConstants.TEST_INDEX_BANK), "jdbc"); + String expected = + executeQuery( + 
StringUtils.format( + "SELECT b.lastname FROM %s " + "AS b ORDER BY age LIMIT 3", + TestsConstants.TEST_INDEX_BANK), + "jdbc"); String specialCharAliasResult = - executeQuery(StringUtils.format("SELECT `b k`.lastname FROM %s " + - "AS `b k` ORDER BY age LIMIT 3", TestsConstants.TEST_INDEX_BANK), "jdbc"); + executeQuery( + StringUtils.format( + "SELECT `b k`.lastname FROM %s " + "AS `b k` ORDER BY age LIMIT 3", + TestsConstants.TEST_INDEX_BANK), + "jdbc"); assertEquals(expected, specialCharAliasResult); } @Test public void backticksQuotedAliasInJDBCResponseTest() { - String query = StringUtils.format("SELECT `b`.`lastname` AS `name` FROM %s AS `b` " + - "ORDER BY age LIMIT 3", TestsConstants.TEST_INDEX_BANK); + String query = + StringUtils.format( + "SELECT `b`.`lastname` AS `name` FROM %s AS `b` " + "ORDER BY age LIMIT 3", + TestsConstants.TEST_INDEX_BANK); String response = executeQuery(query, "jdbc"); assertTrue(response.contains("\"alias\": \"name\"")); @@ -1725,10 +1951,14 @@ public void backticksQuotedAliasInJDBCResponseTest() { @Test public void caseWhenSwitchTest() throws IOException { - JSONObject response = executeQuery("SELECT CASE age " + - "WHEN 30 THEN '1' " + - "WHEN 40 THEN '2' " + - "ELSE '0' END AS cases FROM " + TEST_INDEX_ACCOUNT + " WHERE age IS NOT NULL"); + JSONObject response = + executeQuery( + "SELECT CASE age " + + "WHEN 30 THEN '1' " + + "WHEN 40 THEN '2' " + + "ELSE '0' END AS cases FROM " + + TEST_INDEX_ACCOUNT + + " WHERE age IS NOT NULL"); JSONObject hit = getHits(response).getJSONObject(0); String age = hit.query("/_source/age").toString(); String cases = age.equals("30") ? "1" : age.equals("40") ? 
"2" : "0"; @@ -1738,49 +1968,61 @@ public void caseWhenSwitchTest() throws IOException { @Test public void caseWhenJdbcResponseTest() { - String response = executeQuery("SELECT CASE age " + - "WHEN 30 THEN 'age is 30' " + - "WHEN 40 THEN 'age is 40' " + - "ELSE 'NA' END AS cases FROM " + TEST_INDEX_ACCOUNT + " WHERE age is not null", "jdbc"); + String response = + executeQuery( + "SELECT CASE age " + + "WHEN 30 THEN 'age is 30' " + + "WHEN 40 THEN 'age is 40' " + + "ELSE 'NA' END AS cases FROM " + + TEST_INDEX_ACCOUNT + + " WHERE age is not null", + "jdbc"); assertTrue( - response.contains("age is 30") || - response.contains("age is 40") || - response.contains("NA") - ); + response.contains("age is 30") + || response.contains("age is 40") + || response.contains("NA")); } @Ignore("This is already supported in new SQL engine") @Test public void functionInCaseFieldShouldThrowESExceptionDueToIllegalScriptInJdbc() { - String response = executeQuery( - "select case lower(firstname) when 'amber' then '1' else '2' end as cases from " + - TEST_INDEX_ACCOUNT, - "jdbc"); - queryInJdbcResponseShouldIndicateESException(response, "SearchPhaseExecutionException", + String response = + executeQuery( + "select case lower(firstname) when 'amber' then '1' else '2' end as cases from " + + TEST_INDEX_ACCOUNT, + "jdbc"); + queryInJdbcResponseShouldIndicateESException( + response, + "SearchPhaseExecutionException", "For more details, please send request for Json format"); } @Ignore("This is already supported in our new query engine") @Test public void functionCallWithIllegalScriptShouldThrowESExceptionInJdbc() { - String response = executeQuery("select log(balance + 2) from " + TEST_INDEX_BANK, - "jdbc"); - queryInJdbcResponseShouldIndicateESException(response, "SearchPhaseExecutionException", + String response = executeQuery("select log(balance + 2) from " + TEST_INDEX_BANK, "jdbc"); + queryInJdbcResponseShouldIndicateESException( + response, + "SearchPhaseExecutionException", "please 
send request for Json format to see the raw response from OpenSearch engine."); } - @Ignore("Goes in different route, does not call PrettyFormatRestExecutor.execute methods." + - "The performRequest method in RestClient doesn't throw any exceptions for null value fields in script") + @Ignore( + "Goes in different route, does not call PrettyFormatRestExecutor.execute methods.The" + + " performRequest method in RestClient doesn't throw any exceptions for null value" + + " fields in script") @Test public void functionArgWithNullValueFieldShouldThrowESExceptionInJdbc() { - String response = executeQuery( - "select log(balance) from " + TEST_INDEX_BANK_WITH_NULL_VALUES, "jdbc"); - queryInJdbcResponseShouldIndicateESException(response, "SearchPhaseExecutionException", + String response = + executeQuery("select log(balance) from " + TEST_INDEX_BANK_WITH_NULL_VALUES, "jdbc"); + queryInJdbcResponseShouldIndicateESException( + response, + "SearchPhaseExecutionException", "For more details, please send request for Json format"); } - private void queryInJdbcResponseShouldIndicateESException(String response, String exceptionType, - String... errMsgs) { + private void queryInJdbcResponseShouldIndicateESException( + String response, String exceptionType, String... 
errMsgs) { Assert.assertThat(response, containsString(exceptionType)); for (String errMsg : errMsgs) { Assert.assertThat(response, containsString(errMsg)); @@ -1803,9 +2045,21 @@ private void checkAggregationResponseSize(JSONObject response, int sizeCheck) { private void checkSelectAllAndFieldResponseSize(JSONObject response) { String[] arr = - new String[] {"account_number", "firstname", "address", "birthdate", "gender", "city", - "lastname", - "balance", "employer", "state", "age", "email", "male"}; + new String[] { + "account_number", + "firstname", + "address", + "birthdate", + "gender", + "city", + "lastname", + "balance", + "employer", + "state", + "age", + "email", + "male" + }; Set expectedSource = new HashSet<>(Arrays.asList(arr)); JSONArray hits = getHits(response); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/RestIntegTestCase.java b/integ-test/src/test/java/org/opensearch/sql/legacy/RestIntegTestCase.java index dd48d82114..fc9112afdf 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/RestIntegTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/RestIntegTestCase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.opensearch.core.common.Strings.isNullOrEmpty; @@ -50,16 +49,14 @@ /** * SQL plugin integration test base class (migrated from SQLIntegTestCase) - *

- * The execution of order is as follows: - *

- * OpenSearchRestTestCase: 1) initClient() N+1) closeClient() - * \ / - * SQLIntegTestCase: 2) setUpIndices() -> 4) setUpIndices() ... -> N) cleanUpIndices() - * \ \ - * XXXTIT: 3) init() 5) init() - *

- * TODO: this base class should extends ODFERestTestCase + * + *

The execution of order is as follows: + * + *

OpenSearchRestTestCase: 1) initClient() N+1) closeClient() \ / SQLIntegTestCase: 2) + * setUpIndices() -> 4) setUpIndices() ... -> N) cleanUpIndices() \ \ XXXTIT: 3) init() 5) + * init() + * + *

TODO: this base class should extends ODFERestTestCase */ public abstract class RestIntegTestCase extends OpenSearchSQLRestTestCase { @@ -78,12 +75,12 @@ protected boolean preserveClusterUponCompletion() { } /** - * We need to be able to dump the jacoco coverage before cluster is shut down. - * The new internal testing framework removed some of the gradle tasks we were listening to - * to choose a good time to do it. This will dump the executionData to file after each test. - * TODO: This is also currently just overwriting integTest.exec with the updated execData without - * resetting after writing each time. This can be improved to either write an exec file per test - * or by letting jacoco append to the file + * We need to be able to dump the jacoco coverage before cluster is shut down. The new internal + * testing framework removed some of the gradle tasks we were listening to to choose a good time + * to do it. This will dump the executionData to file after each test. TODO: This is also + * currently just overwriting integTest.exec with the updated execData without resetting after + * writing each time. 
This can be improved to either write an exec file per test or by letting + * jacoco append to the file */ public interface IProxy { byte[] getExecutionData(boolean reset); @@ -104,10 +101,12 @@ public static void dumpCoverage() { String serverUrl = "service:jmx:rmi:///jndi/rmi://127.0.0.1:7777/jmxrmi"; try (JMXConnector connector = JMXConnectorFactory.connect(new JMXServiceURL(serverUrl))) { - IProxy proxy = MBeanServerInvocationHandler.newProxyInstance( - connector.getMBeanServerConnection(), new ObjectName("org.jacoco:type=Runtime"), - IProxy.class, - false); + IProxy proxy = + MBeanServerInvocationHandler.newProxyInstance( + connector.getMBeanServerConnection(), + new ObjectName("org.jacoco:type=Runtime"), + IProxy.class, + false); Path path = Paths.get(jacocoBuildPath + "/integTest.exec"); Files.write(path, proxy.getExecutionData(false)); @@ -117,9 +116,9 @@ public static void dumpCoverage() { } /** - * As JUnit JavaDoc says: - * "The @AfterClass methods declared in superclasses will be run after those of the current class." - * So this method is supposed to run before closeClients() in parent class. + * As JUnit JavaDoc says: "The @AfterClass methods declared in superclasses will be run after + * those of the current class." So this method is supposed to run before closeClients() in parent + * class. */ @AfterClass public static void cleanUpIndices() throws IOException { @@ -128,8 +127,8 @@ public static void cleanUpIndices() throws IOException { } /** - * Make it thread-safe in case tests are running in parallel but does not guarantee - * if test like DeleteIT that mutates cluster running in parallel. + * Make it thread-safe in case tests are running in parallel but does not guarantee if test like + * DeleteIT that mutates cluster running in parallel. 
*/ protected synchronized void loadIndex(Index index) throws IOException { String indexName = index.getName(); @@ -142,11 +141,8 @@ protected synchronized void loadIndex(Index index) throws IOException { } } - /** - * Provide for each test to load test index, data and other setup work - */ - protected void init() throws Exception { - } + /** Provide for each test to load test index, data and other setup work */ + protected void init() throws Exception {} protected static void updateClusterSetting(String settingKey, Object value) throws IOException { updateClusterSetting(settingKey, value, true); @@ -155,18 +151,18 @@ protected static void updateClusterSetting(String settingKey, Object value) thro protected static void updateClusterSetting(String settingKey, Object value, boolean persistent) throws IOException { String property = persistent ? PERSISTENT : TRANSIENT; - XContentBuilder builder = XContentFactory - .jsonBuilder() - .startObject() - .startObject(property) - .field(settingKey, value) - .endObject() - .endObject(); + XContentBuilder builder = + XContentFactory.jsonBuilder() + .startObject() + .startObject(property) + .field(settingKey, value) + .endObject() + .endObject(); Request request = new Request("PUT", "_cluster/settings"); request.setJsonEntity(builder.toString()); Response response = client().performRequest(request); - Assert - .assertEquals(RestStatus.OK, RestStatus.fromCode(response.getStatusLine().getStatusCode())); + Assert.assertEquals( + RestStatus.OK, RestStatus.fromCode(response.getStatusLine().getStatusCode())); } protected static void wipeAllClusterSettings() throws IOException { @@ -174,103 +170,109 @@ protected static void wipeAllClusterSettings() throws IOException { updateClusterSetting("*", null, false); } - /** - * Enum for associating test index with relevant mapping and data. - */ + /** Enum for associating test index with relevant mapping and data. 
*/ public enum Index { - ONLINE(TestsConstants.TEST_INDEX_ONLINE, - "online", - null, - "src/test/resources/online.json"), - ACCOUNT(TestsConstants.TEST_INDEX_ACCOUNT, + ONLINE(TestsConstants.TEST_INDEX_ONLINE, "online", null, "src/test/resources/online.json"), + ACCOUNT( + TestsConstants.TEST_INDEX_ACCOUNT, "account", getAccountIndexMapping(), "src/test/resources/accounts.json"), - PHRASE(TestsConstants.TEST_INDEX_PHRASE, + PHRASE( + TestsConstants.TEST_INDEX_PHRASE, "phrase", getPhraseIndexMapping(), "src/test/resources/phrases.json"), - DOG(TestsConstants.TEST_INDEX_DOG, - "dog", - getDogIndexMapping(), - "src/test/resources/dogs.json"), - DOGS2(TestsConstants.TEST_INDEX_DOG2, + DOG(TestsConstants.TEST_INDEX_DOG, "dog", getDogIndexMapping(), "src/test/resources/dogs.json"), + DOGS2( + TestsConstants.TEST_INDEX_DOG2, "dog", getDogs2IndexMapping(), "src/test/resources/dogs2.json"), - DOGS3(TestsConstants.TEST_INDEX_DOG3, + DOGS3( + TestsConstants.TEST_INDEX_DOG3, "dog", getDogs3IndexMapping(), "src/test/resources/dogs3.json"), - DOGSSUBQUERY(TestsConstants.TEST_INDEX_DOGSUBQUERY, + DOGSSUBQUERY( + TestsConstants.TEST_INDEX_DOGSUBQUERY, "dog", getDogIndexMapping(), "src/test/resources/dogsubquery.json"), - PEOPLE(TestsConstants.TEST_INDEX_PEOPLE, - "people", - null, - "src/test/resources/peoples.json"), - PEOPLE2(TestsConstants.TEST_INDEX_PEOPLE2, + PEOPLE(TestsConstants.TEST_INDEX_PEOPLE, "people", null, "src/test/resources/peoples.json"), + PEOPLE2( + TestsConstants.TEST_INDEX_PEOPLE2, "people", getPeople2IndexMapping(), "src/test/resources/people2.json"), - GAME_OF_THRONES(TestsConstants.TEST_INDEX_GAME_OF_THRONES, + GAME_OF_THRONES( + TestsConstants.TEST_INDEX_GAME_OF_THRONES, "gotCharacters", getGameOfThronesIndexMapping(), "src/test/resources/game_of_thrones_complex.json"), - SYSTEM(TestsConstants.TEST_INDEX_SYSTEM, - "systems", - null, - "src/test/resources/systems.json"), - ODBC(TestsConstants.TEST_INDEX_ODBC, + SYSTEM(TestsConstants.TEST_INDEX_SYSTEM, 
"systems", null, "src/test/resources/systems.json"), + ODBC( + TestsConstants.TEST_INDEX_ODBC, "odbc", getOdbcIndexMapping(), "src/test/resources/odbc-date-formats.json"), - LOCATION(TestsConstants.TEST_INDEX_LOCATION, + LOCATION( + TestsConstants.TEST_INDEX_LOCATION, "location", getLocationIndexMapping(), "src/test/resources/locations.json"), - LOCATION_TWO(TestsConstants.TEST_INDEX_LOCATION2, + LOCATION_TWO( + TestsConstants.TEST_INDEX_LOCATION2, "location2", getLocationIndexMapping(), "src/test/resources/locations2.json"), - NESTED(TestsConstants.TEST_INDEX_NESTED_TYPE, + NESTED( + TestsConstants.TEST_INDEX_NESTED_TYPE, "nestedType", getNestedTypeIndexMapping(), "src/test/resources/nested_objects.json"), - NESTED_WITH_QUOTES(TestsConstants.TEST_INDEX_NESTED_WITH_QUOTES, + NESTED_WITH_QUOTES( + TestsConstants.TEST_INDEX_NESTED_WITH_QUOTES, "nestedType", getNestedTypeIndexMapping(), "src/test/resources/nested_objects_quotes_in_values.json"), - EMPLOYEE_NESTED(TestsConstants.TEST_INDEX_EMPLOYEE_NESTED, + EMPLOYEE_NESTED( + TestsConstants.TEST_INDEX_EMPLOYEE_NESTED, "_doc", getEmployeeNestedTypeIndexMapping(), "src/test/resources/employee_nested.json"), - JOIN(TestsConstants.TEST_INDEX_JOIN_TYPE, + JOIN( + TestsConstants.TEST_INDEX_JOIN_TYPE, "joinType", getJoinTypeIndexMapping(), "src/test/resources/join_objects.json"), - BANK(TestsConstants.TEST_INDEX_BANK, + BANK( + TestsConstants.TEST_INDEX_BANK, "account", getBankIndexMapping(), "src/test/resources/bank.json"), - BANK_TWO(TestsConstants.TEST_INDEX_BANK_TWO, + BANK_TWO( + TestsConstants.TEST_INDEX_BANK_TWO, "account_two", getBankIndexMapping(), "src/test/resources/bank_two.json"), - BANK_WITH_NULL_VALUES(TestsConstants.TEST_INDEX_BANK_WITH_NULL_VALUES, + BANK_WITH_NULL_VALUES( + TestsConstants.TEST_INDEX_BANK_WITH_NULL_VALUES, "account_null", getBankWithNullValuesIndexMapping(), "src/test/resources/bank_with_null_values.json"), - ORDER(TestsConstants.TEST_INDEX_ORDER, + ORDER( + TestsConstants.TEST_INDEX_ORDER, 
"_doc", getOrderIndexMapping(), "src/test/resources/order.json"), - WEBLOG(TestsConstants.TEST_INDEX_WEBLOG, + WEBLOG( + TestsConstants.TEST_INDEX_WEBLOG, "weblog", getWeblogsIndexMapping(), "src/test/resources/weblogs.json"), - DATE(TestsConstants.TEST_INDEX_DATE, + DATE( + TestsConstants.TEST_INDEX_DATE, "dates", getDateIndexMapping(), "src/test/resources/dates.json"); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/SQLFunctionsIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/SQLFunctionsIT.java index c1c1a26f4a..877f803189 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/SQLFunctionsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/SQLFunctionsIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.allOf; @@ -45,10 +44,7 @@ import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; - -/** - * Created by allwefantasy on 8/25/16. - */ +/** Created by allwefantasy on 8/25/16. 
*/ public class SQLFunctionsIT extends SQLIntegTestCase { @Override @@ -61,70 +57,75 @@ protected void init() throws Exception { @Test public void functionFieldAliasAndGroupByAlias() throws Exception { - String query = "SELECT " + - "floor(substring(address,0,3)*20) as key," + - "sum(age) cvalue FROM " + TEST_INDEX_ACCOUNT + " where address is not null " + - "group by key order by cvalue desc limit 10 "; + String query = + "SELECT " + + "floor(substring(address,0,3)*20) as key," + + "sum(age) cvalue FROM " + + TEST_INDEX_ACCOUNT + + " where address is not null " + + "group by key order by cvalue desc limit 10 "; final JSONObject result = executeQuery(query); - - IntStream.rangeClosed(0, 9).forEach(i -> { - Assert.assertNotNull(result.query(String.format("/aggregations/key/buckets/%d/key", i))); - Assert.assertNotNull( - result.query(String.format("/aggregations/key/buckets/%d/cvalue/value", i))); - } - ); + IntStream.rangeClosed(0, 9) + .forEach( + i -> { + Assert.assertNotNull( + result.query(String.format("/aggregations/key/buckets/%d/key", i))); + Assert.assertNotNull( + result.query(String.format("/aggregations/key/buckets/%d/cvalue/value", i))); + }); } /** * todo fix the issue. 
* - * @see https://github.com/opendistro-for-elasticsearch/sql/issues/59 + * @see https://github.com/opendistro-for-elasticsearch/sql/issues/59 */ @Ignore public void normalFieldAlias() throws Exception { - //here is a bug,csv field with spa - String query = "SELECT " + - "address as key,age from " + - TEST_INDEX_ACCOUNT + " where address is not null " + - "limit 10 "; + // here is a bug,csv field with spa + String query = + "SELECT " + + "address as key,age from " + + TEST_INDEX_ACCOUNT + + " where address is not null " + + "limit 10 "; - assertThat( - executeQuery(query), - hitAny(kvString("/_source/key", not(isEmptyOrNullString()))) - ); + assertThat(executeQuery(query), hitAny(kvString("/_source/key", not(isEmptyOrNullString())))); } @Test public void functionAlias() throws Exception { - //here is a bug,if only script fields are included,then all fields will return; fix later - String query = "SELECT " + - "substring(address,0,3) as key,address from " + - TEST_INDEX_ACCOUNT + " where address is not null " + - "order by address desc limit 10 "; + // here is a bug,if only script fields are included,then all fields will return; fix later + String query = + "SELECT " + + "substring(address,0,3) as key,address from " + + TEST_INDEX_ACCOUNT + + " where address is not null " + + "order by address desc limit 10 "; assertThat( executeQuery(query), - hitAny(both(kvString("/_source/address", equalTo("863 Wythe Place"))) - .and(kvString("/fields/key/0", - equalTo("863")))) - ); + hitAny( + both(kvString("/_source/address", equalTo("863 Wythe Place"))) + .and(kvString("/fields/key/0", equalTo("863"))))); } @Test public void caseChangeTest() throws IOException { - String query = "SELECT LOWER(firstname) " + - "FROM opensearch-sql_test_index_account " + - "WHERE UPPER(lastname)='DUKE' " + - "ORDER BY upper(lastname) "; + String query = + "SELECT LOWER(firstname) " + + "FROM opensearch-sql_test_index_account " + + "WHERE UPPER(lastname)='DUKE' " + + "ORDER BY 
upper(lastname) "; assertThat( executeQuery(query), hitAny( kvString("/_source/address", equalTo("880 Holmes Lane")), - kvString("/fields/LOWER(firstname)/0", equalTo("amber"))) - ); + kvString("/fields/LOWER(firstname)/0", equalTo("amber")))); } @Test @@ -133,23 +134,23 @@ public void caseChangeTestWithLocale() throws IOException { // "IL".toLowerCase() in a Turkish locale returns "ıl" // https://stackoverflow.com/questions/11063102/using-locales-with-javas-tolowercase-and-touppercase - String query = "SELECT LOWER(state.keyword, 'tr') " + - "FROM opensearch-sql_test_index_account " + - "WHERE account_number=1"; + String query = + "SELECT LOWER(state.keyword, 'tr') " + + "FROM opensearch-sql_test_index_account " + + "WHERE account_number=1"; assertThat( executeQuery(query), - hitAny( - kvString("/fields/LOWER(state.keyword, 'tr')/0", equalTo("ıl"))) - ); + hitAny(kvString("/fields/LOWER(state.keyword, 'tr')/0", equalTo("ıl")))); } @Test public void caseChangeWithAggregationTest() throws IOException { - String query = "SELECT UPPER(e.firstname) AS upper, COUNT(*)" + - "FROM opensearch-sql_test_index_account e " + - "WHERE LOWER(e.lastname)='duke' " + - "GROUP BY upper"; + String query = + "SELECT UPPER(e.firstname) AS upper, COUNT(*)" + + "FROM opensearch-sql_test_index_account e " + + "WHERE LOWER(e.lastname)='duke' " + + "GROUP BY upper"; assertThat( executeQuery(query), @@ -158,8 +159,10 @@ public void caseChangeWithAggregationTest() throws IOException { @Test public void castIntFieldToDoubleWithoutAliasTest() throws IOException { - String query = "SELECT CAST(age AS DOUBLE) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " ORDER BY age DESC LIMIT 5"; + String query = + "SELECT CAST(age AS DOUBLE) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY age DESC LIMIT 5"; SearchHit[] hits = query(query).getHits(); checkSuccessfulFieldCast(hits, "cast_age", "DOUBLE"); @@ -171,8 +174,9 @@ public void castIntFieldToDoubleWithoutAliasTest() throws IOException { 
@Test public void castIntFieldToDoubleWithAliasTest() throws IOException { String query = - "SELECT CAST(age AS DOUBLE) AS test_alias FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " ORDER BY age LIMIT 5"; + "SELECT CAST(age AS DOUBLE) AS test_alias FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY age LIMIT 5"; SearchHit[] hits = query(query).getHits(); checkSuccessfulFieldCast(hits, "test_alias", "DOUBLE"); @@ -183,8 +187,10 @@ public void castIntFieldToDoubleWithAliasTest() throws IOException { @Test public void castIntFieldToStringWithoutAliasTest() throws IOException { - String query = "SELECT CAST(balance AS STRING) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " ORDER BY balance LIMIT 1"; + String query = + "SELECT CAST(balance AS STRING) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY balance LIMIT 1"; SearchHit[] hits = query(query).getHits(); checkSuccessfulFieldCast(hits, "cast_balance", "STRING"); @@ -195,48 +201,51 @@ public void castIntFieldToStringWithoutAliasTest() throws IOException { @Test public void castIntFieldToStringWithAliasTest() throws IOException { - String query = "SELECT CAST(balance AS STRING) AS cast_string_alias FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + - " ORDER BY cast_string_alias DESC LIMIT 1"; + String query = + "SELECT CAST(balance AS STRING) AS cast_string_alias FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY cast_string_alias DESC LIMIT 1"; SearchHit[] hits = query(query).getHits(); checkSuccessfulFieldCast(hits, "cast_string_alias", "STRING"); for (int i = 0; i < hits.length; ++i) { Assert.assertThat(hits[i].getFields().get("cast_string_alias").getValue(), is("9838")); } - } @Test public void castIntFieldToFloatWithoutAliasJdbcFormatTest() { - JSONObject response = executeJdbcRequest( - "SELECT CAST(balance AS FLOAT) AS cast_balance FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " ORDER BY balance DESC LIMIT 1"); + JSONObject response = + executeJdbcRequest( + "SELECT CAST(balance AS 
FLOAT) AS cast_balance FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY balance DESC LIMIT 1"); - verifySchema(response, - schema("CAST(balance AS FLOAT)", "cast_balance", "float")); + verifySchema(response, schema("CAST(balance AS FLOAT)", "cast_balance", "float")); - verifyDataRows(response, - rows(49989.0)); + verifyDataRows(response, rows(49989.0)); } @Test public void castIntFieldToFloatWithAliasJdbcFormatTest() { - JSONObject response = executeJdbcRequest( - "SELECT CAST(balance AS FLOAT) AS jdbc_float_alias " + - "FROM " + TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY jdbc_float_alias LIMIT 1"); + JSONObject response = + executeJdbcRequest( + "SELECT CAST(balance AS FLOAT) AS jdbc_float_alias " + + "FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY jdbc_float_alias LIMIT 1"); - verifySchema(response, - schema("CAST(balance AS FLOAT)", "jdbc_float_alias", "float")); + verifySchema(response, schema("CAST(balance AS FLOAT)", "jdbc_float_alias", "float")); - verifyDataRows(response, - rows(1011.0)); + verifyDataRows(response, rows(1011.0)); } @Test public void castIntFieldToDoubleWithoutAliasOrderByTest() throws IOException { - String query = "SELECT CAST(age AS DOUBLE) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " ORDER BY age LIMIT 1"; + String query = + "SELECT CAST(age AS DOUBLE) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY age LIMIT 1"; SearchHit[] hits = query(query).getHits(); checkSuccessfulFieldCast(hits, "cast_age", "DOUBLE"); @@ -247,148 +256,138 @@ public void castIntFieldToDoubleWithoutAliasOrderByTest() throws IOException { @Test public void castIntFieldToDoubleWithAliasOrderByTest() throws IOException { - String query = "SELECT CAST(age AS DOUBLE) AS alias FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " ORDER BY alias DESC LIMIT 1"; + String query = + "SELECT CAST(age AS DOUBLE) AS alias FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY alias DESC LIMIT 1"; SearchHit[] hits = 
query(query).getHits(); checkSuccessfulFieldCast(hits, "alias", "DOUBLE"); for (int i = 0; i < hits.length; ++i) { Assert.assertThat(hits[i].getFields().get("alias").getValue(), is(40.0)); } - } @Test public void castIntFieldToFloatWithoutAliasJdbcFormatGroupByTest() { - JSONObject response = executeJdbcRequest( - "SELECT CAST(balance AS FLOAT) FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + " GROUP BY balance ORDER BY balance DESC LIMIT 5"); + JSONObject response = + executeJdbcRequest( + "SELECT CAST(balance AS FLOAT) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " GROUP BY balance ORDER BY balance DESC LIMIT 5"); - verifySchema(response, - schema("CAST(balance AS FLOAT)", null, "float")); + verifySchema(response, schema("CAST(balance AS FLOAT)", null, "float")); - verifyDataRows(response, - rows(49989.0), - rows(49795.0), - rows(49741.0), - rows(49671.0), - rows(49587.0)); + verifyDataRows( + response, rows(49989.0), rows(49795.0), rows(49741.0), rows(49671.0), rows(49587.0)); } @Test public void castIntFieldToFloatWithAliasJdbcFormatGroupByTest() { - JSONObject response = executeJdbcRequest( - "SELECT CAST(balance AS FLOAT) AS jdbc_float_alias " - + " FROM " + TestsConstants.TEST_INDEX_ACCOUNT - + " GROUP BY jdbc_float_alias " - + " ORDER BY jdbc_float_alias ASC " - + " LIMIT 5"); + JSONObject response = + executeJdbcRequest( + "SELECT CAST(balance AS FLOAT) AS jdbc_float_alias " + + " FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " GROUP BY jdbc_float_alias " + + " ORDER BY jdbc_float_alias ASC " + + " LIMIT 5"); - verifySchema(response, - schema("CAST(balance AS FLOAT)", "jdbc_float_alias", "float")); + verifySchema(response, schema("CAST(balance AS FLOAT)", "jdbc_float_alias", "float")); - verifyDataRows(response, - rows(1011.0), - rows(10116.0), - rows(10138.0), - rows(10147.0), - rows(10178.0)); + verifyDataRows( + response, rows(1011.0), rows(10116.0), rows(10138.0), rows(10147.0), rows(10178.0)); } @Test public void 
castIntFieldToDoubleWithAliasJdbcFormatGroupByTest() { - JSONObject response = executeJdbcRequest( - "SELECT CAST(age AS DOUBLE) AS jdbc_double_alias " + - "FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " GROUP BY jdbc_double_alias DESC LIMIT 5"); + JSONObject response = + executeJdbcRequest( + "SELECT CAST(age AS DOUBLE) AS jdbc_double_alias " + + "FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " GROUP BY jdbc_double_alias DESC LIMIT 5"); - verifySchema(response, - schema("jdbc_double_alias", "jdbc_double_alias", "double")); + verifySchema(response, schema("jdbc_double_alias", "jdbc_double_alias", "double")); - verifyDataRows(response, - rows("31.0"), - rows("39.0"), - rows("26.0"), - rows("32.0"), - rows("35.0")); + verifyDataRows(response, rows("31.0"), rows("39.0"), rows("26.0"), rows("32.0"), rows("35.0")); } @Test public void castKeywordFieldToDatetimeWithoutAliasJdbcFormatTest() { - JSONObject response = executeJdbcRequest("SELECT CAST(date_keyword AS DATETIME) FROM " - + TestsConstants.TEST_INDEX_DATE + " ORDER BY date_keyword"); + JSONObject response = + executeJdbcRequest( + "SELECT CAST(date_keyword AS DATETIME) FROM " + + TestsConstants.TEST_INDEX_DATE + + " ORDER BY date_keyword"); verifySchema(response, schema("cast_date_keyword", null, "date")); - verifyDataRows(response, - rows("2014-08-19 07:09:13.434"), - rows("2019-09-25 02:04:13.469")); + verifyDataRows(response, rows("2014-08-19 07:09:13.434"), rows("2019-09-25 02:04:13.469")); } @Test public void castKeywordFieldToDatetimeWithAliasJdbcFormatTest() { JSONObject response = - executeJdbcRequest("SELECT CAST(date_keyword AS DATETIME) AS test_alias FROM " - + TestsConstants.TEST_INDEX_DATE + " ORDER BY date_keyword"); + executeJdbcRequest( + "SELECT CAST(date_keyword AS DATETIME) AS test_alias FROM " + + TestsConstants.TEST_INDEX_DATE + + " ORDER BY date_keyword"); verifySchema(response, schema("test_alias", null, "date")); - verifyDataRows(response, - rows("2014-08-19 07:09:13.434"), - 
rows("2019-09-25 02:04:13.469")); + verifyDataRows(response, rows("2014-08-19 07:09:13.434"), rows("2019-09-25 02:04:13.469")); } @Test public void castFieldToDatetimeWithWhereClauseJdbcFormatTest() { - JSONObject response = executeJdbcRequest("SELECT CAST(date_keyword AS DATETIME) FROM " - + TestsConstants.TEST_INDEX_DATE + " WHERE date_keyword IS NOT NULL ORDER BY date_keyword"); + JSONObject response = + executeJdbcRequest( + "SELECT CAST(date_keyword AS DATETIME) FROM " + + TestsConstants.TEST_INDEX_DATE + + " WHERE date_keyword IS NOT NULL ORDER BY date_keyword"); verifySchema(response, schema("cast_date_keyword", null, "date")); - verifyDataRows(response, - rows("2014-08-19 07:09:13.434"), - rows("2019-09-25 02:04:13.469")); + verifyDataRows(response, rows("2014-08-19 07:09:13.434"), rows("2019-09-25 02:04:13.469")); } @Test public void castFieldToDatetimeWithGroupByJdbcFormatTest() { JSONObject response = - executeJdbcRequest("SELECT CAST(date_keyword AS DATETIME) AS test_alias FROM " - + TestsConstants.TEST_INDEX_DATE + " GROUP BY test_alias DESC"); + executeJdbcRequest( + "SELECT CAST(date_keyword AS DATETIME) AS test_alias FROM " + + TestsConstants.TEST_INDEX_DATE + + " GROUP BY test_alias DESC"); verifySchema(response, schema("test_alias", "test_alias", "double")); - verifyDataRows(response, - rows("2014-08-19T07:09:13.434Z"), - rows("2019-09-25T02:04:13.469Z")); + verifyDataRows(response, rows("2014-08-19T07:09:13.434Z"), rows("2019-09-25T02:04:13.469Z")); } - @Test public void castBoolFieldToNumericValueInSelectClause() { JSONObject response = executeJdbcRequest( "SELECT " - + " male, " - + " CAST(male AS INT) AS cast_int, " - + " CAST(male AS LONG) AS cast_long, " - + " CAST(male AS FLOAT) AS cast_float, " - + " CAST(male AS DOUBLE) AS cast_double " - + "FROM " + TestsConstants.TEST_INDEX_BANK + " " - + "WHERE account_number = 1 OR account_number = 13" - ); - - verifySchema(response, + + " male, " + + " CAST(male AS INT) AS cast_int, " + + " CAST(male 
AS LONG) AS cast_long, " + + " CAST(male AS FLOAT) AS cast_float, " + + " CAST(male AS DOUBLE) AS cast_double " + + "FROM " + + TestsConstants.TEST_INDEX_BANK + + " " + + "WHERE account_number = 1 OR account_number = 13"); + + verifySchema( + response, schema("male", "boolean"), schema("CAST(male AS INT)", "cast_int", "integer"), schema("CAST(male AS LONG)", "cast_long", "long"), schema("CAST(male AS FLOAT)", "cast_float", "float"), - schema("CAST(male AS DOUBLE)", "cast_double", "double") - ); - verifyDataRows(response, - rows(true, 1, 1, 1.0, 1.0), - rows(false, 0, 0, 0.0, 0.0) - ); + schema("CAST(male AS DOUBLE)", "cast_double", "double")); + verifyDataRows(response, rows(true, 1, 1, 1.0, 1.0), rows(false, 0, 0, 0.0, 0.0)); } @Test @@ -396,90 +395,82 @@ public void castBoolFieldToNumericValueWithGroupByAlias() { JSONObject response = executeJdbcRequest( "SELECT " - + "CAST(male AS INT) AS cast_int, " - + "COUNT(*) " - + "FROM " + TestsConstants.TEST_INDEX_BANK + " " - + "GROUP BY cast_int" - ); - - verifySchema(response, + + "CAST(male AS INT) AS cast_int, " + + "COUNT(*) " + + "FROM " + + TestsConstants.TEST_INDEX_BANK + + " " + + "GROUP BY cast_int"); + + verifySchema( + response, schema("CAST(male AS INT)", "cast_int", "integer"), - schema("COUNT(*)", "integer") - ); - verifyDataRows(response, - rows(0, 3), - rows(1, 4) - ); + schema("COUNT(*)", "integer")); + verifyDataRows(response, rows(0, 3), rows(1, 4)); } @Test public void castStatementInWhereClauseGreaterThanTest() { - JSONObject response = executeJdbcRequest("SELECT balance FROM " + TEST_INDEX_ACCOUNT - + " WHERE (account_number < CAST(age AS DOUBLE)) ORDER BY balance LIMIT 5"); + JSONObject response = + executeJdbcRequest( + "SELECT balance FROM " + + TEST_INDEX_ACCOUNT + + " WHERE (account_number < CAST(age AS DOUBLE)) ORDER BY balance LIMIT 5"); verifySchema(response, schema("balance", null, "long")); - verifyDataRows(response, - rows(4180), - rows(5686), - rows(7004), - rows(7831), - rows(14127)); 
+ verifyDataRows(response, rows(4180), rows(5686), rows(7004), rows(7831), rows(14127)); } @Test public void castStatementInWhereClauseLessThanTest() { - JSONObject response = executeJdbcRequest("SELECT balance FROM " + TEST_INDEX_ACCOUNT - + " WHERE (account_number > CAST(age AS DOUBLE)) ORDER BY balance LIMIT 5"); + JSONObject response = + executeJdbcRequest( + "SELECT balance FROM " + + TEST_INDEX_ACCOUNT + + " WHERE (account_number > CAST(age AS DOUBLE)) ORDER BY balance LIMIT 5"); verifySchema(response, schema("balance", null, "long")); - verifyDataRows(response, - rows(1011), - rows(1031), - rows(1110), - rows(1133), - rows(1172)); + verifyDataRows(response, rows(1011), rows(1031), rows(1110), rows(1133), rows(1172)); } @Test public void castStatementInWhereClauseEqualToConstantTest() { - JSONObject response = executeJdbcRequest("SELECT balance FROM " + TEST_INDEX_ACCOUNT - + " WHERE (CAST(age AS DOUBLE) = 36.0) ORDER BY balance LIMIT 5"); + JSONObject response = + executeJdbcRequest( + "SELECT balance FROM " + + TEST_INDEX_ACCOUNT + + " WHERE (CAST(age AS DOUBLE) = 36.0) ORDER BY balance LIMIT 5"); verifySchema(response, schema("balance", null, "long")); - verifyDataRows(response, - rows(1249), - rows(1463), - rows(3960), - rows(5686), - rows(6025)); + verifyDataRows(response, rows(1249), rows(1463), rows(3960), rows(5686), rows(6025)); } @Test public void castStatementInWhereClauseLessThanConstantTest() { - JSONObject response = executeJdbcRequest("SELECT balance FROM " + TEST_INDEX_ACCOUNT - + " WHERE (CAST(age AS DOUBLE) < 36.0) ORDER BY balance LIMIT 5"); + JSONObject response = + executeJdbcRequest( + "SELECT balance FROM " + + TEST_INDEX_ACCOUNT + + " WHERE (CAST(age AS DOUBLE) < 36.0) ORDER BY balance LIMIT 5"); verifySchema(response, schema("balance", null, "long")); - verifyDataRows(response, - rows(1011), - rows(1031), - rows(1110), - rows(1133), - rows(1172)); + verifyDataRows(response, rows(1011), rows(1031), rows(1110), rows(1133), rows(1172)); 
} /** - * Testing compilation - * Result comparison is empty then comparing different types (Date and keyword) + * Testing compilation Result comparison is empty then comparing different types (Date and + * keyword) */ @Test public void castStatementInWhereClauseDatetimeCastTest() { - JSONObject response = executeJdbcRequest("SELECT date_keyword FROM " - + TestsConstants.TEST_INDEX_DATE - + " WHERE (CAST(date_keyword AS DATETIME) = '2014-08-19T07:09:13.434Z')"); + JSONObject response = + executeJdbcRequest( + "SELECT date_keyword FROM " + + TestsConstants.TEST_INDEX_DATE + + " WHERE (CAST(date_keyword AS DATETIME) = '2014-08-19T07:09:13.434Z')"); String schema_result = "{\"name\":\"date_keyword\",\"type\":\"keyword\"}"; assertEquals(response.getJSONArray("schema").get(0).toString(), schema_result); @@ -487,30 +478,32 @@ public void castStatementInWhereClauseDatetimeCastTest() { @Test public void concat_ws_field_and_string() throws Exception { - //here is a bug,csv field with spa - String query = "SELECT " + - " concat_ws('-',age,'-') as age,address from " + - TEST_INDEX_ACCOUNT + " " + - " limit 10 "; + // here is a bug,csv field with spa + String query = + "SELECT " + + " concat_ws('-',age,'-') as age,address from " + + TEST_INDEX_ACCOUNT + + " " + + " limit 10 "; - assertThat( - executeQuery(query), - hitAny(kvString("/fields/age/0", endsWith("--"))) - ); + assertThat(executeQuery(query), hitAny(kvString("/fields/age/0", endsWith("--")))); } /** * Ignore this test case because painless doesn't allowlist String.split function. 
* - * @see https://www.elastic.co/guide/en/elasticsearch/painless/7.0/painless-api-reference.html + * @see https://www.elastic.co/guide/en/elasticsearch/painless/7.0/painless-api-reference.html */ @Ignore public void whereConditionLeftFunctionRightVariableEqualTest() throws Exception { - String query = "SELECT " + - " * from " + - TestsConstants.TEST_INDEX + " " + - " where split(address,' ')[0]='806' limit 1000 "; + String query = + "SELECT " + + " * from " + + TestsConstants.TEST_INDEX + + " " + + " where split(address,' ')[0]='806' limit 1000 "; assertThat(executeQuery(query).query("/hits/total"), equalTo(4)); } @@ -518,15 +511,18 @@ public void whereConditionLeftFunctionRightVariableEqualTest() throws Exception /** * Ignore this test case because painless doesn't allowlist String.split function. * - * @see https://www.elastic.co/guide/en/elasticsearch/painless/7.0/painless-api-reference.html + * @see https://www.elastic.co/guide/en/elasticsearch/painless/7.0/painless-api-reference.html */ @Ignore public void whereConditionLeftFunctionRightVariableGreatTest() throws Exception { - String query = "SELECT " + - " * from " + - TestsConstants.TEST_INDEX + " " + - " where floor(split(address,' ')[0]+0) > 805 limit 1000 "; + String query = + "SELECT " + + " * from " + + TestsConstants.TEST_INDEX + + " " + + " where floor(split(address,' ')[0]+0) > 805 limit 1000 "; assertThat(executeQuery(query).query("/hits/total"), equalTo(223)); } @@ -534,42 +530,45 @@ public void whereConditionLeftFunctionRightVariableGreatTest() throws Exception @Test public void concat_ws_fields() throws Exception { - //here is a bug,csv field with spa - String query = "SELECT " + - " concat_ws('-',age,address) as combine,address from " + - TEST_INDEX_ACCOUNT + " " + - " limit 10 "; - assertThat( - executeQuery(query), - hitAny(kvString("/fields/combine/0", containsString("-"))) - ); + // here is a bug,csv field with spa + String query = + "SELECT " + + " concat_ws('-',age,address) as 
combine,address from " + + TEST_INDEX_ACCOUNT + + " " + + " limit 10 "; + assertThat(executeQuery(query), hitAny(kvString("/fields/combine/0", containsString("-")))); } @Test public void functionLogs() throws Exception { - String query = "SELECT log10(100) as a, log(1) as b, log(2, 4) as c, log2(8) as d from " - + TEST_INDEX_ACCOUNT + " limit 1"; + String query = + "SELECT log10(100) as a, log(1) as b, log(2, 4) as c, log2(8) as d from " + + TEST_INDEX_ACCOUNT + + " limit 1"; assertThat( executeQuery(query), - hitAny(both(kvDouble("/fields/a/0", equalTo(Math.log10(100)))) - .and(kvDouble("/fields/b/0", equalTo(Math.log(1)))) - .and(kvDouble("/fields/c/0", closeTo(Math.log(4) / Math.log(2), 0.0001))) - .and(kvDouble("/fields/d/0", closeTo(Math.log(8) / Math.log(2), 0.0001)))) - ); + hitAny( + both(kvDouble("/fields/a/0", equalTo(Math.log10(100)))) + .and(kvDouble("/fields/b/0", equalTo(Math.log(1)))) + .and(kvDouble("/fields/c/0", closeTo(Math.log(4) / Math.log(2), 0.0001))) + .and(kvDouble("/fields/d/0", closeTo(Math.log(8) / Math.log(2), 0.0001))))); } @Test public void functionPow() throws Exception { - String query = "SELECT pow(account_number, 2) as key," + - "abs(age - 60) as new_age from " + TEST_INDEX_ACCOUNT + - " WHERE firstname = 'Virginia' and lastname='Ayala' limit 1"; + String query = + "SELECT pow(account_number, 2) as key," + + "abs(age - 60) as new_age from " + + TEST_INDEX_ACCOUNT + + " WHERE firstname = 'Virginia' and lastname='Ayala' limit 1"; assertThat( executeQuery(query), - hitAny(both(kvDouble("/fields/new_age/0", equalTo(21.0))) - .and(kvDouble("/fields/key/0", equalTo(625.0)))) - ); + hitAny( + both(kvDouble("/fields/new_age/0", equalTo(21.0))) + .and(kvDouble("/fields/key/0", equalTo(625.0))))); } @Test @@ -577,96 +576,88 @@ public void operatorSubstring() throws IOException { assertThat( executeQuery( "SELECT substring('sampleName', 1, 4) AS substring FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvString("/fields/substring/0", 
equalTo("samp"))) - ); + hitAny(kvString("/fields/substring/0", equalTo("samp")))); assertThat( executeQuery( "SELECT substring('sampleName', 0, 20) AS substring FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvString("/fields/substring/0", equalTo("sampleName"))) - ); + hitAny(kvString("/fields/substring/0", equalTo("sampleName")))); } @Test public void operatorLength() throws IOException { assertThat( - executeQuery("SELECT LENGTH(lastname) FROM " + TEST_INDEX_ACCOUNT + executeQuery( + "SELECT LENGTH(lastname) FROM " + + TEST_INDEX_ACCOUNT + " WHERE lastname IS NOT NULL GROUP BY LENGTH(lastname) ORDER BY LENGTH(lastname)", "jdbc"), - containsString("\"type\": \"integer\"") - ); + containsString("\"type\": \"integer\"")); assertThat( executeQuery("SELECT LENGTH('sampleName') AS length FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/length/0", equalTo(10))) - ); - + hitAny(kvInt("/fields/length/0", equalTo(10)))); } @Test public void operatorReplace() { String query = "SELECT REPLACE('elastic', 'el', 'fant') FROM " + TEST_INDEX_ACCOUNT; - assertThat( - executeQuery(query, "jdbc"), - containsString("fantastic") - ); + assertThat(executeQuery(query, "jdbc"), containsString("fantastic")); } - @Ignore("The LOCATE function is not implemented in new SQL engine. https://github" - + ".com/opensearch-project/sql/issues/74") + @Ignore( + "The LOCATE function is not implemented in new SQL engine. 
https://github" + + ".com/opensearch-project/sql/issues/74") public void operatorLocate() throws IOException { - String query = "SELECT LOCATE('a', lastname, 0) FROM " + TEST_INDEX_ACCOUNT - + - " WHERE lastname IS NOT NULL GROUP BY LOCATE('a', lastname, 0) ORDER BY LOCATE('a', lastname, 0)"; - assertThat( - executeQuery(query, "jdbc"), containsString("\"type\": \"integer\"") - ); + String query = + "SELECT LOCATE('a', lastname, 0) FROM " + + TEST_INDEX_ACCOUNT + + " WHERE lastname IS NOT NULL GROUP BY LOCATE('a', lastname, 0) ORDER BY LOCATE('a'," + + " lastname, 0)"; + assertThat(executeQuery(query, "jdbc"), containsString("\"type\": \"integer\"")); assertThat( executeQuery("SELECT LOCATE('a', 'sampleName', 3) AS locate FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/locate/0", equalTo(8))) - ); + hitAny(kvInt("/fields/locate/0", equalTo(8)))); assertThat( executeQuery("SELECT LOCATE('a', 'sampleName') AS locate FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/locate/0", equalTo(2))) - ); + hitAny(kvInt("/fields/locate/0", equalTo(2)))); } @Test public void rtrim() throws IOException { assertThat( executeQuery("SELECT RTRIM(' sampleName ') AS rtrim FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvString("/fields/rtrim/0", equalTo(" sampleName"))) - ); + hitAny(kvString("/fields/rtrim/0", equalTo(" sampleName")))); } @Test public void ltrim() throws IOException { assertThat( executeQuery("SELECT LTRIM(' sampleName ') AS ltrim FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvString("/fields/ltrim/0", equalTo("sampleName "))) - ); + hitAny(kvString("/fields/ltrim/0", equalTo("sampleName ")))); } - @Ignore("The ASCII function is not implemented in new SQL engine. https://github" - + ".com/opensearch-project/sql/issues/73") + @Ignore( + "The ASCII function is not implemented in new SQL engine. 
https://github" + + ".com/opensearch-project/sql/issues/73") public void ascii() throws IOException { assertThat( - executeQuery("SELECT ASCII(lastname) FROM " + TEST_INDEX_ACCOUNT - + - " WHERE lastname IS NOT NULL GROUP BY ASCII(lastname) ORDER BY ASCII(lastname) LIMIT 5", + executeQuery( + "SELECT ASCII(lastname) FROM " + + TEST_INDEX_ACCOUNT + + " WHERE lastname IS NOT NULL GROUP BY ASCII(lastname) ORDER BY ASCII(lastname)" + + " LIMIT 5", "jdbc"), - containsString("\"type\": \"integer\"") - ); + containsString("\"type\": \"integer\"")); assertThat( executeQuery("SELECT ASCII('sampleName') AS ascii FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/ascii/0", equalTo(115))) - ); + hitAny(kvInt("/fields/ascii/0", equalTo(115)))); } /** - * The following tests for LEFT and RIGHT are ignored because the OpenSearch client fails to parse "LEFT"/"RIGHT" in - * the integTest + * The following tests for LEFT and RIGHT are ignored because the OpenSearch client fails to parse + * "LEFT"/"RIGHT" in the integTest */ @Ignore @Test @@ -674,13 +665,11 @@ public void left() throws IOException { assertThat( executeQuery( "SELECT LEFT('sample', 2) AS left FROM " + TEST_INDEX_ACCOUNT + " ORDER BY left"), - hitAny(kvString("/fields/left/0", equalTo("sa"))) - ); + hitAny(kvString("/fields/left/0", equalTo("sa")))); assertThat( executeQuery( "SELECT LEFT('sample', 20) AS left FROM " + TEST_INDEX_ACCOUNT + " ORDER BY left"), - hitAny(kvString("/fields/left/0", equalTo("sample"))) - ); + hitAny(kvString("/fields/left/0", equalTo("sample")))); } @Ignore @@ -689,20 +678,20 @@ public void right() throws IOException { assertThat( executeQuery( "SELECT RIGHT('elastic', 3) AS right FROM " + TEST_INDEX_ACCOUNT + " ORDER BY right"), - hitAny(kvString("/fields/right/0", equalTo("tic"))) - ); + hitAny(kvString("/fields/right/0", equalTo("tic")))); assertThat( executeQuery( "SELECT RIGHT('elastic', 20) AS right FROM " + TEST_INDEX_ACCOUNT + " ORDER BY right"), - 
hitAny(kvString("/fields/right/0", equalTo("elastic"))) - ); + hitAny(kvString("/fields/right/0", equalTo("elastic")))); } @Test public void ifFuncShouldPassJDBC() { - JSONObject response = executeJdbcRequest( - "SELECT IF(age > 30, 'True', 'False') AS Ages FROM " + TEST_INDEX_ACCOUNT - + " WHERE age IS NOT NULL GROUP BY Ages"); + JSONObject response = + executeJdbcRequest( + "SELECT IF(age > 30, 'True', 'False') AS Ages FROM " + + TEST_INDEX_ACCOUNT + + " WHERE age IS NOT NULL GROUP BY Ages"); assertEquals("IF(age > 30, 'True', 'False')", response.query("/schema/0/name")); assertEquals("Ages", response.query("/schema/0/alias")); assertEquals("keyword", response.query("/schema/0/type")); @@ -712,35 +701,33 @@ public void ifFuncShouldPassJDBC() { public void ifFuncWithBinaryComparisonAsConditionTest() throws IOException { assertThat( executeQuery("SELECT IF(2 > 0, 'hello', 'world') AS ifTrue FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvString("/fields/ifTrue/0", equalTo("hello"))) - ); + hitAny(kvString("/fields/ifTrue/0", equalTo("hello")))); assertThat( executeQuery("SELECT IF(2 = 0, 'hello', 'world') AS ifFalse FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvString("/fields/ifFalse/0", equalTo("world"))) - ); + hitAny(kvString("/fields/ifFalse/0", equalTo("world")))); } @Test public void ifFuncWithBooleanExprInputAsConditionTest() throws IOException { assertThat( executeQuery("SELECT IF(true, 1, 0) AS ifBoolean FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/ifBoolean/0", equalTo(1))) - ); + hitAny(kvInt("/fields/ifBoolean/0", equalTo(1)))); } @Test public void ifFuncWithNullInputAsConditionTest() throws IOException { assertThat( executeQuery("SELECT IF(null, 1, 0) AS ifNull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/ifNull/0", equalTo(0))) - ); + hitAny(kvInt("/fields/ifNull/0", equalTo(0)))); } @Test public void ifnullShouldPassJDBC() throws IOException { - JSONObject response = executeJdbcRequest( - "SELECT IFNULL(lastname, 'unknown') AS name FROM " + 
TEST_INDEX_ACCOUNT - + " GROUP BY name"); + JSONObject response = + executeJdbcRequest( + "SELECT IFNULL(lastname, 'unknown') AS name FROM " + + TEST_INDEX_ACCOUNT + + " GROUP BY name"); assertEquals("IFNULL(lastname, 'unknown')", response.query("/schema/0/name")); assertEquals("name", response.query("/schema/0/alias")); assertEquals("keyword", response.query("/schema/0/type")); @@ -750,27 +737,23 @@ public void ifnullShouldPassJDBC() throws IOException { public void ifnullWithNotNullInputTest() throws IOException { assertThat( executeQuery("SELECT IFNULL('sample', 'IsNull') AS ifnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvString("/fields/ifnull/0", equalTo("sample"))) - ); + hitAny(kvString("/fields/ifnull/0", equalTo("sample")))); } @Test public void ifnullWithNullInputTest() throws IOException { assertThat( executeQuery("SELECT IFNULL(null, 10) AS ifnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/ifnull/0", equalTo(10))) - ); + hitAny(kvInt("/fields/ifnull/0", equalTo(10)))); assertThat( executeQuery("SELECT IFNULL('', 10) AS ifnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvString("/fields/ifnull/0", equalTo(""))) - ); + hitAny(kvString("/fields/ifnull/0", equalTo("")))); } @Test public void isnullShouldPassJDBC() { JSONObject response = - executeJdbcRequest( - "SELECT ISNULL(lastname) AS name FROM " + TEST_INDEX_ACCOUNT); + executeJdbcRequest("SELECT ISNULL(lastname) AS name FROM " + TEST_INDEX_ACCOUNT); assertEquals("ISNULL(lastname)", response.query("/schema/0/name")); assertEquals("name", response.query("/schema/0/alias")); assertEquals("boolean", response.query("/schema/0/type")); @@ -780,61 +763,57 @@ public void isnullShouldPassJDBC() { public void isnullWithNotNullInputTest() throws IOException { assertThat( executeQuery("SELECT ISNULL('elastic') AS isnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/isnull/0", equalTo(0))) - ); + hitAny(kvInt("/fields/isnull/0", equalTo(0)))); assertThat( executeQuery("SELECT ISNULL('') AS isnull 
FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/isnull/0", equalTo(0))) - ); + hitAny(kvInt("/fields/isnull/0", equalTo(0)))); } @Test public void isnullWithNullInputTest() throws IOException { assertThat( executeQuery("SELECT ISNULL(null) AS isnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/isnull/0", equalTo(1))) - ); + hitAny(kvInt("/fields/isnull/0", equalTo(1)))); } @Test public void isnullWithMathExpr() throws IOException { assertThat( executeQuery("SELECT ISNULL(1+1) AS isnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/isnull/0", equalTo(0))) - ); + hitAny(kvInt("/fields/isnull/0", equalTo(0)))); assertThat( executeQuery("SELECT ISNULL(1+1*1/0) AS isnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/isnull/0", equalTo(1))) - ); + hitAny(kvInt("/fields/isnull/0", equalTo(1)))); } /** * Ignore this test case because painless doesn't allowlist String.split function. * - * @see https://www.elastic.co/guide/en/elasticsearch/painless/7.0/painless-api-reference.html + * @see https://www.elastic.co/guide/en/elasticsearch/painless/7.0/painless-api-reference.html */ @Ignore public void split_field() throws Exception { - //here is a bug,csv field with spa - String query = "SELECT " + - " split(address,' ')[0],age from " + - TestsConstants.TEST_INDEX + " where address is not null " + - " limit 10 "; + // here is a bug,csv field with spa + String query = + "SELECT " + + " split(address,' ')[0],age from " + + TestsConstants.TEST_INDEX + + " where address is not null " + + " limit 10 "; } @Test public void literal() throws Exception { - String query = "SELECT 10 " + - "from " + TEST_INDEX_ACCOUNT + " limit 1"; + String query = "SELECT 10 " + "from " + TEST_INDEX_ACCOUNT + " limit 1"; final SearchHit[] hits = query(query).getHits(); assertThat(hits[0].getFields(), hasValue(contains(10))); } @Test public void literalWithDoubleValue() throws Exception { - String query = "SELECT 10.0 " + - "from " + TEST_INDEX_ACCOUNT + " limit 1"; + String 
query = "SELECT 10.0 " + "from " + TEST_INDEX_ACCOUNT + " limit 1"; final SearchHit[] hits = query(query).getHits(); assertThat(hits[0].getFields(), hasValue(contains(10.0))); @@ -842,8 +821,7 @@ public void literalWithDoubleValue() throws Exception { @Test public void literalWithAlias() throws Exception { - String query = "SELECT 10 as key " + - "from " + TEST_INDEX_ACCOUNT + " limit 1"; + String query = "SELECT 10 as key " + "from " + TEST_INDEX_ACCOUNT + " limit 1"; final SearchHit[] hits = query(query).getHits(); assertThat(hits.length, is(1)); @@ -852,8 +830,7 @@ public void literalWithAlias() throws Exception { @Test public void literalMultiField() throws Exception { - String query = "SELECT 1, 2 " + - "from " + TEST_INDEX_ACCOUNT + " limit 1"; + String query = "SELECT 1, 2 " + "from " + TEST_INDEX_ACCOUNT + " limit 1"; final SearchHit[] hits = query(query).getHits(); assertThat(hits.length, is(1)); @@ -863,10 +840,11 @@ public void literalMultiField() throws Exception { private SearchHits query(String query) throws IOException { final String rsp = executeQueryWithStringOutput(query); - final XContentParser parser = new JsonXContentParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(rsp)); + final XContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(rsp)); return SearchResponse.fromXContent(parser).getHits(); } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/SQLIntegTestCase.java b/integ-test/src/test/java/org/opensearch/sql/legacy/SQLIntegTestCase.java index 7216c03d08..e0d04c55b8 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/SQLIntegTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/SQLIntegTestCase.java @@ -3,38 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; -import com.google.common.base.Strings; -import 
com.google.gson.Gson; -import org.apache.commons.lang3.StringUtils; -import org.json.JSONArray; -import org.json.JSONObject; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.Before; -import org.opensearch.client.Request; -import org.opensearch.client.RequestOptions; -import org.opensearch.client.Response; -import org.opensearch.client.RestClient; -import org.opensearch.sql.common.setting.Settings; - -import javax.management.MBeanServerInvocationHandler; -import javax.management.ObjectName; -import javax.management.remote.JMXConnector; -import javax.management.remote.JMXConnectorFactory; -import javax.management.remote.JMXServiceURL; -import java.io.IOException; -import java.io.UnsupportedEncodingException; -import java.net.URLEncoder; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.Locale; -import org.opensearch.sql.datasource.model.DataSourceMetadata; - import static com.google.common.base.Strings.isNullOrEmpty; import static org.opensearch.sql.legacy.TestUtils.createIndexByRestClient; import static org.opensearch.sql.legacy.TestUtils.getAccountIndexMapping; @@ -69,9 +39,35 @@ import static org.opensearch.sql.legacy.plugin.RestSqlAction.EXPLAIN_API_ENDPOINT; import static org.opensearch.sql.legacy.plugin.RestSqlAction.QUERY_API_ENDPOINT; -/** - * OpenSearch Rest integration test base for SQL testing - */ +import com.google.common.base.Strings; +import com.google.gson.Gson; +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Locale; +import javax.management.MBeanServerInvocationHandler; +import javax.management.ObjectName; +import javax.management.remote.JMXConnector; +import javax.management.remote.JMXConnectorFactory; +import 
javax.management.remote.JMXServiceURL; +import org.apache.commons.lang3.StringUtils; +import org.json.JSONArray; +import org.json.JSONObject; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.Before; +import org.opensearch.client.Request; +import org.opensearch.client.RequestOptions; +import org.opensearch.client.Response; +import org.opensearch.client.RestClient; +import org.opensearch.sql.common.setting.Settings; +import org.opensearch.sql.datasource.model.DataSourceMetadata; + +/** OpenSearch Rest integration test base for SQL testing */ public abstract class SQLIntegTestCase extends OpenSearchSQLRestTestCase { public static final String PERSISTENT = "persistent"; @@ -103,12 +99,12 @@ protected boolean preserveClusterUponCompletion() { } /** - * We need to be able to dump the jacoco coverage before cluster is shut down. - * The new internal testing framework removed some of the gradle tasks we were listening to - * to choose a good time to do it. This will dump the executionData to file after each test. - * TODO: This is also currently just overwriting integTest.exec with the updated execData without - * resetting after writing each time. This can be improved to either write an exec file per test - * or by letting jacoco append to the file + * We need to be able to dump the jacoco coverage before cluster is shut down. The new internal + * testing framework removed some of the gradle tasks we were listening to to choose a good time + * to do it. This will dump the executionData to file after each test. TODO: This is also + * currently just overwriting integTest.exec with the updated execData without resetting after + * writing each time. 
This can be improved to either write an exec file per test or by letting + * jacoco append to the file */ public interface IProxy { byte[] getExecutionData(boolean reset); @@ -129,10 +125,12 @@ public static void dumpCoverage() { String serverUrl = "service:jmx:rmi:///jndi/rmi://127.0.0.1:7777/jmxrmi"; try (JMXConnector connector = JMXConnectorFactory.connect(new JMXServiceURL(serverUrl))) { - IProxy proxy = MBeanServerInvocationHandler.newProxyInstance( - connector.getMBeanServerConnection(), new ObjectName("org.jacoco:type=Runtime"), - IProxy.class, - false); + IProxy proxy = + MBeanServerInvocationHandler.newProxyInstance( + connector.getMBeanServerConnection(), + new ObjectName("org.jacoco:type=Runtime"), + IProxy.class, + false); Path path = Paths.get(jacocoBuildPath + "/integTest.exec"); Files.write(path, proxy.getExecutionData(false)); @@ -142,9 +140,9 @@ public static void dumpCoverage() { } /** - * As JUnit JavaDoc says: - * "The @AfterClass methods declared in superclasses will be run after those of the current class." - * So this method is supposed to run before closeClients() in parent class. + * As JUnit JavaDoc says: "The @AfterClass methods declared in superclasses will be run after + * those of the current class." So this method is supposed to run before closeClients() in parent + * class. 
*/ @AfterClass public static void cleanUpIndices() throws IOException { @@ -156,13 +154,16 @@ public static void cleanUpIndices() throws IOException { protected void setQuerySizeLimit(Integer limit) throws IOException { updateClusterSettings( - new ClusterSetting("transient", Settings.Key.QUERY_SIZE_LIMIT.getKeyValue(), limit.toString())); + new ClusterSetting( + "transient", Settings.Key.QUERY_SIZE_LIMIT.getKeyValue(), limit.toString())); } protected void resetQuerySizeLimit() throws IOException { updateClusterSettings( - new ClusterSetting("transient", Settings.Key.QUERY_SIZE_LIMIT.getKeyValue(), DEFAULT_QUERY_SIZE_LIMIT - .toString())); + new ClusterSetting( + "transient", + Settings.Key.QUERY_SIZE_LIMIT.getKeyValue(), + DEFAULT_QUERY_SIZE_LIMIT.toString())); } protected static void wipeAllClusterSettings() throws IOException { @@ -179,19 +180,16 @@ protected void setMaxResultWindow(String indexName, Integer window) throws IOExc } protected void resetMaxResultWindow(String indexName) throws IOException { - updateIndexSettings(indexName, - "{ \"index\": { \"max_result_window\": " + DEFAULT_MAX_RESULT_WINDOW + " } }"); + updateIndexSettings( + indexName, "{ \"index\": { \"max_result_window\": " + DEFAULT_MAX_RESULT_WINDOW + " } }"); } - /** - * Provide for each test to load test index, data and other setup work - */ - protected void init() throws Exception { - } + /** Provide for each test to load test index, data and other setup work */ + protected void init() throws Exception {} /** - * Make it thread-safe in case tests are running in parallel but does not guarantee - * if test like DeleteIT that mutates cluster running in parallel. + * Make it thread-safe in case tests are running in parallel but does not guarantee if test like + * DeleteIT that mutates cluster running in parallel. 
*/ protected synchronized void loadIndex(Index index, RestClient client) throws IOException { String indexName = index.getName(); @@ -305,8 +303,9 @@ protected Request buildGetEndpointRequest(final String sqlQuery) { Assert.fail(utf8CharsetName + " not available"); } - final String requestUrl = String.format(Locale.ROOT, "%s?sql=%s&format=%s", QUERY_API_ENDPOINT, - urlEncodedQuery, "json"); + final String requestUrl = + String.format( + Locale.ROOT, "%s?sql=%s&format=%s", QUERY_API_ENDPOINT, urlEncodedQuery, "json"); return new Request("GET", requestUrl); } @@ -345,7 +344,8 @@ private String executeRequest(final String requestBody, final boolean isExplainQ return executeRequest(sqlRequest); } - protected static String executeRequest(final Request request, RestClient client) throws IOException { + protected static String executeRequest(final Request request, RestClient client) + throws IOException { Response response = client.performRequest(request); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); return getResponseBody(response); @@ -374,10 +374,12 @@ protected JSONObject executeCursorCloseQuery(final String cursor) throws IOExcep return new JSONObject(executeRequest(sqlRequest)); } - protected static JSONObject updateClusterSettings(ClusterSetting setting, RestClient client) throws IOException { + protected static JSONObject updateClusterSettings(ClusterSetting setting, RestClient client) + throws IOException { Request request = new Request("PUT", "/_cluster/settings"); - String persistentSetting = String.format(Locale.ROOT, - "{\"%s\": {\"%s\": %s}}", setting.type, setting.name, setting.value); + String persistentSetting = + String.format( + Locale.ROOT, "{\"%s\": {\"%s\": %s}}", setting.type, setting.name, setting.value); request.setJsonEntity(persistentSetting); RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); restOptionsBuilder.addHeader("Content-Type", "application/json"); @@ -414,11 +416,17 @@ ClusterSetting 
nullify() { @Override public String toString() { - return "ClusterSetting{" + - "type='" + type + '\'' + - ", path='" + name + '\'' + - ", value='" + value + '\'' + - '}'; + return "ClusterSetting{" + + "type='" + + type + + '\'' + + ", path='" + + name + + '\'' + + ", value='" + + value + + '\'' + + '}'; } } @@ -439,16 +447,12 @@ protected String makeRequest(String query) { } protected String makeRequest(String query, int fetch_size) { - return String.format("{\n" + - " \"fetch_size\": \"%s\",\n" + - " \"query\": \"%s\"\n" + - "}", fetch_size, query); + return String.format( + "{\n" + " \"fetch_size\": \"%s\",\n" + " \"query\": \"%s\"\n" + "}", fetch_size, query); } protected String makeFetchLessRequest(String query) { - return String.format("{\n" + - " \"query\": \"%s\"\n" + - "}", query); + return String.format("{\n" + " \"query\": \"%s\"\n" + "}", query); } protected String makeCursorRequest(String cursor) { @@ -501,7 +505,6 @@ protected static Request getFetchDataSourceRequest(String name) { return request; } - protected static Request getDeleteDataSourceRequest(String name) { Request request = new Request("DELETE", "/_plugins/_query/_datasources" + "/" + name); RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); @@ -510,175 +513,196 @@ protected static Request getDeleteDataSourceRequest(String name) { return request; } - /** - * Enum for associating test index with relevant mapping and data. - */ + /** Enum for associating test index with relevant mapping and data. 
*/ public enum Index { - ONLINE(TestsConstants.TEST_INDEX_ONLINE, - "online", - null, - "src/test/resources/online.json"), - ACCOUNT(TestsConstants.TEST_INDEX_ACCOUNT, + ONLINE(TestsConstants.TEST_INDEX_ONLINE, "online", null, "src/test/resources/online.json"), + ACCOUNT( + TestsConstants.TEST_INDEX_ACCOUNT, "account", getAccountIndexMapping(), "src/test/resources/accounts.json"), - PHRASE(TestsConstants.TEST_INDEX_PHRASE, + PHRASE( + TestsConstants.TEST_INDEX_PHRASE, "phrase", getPhraseIndexMapping(), "src/test/resources/phrases.json"), - DOG(TestsConstants.TEST_INDEX_DOG, - "dog", - getDogIndexMapping(), - "src/test/resources/dogs.json"), - DOGS2(TestsConstants.TEST_INDEX_DOG2, + DOG(TestsConstants.TEST_INDEX_DOG, "dog", getDogIndexMapping(), "src/test/resources/dogs.json"), + DOGS2( + TestsConstants.TEST_INDEX_DOG2, "dog", getDogs2IndexMapping(), "src/test/resources/dogs2.json"), - DOGS3(TestsConstants.TEST_INDEX_DOG3, + DOGS3( + TestsConstants.TEST_INDEX_DOG3, "dog", getDogs3IndexMapping(), "src/test/resources/dogs3.json"), - DOGSSUBQUERY(TestsConstants.TEST_INDEX_DOGSUBQUERY, + DOGSSUBQUERY( + TestsConstants.TEST_INDEX_DOGSUBQUERY, "dog", getDogIndexMapping(), "src/test/resources/dogsubquery.json"), - PEOPLE(TestsConstants.TEST_INDEX_PEOPLE, - "people", - null, - "src/test/resources/peoples.json"), - PEOPLE2(TestsConstants.TEST_INDEX_PEOPLE2, + PEOPLE(TestsConstants.TEST_INDEX_PEOPLE, "people", null, "src/test/resources/peoples.json"), + PEOPLE2( + TestsConstants.TEST_INDEX_PEOPLE2, "people", getPeople2IndexMapping(), "src/test/resources/people2.json"), - GAME_OF_THRONES(TestsConstants.TEST_INDEX_GAME_OF_THRONES, + GAME_OF_THRONES( + TestsConstants.TEST_INDEX_GAME_OF_THRONES, "gotCharacters", getGameOfThronesIndexMapping(), "src/test/resources/game_of_thrones_complex.json"), - SYSTEM(TestsConstants.TEST_INDEX_SYSTEM, - "systems", - null, - "src/test/resources/systems.json"), - ODBC(TestsConstants.TEST_INDEX_ODBC, + SYSTEM(TestsConstants.TEST_INDEX_SYSTEM, 
"systems", null, "src/test/resources/systems.json"), + ODBC( + TestsConstants.TEST_INDEX_ODBC, "odbc", getOdbcIndexMapping(), "src/test/resources/odbc-date-formats.json"), - LOCATION(TestsConstants.TEST_INDEX_LOCATION, + LOCATION( + TestsConstants.TEST_INDEX_LOCATION, "location", getLocationIndexMapping(), "src/test/resources/locations.json"), - LOCATION_TWO(TestsConstants.TEST_INDEX_LOCATION2, + LOCATION_TWO( + TestsConstants.TEST_INDEX_LOCATION2, "location2", getLocationIndexMapping(), "src/test/resources/locations2.json"), - NESTED(TestsConstants.TEST_INDEX_NESTED_TYPE, + NESTED( + TestsConstants.TEST_INDEX_NESTED_TYPE, "nestedType", getNestedTypeIndexMapping(), "src/test/resources/nested_objects.json"), - NESTED_WITHOUT_ARRAYS(TestsConstants.TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS, + NESTED_WITHOUT_ARRAYS( + TestsConstants.TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS, "nestedTypeWithoutArrays", getNestedTypeIndexMapping(), "src/test/resources/nested_objects_without_arrays.json"), - NESTED_WITH_QUOTES(TestsConstants.TEST_INDEX_NESTED_WITH_QUOTES, + NESTED_WITH_QUOTES( + TestsConstants.TEST_INDEX_NESTED_WITH_QUOTES, "nestedType", getNestedTypeIndexMapping(), "src/test/resources/nested_objects_quotes_in_values.json"), - EMPLOYEE_NESTED(TestsConstants.TEST_INDEX_EMPLOYEE_NESTED, + EMPLOYEE_NESTED( + TestsConstants.TEST_INDEX_EMPLOYEE_NESTED, "_doc", getEmployeeNestedTypeIndexMapping(), "src/test/resources/employee_nested.json"), - JOIN(TestsConstants.TEST_INDEX_JOIN_TYPE, + JOIN( + TestsConstants.TEST_INDEX_JOIN_TYPE, "joinType", getJoinTypeIndexMapping(), "src/test/resources/join_objects.json"), - UNEXPANDED_OBJECT(TestsConstants.TEST_INDEX_UNEXPANDED_OBJECT, + UNEXPANDED_OBJECT( + TestsConstants.TEST_INDEX_UNEXPANDED_OBJECT, "unexpandedObject", getUnexpandedObjectIndexMapping(), "src/test/resources/unexpanded_objects.json"), - BANK(TestsConstants.TEST_INDEX_BANK, + BANK( + TestsConstants.TEST_INDEX_BANK, "account", getBankIndexMapping(), "src/test/resources/bank.json"), - 
BANK_TWO(TestsConstants.TEST_INDEX_BANK_TWO, + BANK_TWO( + TestsConstants.TEST_INDEX_BANK_TWO, "account_two", getBankIndexMapping(), "src/test/resources/bank_two.json"), - BANK_WITH_NULL_VALUES(TestsConstants.TEST_INDEX_BANK_WITH_NULL_VALUES, + BANK_WITH_NULL_VALUES( + TestsConstants.TEST_INDEX_BANK_WITH_NULL_VALUES, "account_null", getBankWithNullValuesIndexMapping(), "src/test/resources/bank_with_null_values.json"), - BANK_WITH_STRING_VALUES(TestsConstants.TEST_INDEX_STRINGS, + BANK_WITH_STRING_VALUES( + TestsConstants.TEST_INDEX_STRINGS, "strings", getStringIndexMapping(), "src/test/resources/strings.json"), - BANK_CSV_SANITIZE(TestsConstants.TEST_INDEX_BANK_CSV_SANITIZE, + BANK_CSV_SANITIZE( + TestsConstants.TEST_INDEX_BANK_CSV_SANITIZE, "account", getBankIndexMapping(), "src/test/resources/bank_csv_sanitize.json"), - BANK_RAW_SANITIZE(TestsConstants.TEST_INDEX_BANK_RAW_SANITIZE, - "account", - getBankIndexMapping(), - "src/test/resources/bank_raw_sanitize.json"), - ORDER(TestsConstants.TEST_INDEX_ORDER, + BANK_RAW_SANITIZE( + TestsConstants.TEST_INDEX_BANK_RAW_SANITIZE, + "account", + getBankIndexMapping(), + "src/test/resources/bank_raw_sanitize.json"), + ORDER( + TestsConstants.TEST_INDEX_ORDER, "_doc", getOrderIndexMapping(), "src/test/resources/order.json"), - WEBLOG(TestsConstants.TEST_INDEX_WEBLOG, + WEBLOG( + TestsConstants.TEST_INDEX_WEBLOG, "weblog", getWeblogsIndexMapping(), "src/test/resources/weblogs.json"), - DATE(TestsConstants.TEST_INDEX_DATE, + DATE( + TestsConstants.TEST_INDEX_DATE, "dates", getDateIndexMapping(), "src/test/resources/dates.json"), - DATETIME(TestsConstants.TEST_INDEX_DATE_TIME, + DATETIME( + TestsConstants.TEST_INDEX_DATE_TIME, "_doc", getDateTimeIndexMapping(), "src/test/resources/datetime.json"), - NESTED_SIMPLE(TestsConstants.TEST_INDEX_NESTED_SIMPLE, + NESTED_SIMPLE( + TestsConstants.TEST_INDEX_NESTED_SIMPLE, "_doc", getNestedSimpleIndexMapping(), "src/test/resources/nested_simple.json"), - 
DEEP_NESTED(TestsConstants.TEST_INDEX_DEEP_NESTED, + DEEP_NESTED( + TestsConstants.TEST_INDEX_DEEP_NESTED, "_doc", getDeepNestedIndexMapping(), "src/test/resources/deep_nested_index_data.json"), - DATA_TYPE_NUMERIC(TestsConstants.TEST_INDEX_DATATYPE_NUMERIC, + DATA_TYPE_NUMERIC( + TestsConstants.TEST_INDEX_DATATYPE_NUMERIC, "_doc", getDataTypeNumericIndexMapping(), "src/test/resources/datatypes_numeric.json"), - DATA_TYPE_NONNUMERIC(TestsConstants.TEST_INDEX_DATATYPE_NONNUMERIC, + DATA_TYPE_NONNUMERIC( + TestsConstants.TEST_INDEX_DATATYPE_NONNUMERIC, "_doc", getDataTypeNonnumericIndexMapping(), "src/test/resources/datatypes.json"), - BEER(TestsConstants.TEST_INDEX_BEER, - "beer", - null, - "src/test/resources/beer.stackexchange.json"), - NULL_MISSING(TestsConstants.TEST_INDEX_NULL_MISSING, + BEER( + TestsConstants.TEST_INDEX_BEER, "beer", null, "src/test/resources/beer.stackexchange.json"), + NULL_MISSING( + TestsConstants.TEST_INDEX_NULL_MISSING, "null_missing", getMappingFile("null_missing_index_mapping.json"), "src/test/resources/null_missing.json"), - CALCS(TestsConstants.TEST_INDEX_CALCS, + CALCS( + TestsConstants.TEST_INDEX_CALCS, "calcs", getMappingFile("calcs_index_mappings.json"), "src/test/resources/calcs.json"), - DATE_FORMATS(TestsConstants.TEST_INDEX_DATE_FORMATS, + DATE_FORMATS( + TestsConstants.TEST_INDEX_DATE_FORMATS, "date_formats", getMappingFile("date_formats_index_mapping.json"), "src/test/resources/date_formats.json"), - WILDCARD(TestsConstants.TEST_INDEX_WILDCARD, + WILDCARD( + TestsConstants.TEST_INDEX_WILDCARD, "wildcard", getMappingFile("wildcard_index_mappings.json"), "src/test/resources/wildcard.json"), - DATASOURCES(TestsConstants.DATASOURCES, + DATASOURCES( + TestsConstants.DATASOURCES, "datasource", getMappingFile("datasources_index_mappings.json"), "src/test/resources/datasources.json"), - MULTI_NESTED(TestsConstants.TEST_INDEX_MULTI_NESTED_TYPE, + MULTI_NESTED( + TestsConstants.TEST_INDEX_MULTI_NESTED_TYPE, "multi_nested", 
getMappingFile("multi_nested.json"), "src/test/resources/multi_nested_objects.json"), - NESTED_WITH_NULLS(TestsConstants.TEST_INDEX_NESTED_WITH_NULLS, + NESTED_WITH_NULLS( + TestsConstants.TEST_INDEX_NESTED_WITH_NULLS, "multi_nested", getNestedTypeIndexMapping(), "src/test/resources/nested_with_nulls.json"); @@ -710,7 +734,5 @@ public String getMapping() { public String getDataSet() { return this.dataSet; } - - } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/ShowIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/ShowIT.java index b28336c482..fa86bbbc22 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/ShowIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/ShowIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.equalTo; @@ -20,7 +19,8 @@ public class ShowIT extends SQLIntegTestCase { @Override protected void init() { - // Note: not using the existing TEST_INDEX_* indices, since underscore in the names causes issues + // Note: not using the existing TEST_INDEX_* indices, since underscore in the names causes + // issues createEmptyIndexIfNotExist("abcdefg"); createEmptyIndexIfNotExist("abcdefghijk"); createEmptyIndexIfNotExist("abcdijk"); diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/SourceFieldIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/SourceFieldIT.java index a6a1a1cfe9..bf288262b6 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/SourceFieldIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/SourceFieldIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -31,32 +30,44 @@ protected void init() throws Exception { @Test public void includeTest() throws IOException { - SearchHits response = query(String.format( - "SELECT 
include('*name','*ge'),include('b*'),include('*ddre*'),include('gender') FROM %s LIMIT 1000", - TEST_INDEX_ACCOUNT)); + SearchHits response = + query( + String.format( + "SELECT include('*name','*ge'),include('b*'),include('*ddre*'),include('gender')" + + " FROM %s LIMIT 1000", + TEST_INDEX_ACCOUNT)); for (SearchHit hit : response.getHits()) { Set keySet = hit.getSourceAsMap().keySet(); for (String field : keySet) { - Assert.assertTrue(field.endsWith("name") || field.endsWith("ge") || field.startsWith("b") || - field.contains("ddre") || field.equals("gender")); + Assert.assertTrue( + field.endsWith("name") + || field.endsWith("ge") + || field.startsWith("b") + || field.contains("ddre") + || field.equals("gender")); } } - } @Test public void excludeTest() throws IOException { - SearchHits response = query(String.format( - "SELECT exclude('*name','*ge'),exclude('b*'),exclude('*ddre*'),exclude('gender') FROM %s LIMIT 1000", - TEST_INDEX_ACCOUNT)); + SearchHits response = + query( + String.format( + "SELECT exclude('*name','*ge'),exclude('b*'),exclude('*ddre*'),exclude('gender')" + + " FROM %s LIMIT 1000", + TEST_INDEX_ACCOUNT)); for (SearchHit hit : response.getHits()) { Set keySet = hit.getSourceAsMap().keySet(); for (String field : keySet) { Assert.assertFalse( - field.endsWith("name") || field.endsWith("ge") || field.startsWith("b") || - field.contains("ddre") || field.equals("gender")); + field.endsWith("name") + || field.endsWith("ge") + || field.startsWith("b") + || field.contains("ddre") + || field.equals("gender")); } } } @@ -64,15 +75,18 @@ public void excludeTest() throws IOException { @Test public void allTest() throws IOException { - SearchHits response = query(String.format( - "SELECT exclude('*name','*ge'),include('b*'),exclude('*ddre*'),include('gender') FROM %s LIMIT 1000", - TEST_INDEX_ACCOUNT)); + SearchHits response = + query( + String.format( + "SELECT exclude('*name','*ge'),include('b*'),exclude('*ddre*'),include('gender')" + + " FROM %s LIMIT 
1000", + TEST_INDEX_ACCOUNT)); for (SearchHit hit : response.getHits()) { Set keySet = hit.getSourceAsMap().keySet(); for (String field : keySet) { - Assert - .assertFalse(field.endsWith("name") || field.endsWith("ge") || field.contains("ddre")); + Assert.assertFalse( + field.endsWith("name") || field.endsWith("ge") || field.contains("ddre")); Assert.assertTrue(field.startsWith("b") || field.equals("gender")); } } @@ -81,11 +95,11 @@ public void allTest() throws IOException { private SearchHits query(String query) throws IOException { final JSONObject jsonObject = executeQuery(query); - final XContentParser parser = new JsonXContentParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(jsonObject.toString())); + final XContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(jsonObject.toString())); return SearchResponse.fromXContent(parser).getHits(); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/SubqueryIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/SubqueryIT.java index 0fd0fea7f7..16614bfd2a 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/SubqueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/SubqueryIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.both; @@ -38,9 +37,7 @@ public class SubqueryIT extends SQLIntegTestCase { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); - + @Rule public ExpectedException exceptionRule = ExpectedException.none(); @Override protected void init() throws Exception { @@ -51,50 +48,55 @@ protected void init() throws Exception { @Test public void testIN() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT dog_name " + - "FROM %s A " + - "WHERE holdersName IN (SELECT firstname 
FROM %s B) " + - "AND dog_name <> 'babala'", - TEST_INDEX_DOGSUBQUERY, TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "SELECT dog_name " + + "FROM %s A " + + "WHERE holdersName IN (SELECT firstname FROM %s B) " + + "AND dog_name <> 'babala'", + TEST_INDEX_DOGSUBQUERY, + TEST_INDEX_ACCOUNT); JSONObject response = executeQuery(query); assertThat( response, hitAll( kvString("/_source/A.dog_name", is("snoopy")), - kvString("/_source/A.dog_name", is("gogo")) - ) - ); + kvString("/_source/A.dog_name", is("gogo")))); } @Test public void testINWithAlias() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT A.dog_name " + - "FROM %s A " + - "WHERE A.holdersName IN (SELECT B.firstname FROM %s B) " + - "AND A.dog_name <> 'babala'", - TEST_INDEX_DOGSUBQUERY, TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "SELECT A.dog_name " + + "FROM %s A " + + "WHERE A.holdersName IN (SELECT B.firstname FROM %s B) " + + "AND A.dog_name <> 'babala'", + TEST_INDEX_DOGSUBQUERY, + TEST_INDEX_ACCOUNT); JSONObject response = executeQuery(query); assertThat( response, hitAll( kvString("/_source/A.dog_name", is("snoopy")), - kvString("/_source/A.dog_name", is("gogo")) - ) - ); + kvString("/_source/A.dog_name", is("gogo")))); } @Test public void testINSelectAll() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT * " + - "FROM %s A " + - "WHERE holdersName IN (SELECT firstname FROM %s B) " + - "AND dog_name <> 'babala'", - TEST_INDEX_DOGSUBQUERY, TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "SELECT * " + + "FROM %s A " + + "WHERE holdersName IN (SELECT firstname FROM %s B) " + + "AND dog_name <> 'babala'", + TEST_INDEX_DOGSUBQUERY, + TEST_INDEX_ACCOUNT); JSONObject response = executeQuery(query); assertThat( @@ -105,39 +107,38 @@ public void testINSelectAll() throws IOException { .and(kvInt("/_source/A.age", is(4))), both(kvString("/_source/A.dog_name", is("gogo"))) 
.and(kvString("/_source/A.holdersName", is("Gabrielle"))) - .and(kvInt("/_source/A.age", is(6))) - ) - ); + .and(kvInt("/_source/A.age", is(6))))); } @Test public void testINWithInnerWhere() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT dog_name " + - "FROM %s A " + - "WHERE holdersName IN (SELECT firstname FROM %s B WHERE age <> 36) " + - "AND dog_name <> 'babala'", - TEST_INDEX_DOGSUBQUERY, TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "SELECT dog_name " + + "FROM %s A " + + "WHERE holdersName IN (SELECT firstname FROM %s B WHERE age <> 36) " + + "AND dog_name <> 'babala'", + TEST_INDEX_DOGSUBQUERY, + TEST_INDEX_ACCOUNT); JSONObject response = executeQuery(query); - assertThat( - response, - hitAll( - kvString("/_source/A.dog_name", is("gogo")) - ) - ); + assertThat(response, hitAll(kvString("/_source/A.dog_name", is("gogo")))); } @Test public void testNotSupportedQuery() throws IOException { exceptionRule.expect(ResponseException.class); exceptionRule.expectMessage("Unsupported subquery"); - String query = String.format(Locale.ROOT, - "SELECT dog_name " + - "FROM %s A " + - "WHERE holdersName NOT IN (SELECT firstname FROM %s B WHERE age <> 36) " + - "AND dog_name <> 'babala'", - TEST_INDEX_DOGSUBQUERY, TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "SELECT dog_name " + + "FROM %s A " + + "WHERE holdersName NOT IN (SELECT firstname FROM %s B WHERE age <> 36) " + + "AND dog_name <> 'babala'", + TEST_INDEX_DOGSUBQUERY, + TEST_INDEX_ACCOUNT); executeQuery(query); } @@ -145,100 +146,91 @@ public void testNotSupportedQuery() throws IOException { @Ignore @Test public void testINWithDuplicate() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT dog_name " + - "FROM %s A " + - "WHERE holdersName IN (SELECT firstname FROM %s B)", - TEST_INDEX_DOGSUBQUERY, TEST_INDEX_ACCOUNT); + String query = + String.format( + Locale.ROOT, + "SELECT dog_name " + "FROM %s A " + "WHERE 
holdersName IN (SELECT firstname FROM %s B)", + TEST_INDEX_DOGSUBQUERY, + TEST_INDEX_ACCOUNT); JSONObject response = executeQuery(query); assertThat( response, hitAll( kvString("/_source/A.dog_name", is("snoopy")), - kvString("/_source/A.dog_name", is("babala")) - ) - ); + kvString("/_source/A.dog_name", is("babala")))); } @Test public void nonCorrelatedExists() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT e.name " + - "FROM %s as e " + - "WHERE EXISTS (SELECT * FROM e.projects as p)", - TEST_INDEX_EMPLOYEE_NESTED); + String query = + String.format( + Locale.ROOT, + "SELECT e.name " + "FROM %s as e " + "WHERE EXISTS (SELECT * FROM e.projects as p)", + TEST_INDEX_EMPLOYEE_NESTED); JSONObject response = executeQuery(query); assertThat( response, hitAll( kvString("/_source/name", is("Bob Smith")), - kvString("/_source/name", is("Jane Smith")) - ) - ); + kvString("/_source/name", is("Jane Smith")))); } @Test public void nonCorrelatedExistsWhere() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT e.name " + - "FROM %s as e " + - "WHERE EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 'aurora')", - TEST_INDEX_EMPLOYEE_NESTED); + String query = + String.format( + Locale.ROOT, + "SELECT e.name " + + "FROM %s as e " + + "WHERE EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 'aurora')", + TEST_INDEX_EMPLOYEE_NESTED); JSONObject response = executeQuery(query); - assertThat( - response, - hitAll( - kvString("/_source/name", is("Bob Smith")) - ) - ); + assertThat(response, hitAll(kvString("/_source/name", is("Bob Smith")))); } @Test public void nonCorrelatedExistsParentWhere() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT e.name " + - "FROM %s as e " + - "WHERE EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 'security') " + - "AND e.name LIKE 'jane'", - TEST_INDEX_EMPLOYEE_NESTED); + String query = + String.format( + Locale.ROOT, + "SELECT e.name " + + "FROM %s as e " + + 
"WHERE EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 'security') " + + "AND e.name LIKE 'jane'", + TEST_INDEX_EMPLOYEE_NESTED); JSONObject response = executeQuery(query); - assertThat( - response, - hitAll( - kvString("/_source/name", is("Jane Smith")) - ) - ); + assertThat(response, hitAll(kvString("/_source/name", is("Jane Smith")))); } @Test public void nonCorrelatedNotExists() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT e.name " + - "FROM %s as e " + - "WHERE NOT EXISTS (SELECT * FROM e.projects as p)", - TEST_INDEX_EMPLOYEE_NESTED); + String query = + String.format( + Locale.ROOT, + "SELECT e.name " + "FROM %s as e " + "WHERE NOT EXISTS (SELECT * FROM e.projects as p)", + TEST_INDEX_EMPLOYEE_NESTED); JSONObject response = executeQuery(query); assertThat( response, hitAll( kvString("/_source/name", is("Susan Smith")), - kvString("/_source/name", is("John Doe")) - ) - ); + kvString("/_source/name", is("John Doe")))); } @Test public void nonCorrelatedNotExistsWhere() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT e.name " + - "FROM %s as e " + - "WHERE NOT EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 'aurora')", - TEST_INDEX_EMPLOYEE_NESTED); + String query = + String.format( + Locale.ROOT, + "SELECT e.name " + + "FROM %s as e " + + "WHERE NOT EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 'aurora')", + TEST_INDEX_EMPLOYEE_NESTED); JSONObject response = executeQuery(query); assertThat( @@ -246,52 +238,55 @@ public void nonCorrelatedNotExistsWhere() throws IOException { hitAll( kvString("/_source/name", is("Susan Smith")), kvString("/_source/name", is("Jane Smith")), - kvString("/_source/name", is("John Doe")) - ) - ); + kvString("/_source/name", is("John Doe")))); } @Test public void nonCorrelatedNotExistsParentWhere() throws IOException { - String query = String.format(Locale.ROOT, - "SELECT e.name " + - "FROM %s as e " + - "WHERE NOT EXISTS (SELECT * FROM e.projects as p 
WHERE p.name LIKE 'security') " + - "AND e.name LIKE 'smith'", - TEST_INDEX_EMPLOYEE_NESTED); + String query = + String.format( + Locale.ROOT, + "SELECT e.name " + + "FROM %s as e " + + "WHERE NOT EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 'security') " + + "AND e.name LIKE 'smith'", + TEST_INDEX_EMPLOYEE_NESTED); JSONObject response = executeQuery(query); - assertThat( - response, - hitAll( - kvString("/_source/name", is("Susan Smith")) - ) - ); + assertThat(response, hitAll(kvString("/_source/name", is("Susan Smith")))); } @Test public void selectFromSubqueryWithCountShouldPass() throws IOException { - JSONObject result = executeQuery( - StringUtils.format("SELECT t.TEMP as count " + - "FROM (SELECT COUNT(*) as TEMP FROM %s) t", TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + StringUtils.format( + "SELECT t.TEMP as count " + "FROM (SELECT COUNT(*) as TEMP FROM %s) t", + TEST_INDEX_ACCOUNT)); assertThat(result.query("/aggregations/count/value"), equalTo(1000)); } @Test public void selectFromSubqueryWithWhereAndCountShouldPass() throws IOException { - JSONObject result = executeQuery( - StringUtils.format("SELECT t.TEMP as count " + - "FROM (SELECT COUNT(*) as TEMP FROM %s WHERE age > 30) t", TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + StringUtils.format( + "SELECT t.TEMP as count " + + "FROM (SELECT COUNT(*) as TEMP FROM %s WHERE age > 30) t", + TEST_INDEX_ACCOUNT)); assertThat(result.query("/aggregations/count/value"), equalTo(502)); } @Test public void selectFromSubqueryWithCountAndGroupByShouldPass() throws Exception { - JSONObject result = executeQuery( - StringUtils.format("SELECT t.TEMP as count " + - "FROM (SELECT COUNT(*) as TEMP FROM %s GROUP BY gender) t", TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + StringUtils.format( + "SELECT t.TEMP as count " + + "FROM (SELECT COUNT(*) as TEMP FROM %s GROUP BY gender) t", + TEST_INDEX_ACCOUNT)); assertThat(getTotalHits(result), equalTo(1000)); JSONObject 
gender = (JSONObject) result.query("/aggregations/gender"); @@ -312,11 +307,12 @@ public void selectFromSubqueryWithCountAndGroupByShouldPass() throws Exception { @Test public void selectFromSubqueryWithCountAndGroupByAndOrderByShouldPass() throws IOException { - JSONObject result = executeQuery( - StringUtils.format( - "SELECT t.TEMP as count " + - "FROM (SELECT COUNT(*) as TEMP FROM %s GROUP BY age ORDER BY TEMP) t", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + StringUtils.format( + "SELECT t.TEMP as count " + + "FROM (SELECT COUNT(*) as TEMP FROM %s GROUP BY age ORDER BY TEMP) t", + TEST_INDEX_ACCOUNT)); JSONArray buckets = (JSONArray) result.query("/aggregations/age/buckets"); List countList = new ArrayList<>(); for (int i = 0; i < buckets.length(); ++i) { @@ -328,44 +324,50 @@ public void selectFromSubqueryWithCountAndGroupByAndOrderByShouldPass() throws I @Test public void selectFromSubqueryWithCountAndGroupByAndHavingShouldPass() throws Exception { - JSONObject result = executeQuery( - StringUtils.format("SELECT t.T1 as g, t.T2 as c " + - "FROM (SELECT gender as T1, COUNT(*) as T2 " + - " FROM %s " + - " GROUP BY gender " + - " HAVING T2 > 500) t", TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + StringUtils.format( + "SELECT t.T1 as g, t.T2 as c " + + "FROM (SELECT gender as T1, COUNT(*) as T2 " + + " FROM %s " + + " GROUP BY gender " + + " HAVING T2 > 500) t", + TEST_INDEX_ACCOUNT)); assertThat(result.query("/aggregations/g/buckets/0/c/value"), equalTo(507)); } @Test public void selectFromSubqueryCountAndSum() throws IOException { - JSONObject result = executeQuery( - StringUtils.format( - "SELECT t.TEMP1 as count, t.TEMP2 as balance " + - "FROM (SELECT COUNT(*) as TEMP1, SUM(balance) as TEMP2 " + - " FROM %s) t", - TEST_INDEX_ACCOUNT)); + JSONObject result = + executeQuery( + StringUtils.format( + "SELECT t.TEMP1 as count, t.TEMP2 as balance " + + "FROM (SELECT COUNT(*) as TEMP1, SUM(balance) as TEMP2 " + + " FROM %s) t", + 
TEST_INDEX_ACCOUNT)); assertThat(result.query("/aggregations/count/value"), equalTo(1000)); - assertThat(((BigDecimal) result.query("/aggregations/balance/value")).doubleValue(), + assertThat( + ((BigDecimal) result.query("/aggregations/balance/value")).doubleValue(), closeTo(25714837.0, 0.01)); } @Test public void selectFromSubqueryWithoutAliasShouldPass() throws IOException { - JSONObject response = executeJdbcRequest( - StringUtils.format( - "SELECT a.firstname AS my_first, a.lastname AS my_last, a.age AS my_age " + - "FROM (SELECT firstname, lastname, age " + - "FROM %s " + - "WHERE age = 40 and account_number = 291) AS a", - TEST_INDEX_ACCOUNT)); - - verifySchema(response, + JSONObject response = + executeJdbcRequest( + StringUtils.format( + "SELECT a.firstname AS my_first, a.lastname AS my_last, a.age AS my_age " + + "FROM (SELECT firstname, lastname, age " + + "FROM %s " + + "WHERE age = 40 and account_number = 291) AS a", + TEST_INDEX_ACCOUNT)); + + verifySchema( + response, schema("firstname", "my_first", "text"), schema("lastname", "my_last", "text"), schema("age", "my_age", "long")); - verifyDataRows(response, - rows("Lynn", "Pollard", 40)); + verifyDataRows(response, rows("Lynn", "Pollard", 40)); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/TermQueryExplainIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/TermQueryExplainIT.java index fcc9b048c9..9d03e70825 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/TermQueryExplainIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/TermQueryExplainIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.hamcrest.Matchers.containsString; @@ -17,7 +16,6 @@ import org.opensearch.client.ResponseException; import org.opensearch.core.rest.RestStatus; - public class TermQueryExplainIT extends SQLIntegTestCase { @Override @@ -35,13 +33,15 @@ protected void init() throws Exception { @Test public 
void testNonExistingIndex() throws IOException { try { - explainQuery("SELECT firstname, lastname " + - "FROM opensearch_sql_test_fake_index " + - "WHERE firstname = 'Leo'"); + explainQuery( + "SELECT firstname, lastname " + + "FROM opensearch_sql_test_fake_index " + + "WHERE firstname = 'Leo'"); Assert.fail("Expected ResponseException, but none was thrown"); } catch (ResponseException e) { - assertThat(e.getResponse().getStatusLine().getStatusCode(), + assertThat( + e.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus())); final String entity = TestUtils.getResponseBody(e.getResponse()); assertThat(entity, containsString("no such index")); @@ -52,13 +52,13 @@ public void testNonExistingIndex() throws IOException { @Test public void testNonResolvingIndexPattern() throws IOException { try { - explainQuery("SELECT * " + - "FROM opensearch_sql_test_blah_blah* " + - "WHERE firstname = 'Leo'"); + explainQuery( + "SELECT * " + "FROM opensearch_sql_test_blah_blah* " + "WHERE firstname = 'Leo'"); Assert.fail("Expected ResponseException, but none was thrown"); } catch (ResponseException e) { - assertThat(e.getResponse().getStatusLine().getStatusCode(), + assertThat( + e.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus())); final String entity = TestUtils.getResponseBody(e.getResponse()); assertThat(entity, containsString("Field [firstname] cannot be found or used here.")); @@ -68,10 +68,11 @@ public void testNonResolvingIndexPattern() throws IOException { @Test public void testNonResolvingIndexPatternWithExistingIndex() throws IOException { - String result = explainQuery( - "SELECT * " + - "FROM opensearch_sql_test_blah_blah*, opensearch-sql_test_index_bank " + - "WHERE state = 'DC'"); + String result = + explainQuery( + "SELECT * " + + "FROM opensearch_sql_test_blah_blah*, opensearch-sql_test_index_bank " + + "WHERE state = 'DC'"); assertThat(result, containsString("\"term\":{\"state.keyword\"")); 
} @@ -79,12 +80,13 @@ public void testNonResolvingIndexPatternWithExistingIndex() throws IOException { public void testNonResolvingIndexPatternWithNonExistingIndex() throws IOException { try { explainQuery( - "SELECT firstname, lastname " + - "FROM opensearch_sql_test_blah_blah*, another_fake_index " + - "WHERE firstname = 'Leo'"); + "SELECT firstname, lastname " + + "FROM opensearch_sql_test_blah_blah*, another_fake_index " + + "WHERE firstname = 'Leo'"); Assert.fail("Expected ResponseException, but none was thrown"); } catch (ResponseException e) { - assertThat(e.getResponse().getStatusLine().getStatusCode(), + assertThat( + e.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus())); final String entity = TestUtils.getResponseBody(e.getResponse()); assertThat(entity, containsString("no such index")); @@ -95,11 +97,11 @@ public void testNonResolvingIndexPatternWithNonExistingIndex() throws IOExceptio @Test public void testNonCompatibleMappings() throws IOException { try { - explainQuery( - "SELECT * FROM opensearch-sql_test_index_dog, opensearch-sql_test_index_dog2"); + explainQuery("SELECT * FROM opensearch-sql_test_index_dog, opensearch-sql_test_index_dog2"); Assert.fail("Expected ResponseException, but none was thrown"); } catch (ResponseException e) { - assertThat(e.getResponse().getStatusLine().getStatusCode(), + assertThat( + e.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus())); final String entity = TestUtils.getResponseBody(e.getResponse()); assertThat(entity, containsString("Field [holdersName] have conflict type")); @@ -108,14 +110,15 @@ public void testNonCompatibleMappings() throws IOException { } /** - * The dog_name field has same type in dog and dog2 index. - * But, the holdersName field has different type. + * The dog_name field has same type in dog and dog2 index. But, the holdersName field has + * different type. 
*/ @Test public void testNonCompatibleMappingsButTheFieldIsNotUsed() throws IOException { - String result = explainQuery( - "SELECT dog_name " + - "FROM opensearch-sql_test_index_dog, opensearch-sql_test_index_dog2 WHERE dog_name = 'dog'"); + String result = + explainQuery( + "SELECT dog_name FROM opensearch-sql_test_index_dog, opensearch-sql_test_index_dog2" + + " WHERE dog_name = 'dog'"); System.out.println(result); assertThat(result, containsString("dog_name")); assertThat(result, containsString("_source")); @@ -123,20 +126,21 @@ public void testNonCompatibleMappingsButTheFieldIsNotUsed() throws IOException { @Test public void testEqualFieldMappings() throws IOException { - String result = explainQuery( - "SELECT color " + - "FROM opensearch-sql_test_index_dog2, opensearch-sql_test_index_dog3"); + String result = + explainQuery( + "SELECT color " + + "FROM opensearch-sql_test_index_dog2, opensearch-sql_test_index_dog3"); assertThat(result, containsString("color")); assertThat(result, containsString("_source")); } @Test public void testIdenticalMappings() throws IOException { - String result = explainQuery( - "SELECT firstname, birthdate, state " + - "FROM opensearch-sql_test_index_bank, opensearch-sql_test_index_bank_two " + - "WHERE state = 'WA' OR male = true" - ); + String result = + explainQuery( + "SELECT firstname, birthdate, state " + + "FROM opensearch-sql_test_index_bank, opensearch-sql_test_index_bank_two " + + "WHERE state = 'WA' OR male = true"); assertThat(result, containsString("term")); assertThat(result, containsString("state.keyword")); assertThat(result, containsString("_source")); @@ -144,24 +148,23 @@ public void testIdenticalMappings() throws IOException { @Test public void testIdenticalMappingsWithTypes() throws IOException { - String result = explainQuery( - "SELECT firstname, birthdate, state " + - "FROM opensearch-sql_test_index_bank/account, opensearch-sql_test_index_bank_two/account_two " + - "WHERE state = 'WA' OR male = true" - ); + 
String result = + explainQuery( + "SELECT firstname, birthdate, state FROM opensearch-sql_test_index_bank/account," + + " opensearch-sql_test_index_bank_two/account_two WHERE state = 'WA' OR male =" + + " true"); assertThat(result, containsString("term")); assertThat(result, containsString("state.keyword")); assertThat(result, containsString("_source")); } - @Test public void testIdenticalMappingsWithPartialType() throws IOException { - String result = explainQuery( - "SELECT firstname, birthdate, state " + - "FROM opensearch-sql_test_index_bank/account, opensearch-sql_test_index_bank_two " + - "WHERE state = 'WA' OR male = true" - ); + String result = + explainQuery( + "SELECT firstname, birthdate, state " + + "FROM opensearch-sql_test_index_bank/account, opensearch-sql_test_index_bank_two " + + "WHERE state = 'WA' OR male = true"); assertThat(result, containsString("term")); assertThat(result, containsString("state.keyword")); assertThat(result, containsString("_source")); @@ -170,22 +173,22 @@ public void testIdenticalMappingsWithPartialType() throws IOException { @Test public void testTextFieldOnly() throws IOException { - String result = explainQuery( - "SELECT firstname, birthdate, state " + - "FROM opensearch-sql_test_index_bank " + - "WHERE firstname = 'Abbas'" - ); + String result = + explainQuery( + "SELECT firstname, birthdate, state " + + "FROM opensearch-sql_test_index_bank " + + "WHERE firstname = 'Abbas'"); assertThat(result, containsString("term")); assertThat(result, not(containsString("firstname."))); } @Test public void testTextAndKeywordAppendsKeywordAlias() throws IOException { - String result = explainQuery( - "SELECT firstname, birthdate, state " + - "FROM opensearch-sql_test_index_bank " + - "WHERE state = 'WA' OR lastname = 'Chen'" - ); + String result = + explainQuery( + "SELECT firstname, birthdate, state " + + "FROM opensearch-sql_test_index_bank " + + "WHERE state = 'WA' OR lastname = 'Chen'"); assertThat(result, 
containsString("term")); assertThat(result, containsString("state.keyword")); assertThat(result, not(containsString("lastname."))); @@ -194,8 +197,7 @@ public void testTextAndKeywordAppendsKeywordAlias() throws IOException { @Test public void testBooleanFieldNoKeywordAlias() throws IOException { - String result = - explainQuery("SELECT * FROM opensearch-sql_test_index_bank WHERE male = false"); + String result = explainQuery("SELECT * FROM opensearch-sql_test_index_bank WHERE male = false"); assertThat(result, containsString("term")); assertThat(result, not(containsString("male."))); } @@ -203,8 +205,8 @@ public void testBooleanFieldNoKeywordAlias() throws IOException { @Test public void testDateFieldNoKeywordAlias() throws IOException { - String result = explainQuery( - "SELECT * FROM opensearch-sql_test_index_bank WHERE birthdate = '2018-08-19'"); + String result = + explainQuery("SELECT * FROM opensearch-sql_test_index_bank WHERE birthdate = '2018-08-19'"); assertThat(result, containsString("term")); assertThat(result, not(containsString("birthdate."))); } @@ -218,11 +220,11 @@ public void testNumberNoKeywordAlias() throws IOException { @Test public void inTestInWhere() throws IOException { - String result = explainQuery( - "SELECT * " + - "FROM opensearch-sql_test_index_bank " + - "WHERE state IN ('WA' , 'PA' , 'TN')" - ); + String result = + explainQuery( + "SELECT * " + + "FROM opensearch-sql_test_index_bank " + + "WHERE state IN ('WA' , 'PA' , 'TN')"); assertThat(result, containsString("term")); assertThat(result, containsString("state.keyword")); } @@ -230,53 +232,52 @@ public void inTestInWhere() throws IOException { @Test @Ignore // TODO: enable when subqueries are fixed public void inTestInWhereSubquery() throws IOException { - String result = explainQuery( - "SELECT * " + - "FROM opensearch-sql_test_index_bank/account WHERE " + - "state IN (SELECT state FROM opensearch-sql_test_index_bank WHERE city = 'Nicholson')" - ); + String result = + explainQuery( 
+ "SELECT * FROM opensearch-sql_test_index_bank/account WHERE state IN (SELECT state FROM" + + " opensearch-sql_test_index_bank WHERE city = 'Nicholson')"); assertThat(result, containsString("term")); assertThat(result, containsString("state.keyword")); } @Test public void testKeywordAliasGroupBy() throws IOException { - String result = explainQuery( - "SELECT firstname, state " + - "FROM opensearch-sql_test_index_bank/account " + - "GROUP BY firstname, state"); + String result = + explainQuery( + "SELECT firstname, state " + + "FROM opensearch-sql_test_index_bank/account " + + "GROUP BY firstname, state"); assertThat(result, containsString("term")); assertThat(result, containsString("state.keyword")); } @Test public void testKeywordAliasGroupByUsingTableAlias() throws IOException { - String result = explainQuery( - "SELECT a.firstname, a.state " + - "FROM opensearch-sql_test_index_bank/account a " + - "GROUP BY a.firstname, a.state"); + String result = + explainQuery( + "SELECT a.firstname, a.state " + + "FROM opensearch-sql_test_index_bank/account a " + + "GROUP BY a.firstname, a.state"); assertThat(result, containsString("term")); assertThat(result, containsString("state.keyword")); } @Test public void testKeywordAliasOrderBy() throws IOException { - String result = explainQuery( - "SELECT * " + - "FROM opensearch-sql_test_index_bank " + - "ORDER BY state, lastname " - ); + String result = + explainQuery( + "SELECT * " + "FROM opensearch-sql_test_index_bank " + "ORDER BY state, lastname "); assertThat(result, containsString("\"state.keyword\":{\"order\":\"asc\"")); assertThat(result, containsString("\"lastname\":{\"order\":\"asc\"}")); } @Test public void testKeywordAliasOrderByUsingTableAlias() throws IOException { - String result = explainQuery( - "SELECT * " + - "FROM opensearch-sql_test_index_bank b " + - "ORDER BY b.state, b.lastname " - ); + String result = + explainQuery( + "SELECT * " + + "FROM opensearch-sql_test_index_bank b " + + "ORDER BY b.state, 
b.lastname "); assertThat(result, containsString("\"state.keyword\":{\"order\":\"asc\"")); assertThat(result, containsString("\"lastname\":{\"order\":\"asc\"}")); } @@ -286,13 +287,13 @@ public void testKeywordAliasOrderByUsingTableAlias() throws IOException { public void testJoinWhere() throws IOException { String expectedOutput = TestUtils.fileToString("src/test/resources/expectedOutput/term_join_where", true); - String result = explainQuery( - "SELECT a.firstname, a.lastname , b.city " + - "FROM opensearch-sql_test_index_account a " + - "JOIN opensearch-sql_test_index_account b " + - "ON a.city = b.city " + - "WHERE a.city IN ('Nicholson', 'Yardville')" - ); + String result = + explainQuery( + "SELECT a.firstname, a.lastname , b.city " + + "FROM opensearch-sql_test_index_account a " + + "JOIN opensearch-sql_test_index_account b " + + "ON a.city = b.city " + + "WHERE a.city IN ('Nicholson', 'Yardville')"); assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } @@ -301,57 +302,56 @@ public void testJoinWhere() throws IOException { public void testJoinAliasMissing() throws IOException { try { explainQuery( - "SELECT a.firstname, a.lastname , b.city " + - "FROM opensearch-sql_test_index_account a " + - "JOIN opensearch-sql_test_index_account b " + - "ON a.city = b.city " + - "WHERE city IN ('Nicholson', 'Yardville')" - ); + "SELECT a.firstname, a.lastname , b.city " + + "FROM opensearch-sql_test_index_account a " + + "JOIN opensearch-sql_test_index_account b " + + "ON a.city = b.city " + + "WHERE city IN ('Nicholson', 'Yardville')"); Assert.fail("Expected ResponseException, but none was thrown"); } catch (ResponseException e) { - assertThat(e.getResponse().getStatusLine().getStatusCode(), + assertThat( + e.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus())); final String entity = TestUtils.getResponseBody(e.getResponse()); assertThat(entity, containsString("Field name [city] is 
ambiguous")); assertThat(entity, containsString("\"type\": \"VerificationException\"")); } - } @Test public void testNestedSingleConditionAllFields() throws IOException { - String result = explainQuery( - "SELECT * " + - "FROM opensearch-sql_test_index_employee_nested e, e.projects p " + - "WHERE p.name = 'something' " - ); - assertThat(result, - containsString("\"term\":{\"projects.name.keyword\":{\"value\":\"something\"")); + String result = + explainQuery( + "SELECT * " + + "FROM opensearch-sql_test_index_employee_nested e, e.projects p " + + "WHERE p.name = 'something' "); + assertThat( + result, containsString("\"term\":{\"projects.name.keyword\":{\"value\":\"something\"")); assertThat(result, containsString("\"path\":\"projects\"")); } @Test public void testNestedMultipleCondition() throws IOException { - String result = explainQuery( - "SELECT e.id, p.name " + - "FROM opensearch-sql_test_index_employee_nested e, e.projects p " + - "WHERE p.name = 'something' and p.started_year = 1990 " - ); - assertThat(result, - containsString("\"term\":{\"projects.name.keyword\":{\"value\":\"something\"")); + String result = + explainQuery( + "SELECT e.id, p.name " + + "FROM opensearch-sql_test_index_employee_nested e, e.projects p " + + "WHERE p.name = 'something' and p.started_year = 1990 "); + assertThat( + result, containsString("\"term\":{\"projects.name.keyword\":{\"value\":\"something\"")); assertThat(result, containsString("\"term\":{\"projects.started_year\":{\"value\":1990")); assertThat(result, containsString("\"path\":\"projects\"")); } @Test public void testConditionsOnDifferentNestedDocs() throws IOException { - String result = explainQuery( - "SELECT p.name, c.likes " + - "FROM opensearch-sql_test_index_employee_nested e, e.projects p, e.comments c " + - "WHERE p.name = 'something' or c.likes = 56 " - ); - assertThat(result, - containsString("\"term\":{\"projects.name.keyword\":{\"value\":\"something\"")); + String result = + explainQuery( + "SELECT p.name, 
c.likes " + + "FROM opensearch-sql_test_index_employee_nested e, e.projects p, e.comments c " + + "WHERE p.name = 'something' or c.likes = 56 "); + assertThat( + result, containsString("\"term\":{\"projects.name.keyword\":{\"value\":\"something\"")); assertThat(result, containsString("\"term\":{\"comments.likes\":{\"value\":56")); assertThat(result, containsString("\"path\":\"projects\"")); assertThat(result, containsString("\"path\":\"comments\"")); @@ -359,11 +359,11 @@ public void testConditionsOnDifferentNestedDocs() throws IOException { @Test public void testNestedSingleConditionSpecificFields() throws IOException { - String result = explainQuery( - "SELECT e.id, p.name " + - "FROM opensearch-sql_test_index_employee_nested e, e.projects p " + - "WHERE p.name = 'hello' or p.name = 'world' " - ); + String result = + explainQuery( + "SELECT e.id, p.name " + + "FROM opensearch-sql_test_index_employee_nested e, e.projects p " + + "WHERE p.name = 'hello' or p.name = 'world' "); assertThat(result, containsString("\"term\":{\"projects.name.keyword\":{\"value\":\"hello\"")); assertThat(result, containsString("\"term\":{\"projects.name.keyword\":{\"value\":\"world\"")); assertThat(result, containsString("\"path\":\"projects\"")); @@ -371,32 +371,33 @@ public void testNestedSingleConditionSpecificFields() throws IOException { @Test public void testNestedSingleGroupBy() throws IOException { - String result = explainQuery( - "SELECT e.id, p.name " + - "FROM opensearch-sql_test_index_employee_nested e, e.projects p " + - "GROUP BY p.name "); + String result = + explainQuery( + "SELECT e.id, p.name " + + "FROM opensearch-sql_test_index_employee_nested e, e.projects p " + + "GROUP BY p.name "); assertThat(result, containsString("\"terms\":{\"field\":\"projects.name.keyword\"")); assertThat(result, containsString("\"nested\":{\"path\":\"projects\"")); } @Test public void testNestedSingleOrderBy() throws IOException { - String result = explainQuery( - "SELECT e.id, p.name " + - 
"FROM opensearch-sql_test_index_employee_nested e, e.projects p " + - "ORDER BY p.name " - ); + String result = + explainQuery( + "SELECT e.id, p.name " + + "FROM opensearch-sql_test_index_employee_nested e, e.projects p " + + "ORDER BY p.name "); assertThat(result, containsString("\"sort\":[{\"projects.name.keyword\"")); assertThat(result, containsString("\"nested\":{\"path\":\"projects\"")); } @Test public void testNestedIsNotNullExplain() throws IOException { - String explain = explainQuery( - "SELECT e.name " + - "FROM opensearch-sql_test_index_employee_nested as e, e.projects as p " + - "WHERE p IS NOT NULL" - ); + String explain = + explainQuery( + "SELECT e.name " + + "FROM opensearch-sql_test_index_employee_nested as e, e.projects as p " + + "WHERE p IS NOT NULL"); assertThat(explain, containsString("\"exists\":{\"field\":\"projects\"")); assertThat(explain, containsString("\"path\":\"projects\"")); @@ -407,14 +408,15 @@ public void testNestedIsNotNullExplain() throws IOException { public void testMultiQuery() throws IOException { String expectedOutput = TestUtils.fileToString("src/test/resources/expectedOutput/term_union_where", true); - String result = explainQuery( - "SELECT firstname " + - "FROM opensearch-sql_test_index_account/account " + - "WHERE firstname = 'Amber' " + - "UNION ALL " + - "SELECT dog_name as firstname " + - "FROM opensearch-sql_test_index_dog/dog " + - "WHERE holdersName = 'Hattie' OR dog_name = 'rex'"); + String result = + explainQuery( + "SELECT firstname " + + "FROM opensearch-sql_test_index_account/account " + + "WHERE firstname = 'Amber' " + + "UNION ALL " + + "SELECT dog_name as firstname " + + "FROM opensearch-sql_test_index_dog/dog " + + "WHERE holdersName = 'Hattie' OR dog_name = 'rex'"); assertThat(result.replaceAll("\\s+", ""), equalTo(expectedOutput.replaceAll("\\s+", ""))); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/TestUtils.java 
b/integ-test/src/test/java/org/opensearch/sql/legacy/TestUtils.java index 30cee86e15..e02de782b4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/TestUtils.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/TestUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static com.google.common.base.Strings.isNullOrEmpty; @@ -36,14 +35,14 @@ public class TestUtils { - private final static String MAPPING_FILE_PATH = "src/test/resources/indexDefinitions/"; + private static final String MAPPING_FILE_PATH = "src/test/resources/indexDefinitions/"; /** * Create test index by REST client. * - * @param client client connection + * @param client client connection * @param indexName test index name - * @param mapping test index mapping or null if no predefined mapping + * @param mapping test index mapping or null if no predefined mapping */ public static void createIndexByRestClient(RestClient client, String indexName, String mapping) { Request request = new Request("PUT", "/" + indexName); @@ -54,16 +53,15 @@ public static void createIndexByRestClient(RestClient client, String indexName, } /** - * https://github.com/elastic/elasticsearch/pull/49959 - * Deprecate creation of dot-prefixed index names except for hidden and system indices. - * Create hidden index by REST client. + * https://github.com/elastic/elasticsearch/pull/49959 Deprecate creation of dot-prefixed index + * names except for hidden and system indices. Create hidden index by REST client. 
* - * @param client client connection + * @param client client connection * @param indexName test index name - * @param mapping test index mapping or null if no predefined mapping + * @param mapping test index mapping or null if no predefined mapping */ - public static void createHiddenIndexByRestClient(RestClient client, String indexName, - String mapping) { + public static void createHiddenIndexByRestClient( + RestClient client, String indexName, String mapping) { Request request = new Request("PUT", "/" + indexName); JSONObject jsonObject = isNullOrEmpty(mapping) ? new JSONObject() : new JSONObject(mapping); jsonObject.put("settings", new JSONObject("{\"index\":{\"hidden\":true}}")); @@ -73,11 +71,10 @@ public static void createHiddenIndexByRestClient(RestClient client, String index } /** - * Check if index already exists by OpenSearch index exists API which returns: - * 200 - specified indices or aliases exist - * 404 - one or more indices specified or aliases do not exist + * Check if index already exists by OpenSearch index exists API which returns: 200 - specified + * indices or aliases exist 404 - one or more indices specified or aliases do not exist * - * @param client client connection + * @param client client connection * @param indexName index name * @return true for index exist */ @@ -93,13 +90,13 @@ public static boolean isIndexExist(RestClient client, String indexName) { /** * Load test data set by REST client. 
* - * @param client client connection - * @param indexName index name + * @param client client connection + * @param indexName index name * @param dataSetFilePath file path of test data set * @throws IOException */ - public static void loadDataByRestClient(RestClient client, String indexName, - String dataSetFilePath) throws IOException { + public static void loadDataByRestClient( + RestClient client, String indexName, String dataSetFilePath) throws IOException { Path path = Paths.get(getResourceFilePath(dataSetFilePath)); Request request = new Request("POST", "/" + indexName + "/_bulk?refresh=true"); request.setJsonEntity(new String(Files.readAllBytes(path))); @@ -109,7 +106,7 @@ public static void loadDataByRestClient(RestClient client, String indexName, /** * Perform a request by REST client. * - * @param client client connection + * @param client client connection * @param request request object */ public static Response performRequest(RestClient client, Request request) { @@ -177,7 +174,6 @@ public static String getEmployeeNestedTypeIndexMapping() { return getMappingFile(mappingFile); } - public static String getNestedTypeIndexMapping() { String mappingFile = "nested_type_index_mapping.json"; return getMappingFile(mappingFile); @@ -255,8 +251,8 @@ public static void loadBulk(Client client, String jsonPath, String defaultIndex) BulkRequest bulkRequest = new BulkRequest(); try (final InputStream stream = new FileInputStream(absJsonPath); - final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); - final BufferedReader br = new BufferedReader(streamReader)) { + final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); + final BufferedReader br = new BufferedReader(streamReader)) { while (true) { @@ -285,8 +281,11 @@ public static void loadBulk(Client client, String jsonPath, String defaultIndex) BulkResponse bulkResponse = client.bulk(bulkRequest).actionGet(); if (bulkResponse.hasFailures()) { - throw new 
Exception("Failed to load test data into index " + defaultIndex + ", " + - bulkResponse.buildFailureMessage()); + throw new Exception( + "Failed to load test data into index " + + defaultIndex + + ", " + + bulkResponse.buildFailureMessage()); } System.out.println(bulkResponse.getItems().length + " documents loaded."); // ensure the documents are searchable @@ -312,8 +311,8 @@ public static String getResponseBody(Response response, boolean retainNewLines) final StringBuilder sb = new StringBuilder(); try (final InputStream is = response.getEntity().getContent(); - final BufferedReader br = new BufferedReader( - new InputStreamReader(is, StandardCharsets.UTF_8))) { + final BufferedReader br = + new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) { String line; while ((line = br.readLine()) != null) { @@ -326,15 +325,14 @@ public static String getResponseBody(Response response, boolean retainNewLines) return sb.toString(); } - public static String fileToString(final String filePathFromProjectRoot, - final boolean removeNewLines) - throws IOException { + public static String fileToString( + final String filePathFromProjectRoot, final boolean removeNewLines) throws IOException { final String absolutePath = getResourceFilePath(filePathFromProjectRoot); try (final InputStream stream = new FileInputStream(absolutePath); - final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); - final BufferedReader br = new BufferedReader(streamReader)) { + final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); + final BufferedReader br = new BufferedReader(streamReader)) { final StringBuilder stringBuilder = new StringBuilder(); String line = br.readLine(); @@ -388,12 +386,16 @@ public static List> getPermutations(final List items) { } final String currentItem = items.get(i); - result.addAll(getPermutations(smallerSet).stream().map(smallerSetPermutation -> { - final List permutation = new ArrayList<>(); - 
permutation.add(currentItem); - permutation.addAll(smallerSetPermutation); - return permutation; - }).collect(Collectors.toCollection(LinkedList::new))); + result.addAll( + getPermutations(smallerSet).stream() + .map( + smallerSetPermutation -> { + final List permutation = new ArrayList<>(); + permutation.add(currentItem); + permutation.addAll(smallerSetPermutation); + return permutation; + }) + .collect(Collectors.toCollection(LinkedList::new))); } return result; diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/TestsConstants.java b/integ-test/src/test/java/org/opensearch/sql/legacy/TestsConstants.java index 338be25a0c..29bc9813fa 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/TestsConstants.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/TestsConstants.java @@ -3,66 +3,63 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; -/** - * Created by omershelef on 18/12/14. - */ +/** Created by omershelef on 18/12/14. */ public class TestsConstants { - public final static String PERSISTENT = "persistent"; - public final static String TRANSIENT = "transient"; + public static final String PERSISTENT = "persistent"; + public static final String TRANSIENT = "transient"; - public final static String TEST_INDEX = "opensearch-sql_test_index"; + public static final String TEST_INDEX = "opensearch-sql_test_index"; - public final static String TEST_INDEX_ONLINE = TEST_INDEX + "_online"; - public final static String TEST_INDEX_ACCOUNT = TEST_INDEX + "_account"; - public final static String TEST_INDEX_PHRASE = TEST_INDEX + "_phrase"; - public final static String TEST_INDEX_DOG = TEST_INDEX + "_dog"; - public final static String TEST_INDEX_DOG2 = TEST_INDEX + "_dog2"; - public final static String TEST_INDEX_DOG3 = TEST_INDEX + "_dog3"; - public final static String TEST_INDEX_DOGSUBQUERY = TEST_INDEX + "_subquery"; - public final static String TEST_INDEX_PEOPLE = TEST_INDEX + "_people"; - public final static 
String TEST_INDEX_PEOPLE2 = TEST_INDEX + "_people2"; - public final static String TEST_INDEX_GAME_OF_THRONES = TEST_INDEX + "_game_of_thrones"; - public final static String TEST_INDEX_SYSTEM = TEST_INDEX + "_system"; - public final static String TEST_INDEX_ODBC = TEST_INDEX + "_odbc"; - public final static String TEST_INDEX_LOCATION = TEST_INDEX + "_location"; - public final static String TEST_INDEX_LOCATION2 = TEST_INDEX + "_location2"; - public final static String TEST_INDEX_NESTED_TYPE = TEST_INDEX + "_nested_type"; - public final static String TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS = + public static final String TEST_INDEX_ONLINE = TEST_INDEX + "_online"; + public static final String TEST_INDEX_ACCOUNT = TEST_INDEX + "_account"; + public static final String TEST_INDEX_PHRASE = TEST_INDEX + "_phrase"; + public static final String TEST_INDEX_DOG = TEST_INDEX + "_dog"; + public static final String TEST_INDEX_DOG2 = TEST_INDEX + "_dog2"; + public static final String TEST_INDEX_DOG3 = TEST_INDEX + "_dog3"; + public static final String TEST_INDEX_DOGSUBQUERY = TEST_INDEX + "_subquery"; + public static final String TEST_INDEX_PEOPLE = TEST_INDEX + "_people"; + public static final String TEST_INDEX_PEOPLE2 = TEST_INDEX + "_people2"; + public static final String TEST_INDEX_GAME_OF_THRONES = TEST_INDEX + "_game_of_thrones"; + public static final String TEST_INDEX_SYSTEM = TEST_INDEX + "_system"; + public static final String TEST_INDEX_ODBC = TEST_INDEX + "_odbc"; + public static final String TEST_INDEX_LOCATION = TEST_INDEX + "_location"; + public static final String TEST_INDEX_LOCATION2 = TEST_INDEX + "_location2"; + public static final String TEST_INDEX_NESTED_TYPE = TEST_INDEX + "_nested_type"; + public static final String TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS = TEST_INDEX + "_nested_type_without_arrays"; - public final static String TEST_INDEX_NESTED_SIMPLE = TEST_INDEX + "_nested_simple"; - public final static String TEST_INDEX_NESTED_WITH_QUOTES = + public static 
final String TEST_INDEX_NESTED_SIMPLE = TEST_INDEX + "_nested_simple"; + public static final String TEST_INDEX_NESTED_WITH_QUOTES = TEST_INDEX + "_nested_type_with_quotes"; - public final static String TEST_INDEX_EMPLOYEE_NESTED = TEST_INDEX + "_employee_nested"; - public final static String TEST_INDEX_JOIN_TYPE = TEST_INDEX + "_join_type"; - public final static String TEST_INDEX_UNEXPANDED_OBJECT = TEST_INDEX + "_unexpanded_object"; - public final static String TEST_INDEX_BANK = TEST_INDEX + "_bank"; - public final static String TEST_INDEX_BANK_TWO = TEST_INDEX_BANK + "_two"; - public final static String TEST_INDEX_BANK_WITH_NULL_VALUES = + public static final String TEST_INDEX_EMPLOYEE_NESTED = TEST_INDEX + "_employee_nested"; + public static final String TEST_INDEX_JOIN_TYPE = TEST_INDEX + "_join_type"; + public static final String TEST_INDEX_UNEXPANDED_OBJECT = TEST_INDEX + "_unexpanded_object"; + public static final String TEST_INDEX_BANK = TEST_INDEX + "_bank"; + public static final String TEST_INDEX_BANK_TWO = TEST_INDEX_BANK + "_two"; + public static final String TEST_INDEX_BANK_WITH_NULL_VALUES = TEST_INDEX_BANK + "_with_null_values"; - public final static String TEST_INDEX_BANK_CSV_SANITIZE = TEST_INDEX_BANK + "_csv_sanitize"; - public final static String TEST_INDEX_BANK_RAW_SANITIZE = TEST_INDEX_BANK + "_raw_sanitize"; - public final static String TEST_INDEX_ORDER = TEST_INDEX + "_order"; - public final static String TEST_INDEX_WEBLOG = TEST_INDEX + "_weblog"; - public final static String TEST_INDEX_DATE = TEST_INDEX + "_date"; - public final static String TEST_INDEX_DATE_TIME = TEST_INDEX + "_datetime"; - public final static String TEST_INDEX_DEEP_NESTED = TEST_INDEX + "_deep_nested"; - public final static String TEST_INDEX_STRINGS = TEST_INDEX + "_strings"; - public final static String TEST_INDEX_DATATYPE_NUMERIC = TEST_INDEX + "_datatypes_numeric"; - public final static String TEST_INDEX_DATATYPE_NONNUMERIC = TEST_INDEX + "_datatypes_nonnumeric"; - 
public final static String TEST_INDEX_BEER = TEST_INDEX + "_beer"; - public final static String TEST_INDEX_NULL_MISSING = TEST_INDEX + "_null_missing"; - public final static String TEST_INDEX_CALCS = TEST_INDEX + "_calcs"; - public final static String TEST_INDEX_DATE_FORMATS = TEST_INDEX + "_date_formats"; - public final static String TEST_INDEX_WILDCARD = TEST_INDEX + "_wildcard"; - public final static String TEST_INDEX_MULTI_NESTED_TYPE = TEST_INDEX + "_multi_nested"; - public final static String TEST_INDEX_NESTED_WITH_NULLS = TEST_INDEX + "_nested_with_nulls"; - public final static String DATASOURCES = ".ql-datasources"; + public static final String TEST_INDEX_BANK_CSV_SANITIZE = TEST_INDEX_BANK + "_csv_sanitize"; + public static final String TEST_INDEX_BANK_RAW_SANITIZE = TEST_INDEX_BANK + "_raw_sanitize"; + public static final String TEST_INDEX_ORDER = TEST_INDEX + "_order"; + public static final String TEST_INDEX_WEBLOG = TEST_INDEX + "_weblog"; + public static final String TEST_INDEX_DATE = TEST_INDEX + "_date"; + public static final String TEST_INDEX_DATE_TIME = TEST_INDEX + "_datetime"; + public static final String TEST_INDEX_DEEP_NESTED = TEST_INDEX + "_deep_nested"; + public static final String TEST_INDEX_STRINGS = TEST_INDEX + "_strings"; + public static final String TEST_INDEX_DATATYPE_NUMERIC = TEST_INDEX + "_datatypes_numeric"; + public static final String TEST_INDEX_DATATYPE_NONNUMERIC = TEST_INDEX + "_datatypes_nonnumeric"; + public static final String TEST_INDEX_BEER = TEST_INDEX + "_beer"; + public static final String TEST_INDEX_NULL_MISSING = TEST_INDEX + "_null_missing"; + public static final String TEST_INDEX_CALCS = TEST_INDEX + "_calcs"; + public static final String TEST_INDEX_DATE_FORMATS = TEST_INDEX + "_date_formats"; + public static final String TEST_INDEX_WILDCARD = TEST_INDEX + "_wildcard"; + public static final String TEST_INDEX_MULTI_NESTED_TYPE = TEST_INDEX + "_multi_nested"; + public static final String TEST_INDEX_NESTED_WITH_NULLS 
= TEST_INDEX + "_nested_with_nulls"; + public static final String DATASOURCES = ".ql-datasources"; - public final static String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"; - public final static String TS_DATE_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS"; - public final static String SIMPLE_DATE_FORMAT = "yyyy-MM-dd"; + public static final String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"; + public static final String TS_DATE_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS"; + public static final String SIMPLE_DATE_FORMAT = "yyyy-MM-dd"; } diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/TypeInformationIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/TypeInformationIT.java index 2bd3835a3a..421aae9622 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/TypeInformationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/TypeInformationIT.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy; import static org.opensearch.sql.util.MatcherUtils.schema; import static org.opensearch.sql.util.MatcherUtils.verifySchema; import org.json.JSONObject; - -import org.junit.Ignore; import org.junit.Test; public class TypeInformationIT extends SQLIntegTestCase { @@ -28,8 +25,8 @@ protected void init() throws Exception { @Test public void testAbsWithIntFieldReturnsInt() { JSONObject response = - executeJdbcRequest("SELECT ABS(age) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " ORDER BY age LIMIT 5"); + executeJdbcRequest( + "SELECT ABS(age) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY age LIMIT 5"); verifySchema(response, schema("ABS(age)", null, "long")); } @@ -37,8 +34,10 @@ public void testAbsWithIntFieldReturnsInt() { @Test public void testCeilWithLongFieldReturnsLong() { JSONObject response = - executeJdbcRequest("SELECT CEIL(balance) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " ORDER BY balance LIMIT 5"); + executeJdbcRequest( + "SELECT CEIL(balance) FROM " + + 
TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY balance LIMIT 5"); verifySchema(response, schema("CEIL(balance)", null, "long")); } @@ -48,8 +47,8 @@ public void testCeilWithLongFieldReturnsLong() { */ @Test public void testPiReturnsDouble() { - JSONObject response = executeJdbcRequest("SELECT PI() FROM " + TestsConstants.TEST_INDEX_ACCOUNT - + " LIMIT 1"); + JSONObject response = + executeJdbcRequest("SELECT PI() FROM " + TestsConstants.TEST_INDEX_ACCOUNT + " LIMIT 1"); verifySchema(response, schema("PI()", null, "double")); } @@ -59,16 +58,22 @@ public void testPiReturnsDouble() { */ @Test public void testUpperWithStringFieldReturnsString() { - JSONObject response = executeJdbcRequest("SELECT UPPER(firstname) AS firstname_alias FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY firstname_alias LIMIT 2"); + JSONObject response = + executeJdbcRequest( + "SELECT UPPER(firstname) AS firstname_alias FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY firstname_alias LIMIT 2"); verifySchema(response, schema("UPPER(firstname)", "firstname_alias", "keyword")); } @Test public void testLowerWithTextFieldReturnsText() { - JSONObject response = executeJdbcRequest("SELECT LOWER(firstname) FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY firstname LIMIT 2"); + JSONObject response = + executeJdbcRequest( + "SELECT LOWER(firstname) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY firstname LIMIT 2"); verifySchema(response, schema("LOWER(firstname)", null, "keyword")); } @@ -78,8 +83,11 @@ public void testLowerWithTextFieldReturnsText() { */ @Test public void testLengthWithTextFieldReturnsInt() { - JSONObject response = executeJdbcRequest("SELECT length(firstname) FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY firstname LIMIT 2"); + JSONObject response = + executeJdbcRequest( + "SELECT length(firstname) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY firstname LIMIT 2"); verifySchema(response, schema("length(firstname)", 
null, "integer")); } @@ -87,8 +95,10 @@ public void testLengthWithTextFieldReturnsInt() { @Test public void testLengthWithGroupByExpr() { JSONObject response = - executeJdbcRequest("SELECT Length(firstname) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + - " GROUP BY LENGTH(firstname) LIMIT 5"); + executeJdbcRequest( + "SELECT Length(firstname) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " GROUP BY LENGTH(firstname) LIMIT 5"); verifySchema(response, schema("Length(firstname)", null, "integer")); } @@ -98,16 +108,22 @@ public void testLengthWithGroupByExpr() { */ @Test public void testSinWithLongFieldReturnsDouble() { - JSONObject response = executeJdbcRequest("SELECT sin(balance) FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY firstname LIMIT 2"); + JSONObject response = + executeJdbcRequest( + "SELECT sin(balance) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY firstname LIMIT 2"); verifySchema(response, schema("sin(balance)", null, "double")); } @Test public void testRadiansWithLongFieldReturnsDouble() { - JSONObject response = executeJdbcRequest("SELECT radians(balance) FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY firstname LIMIT 2"); + JSONObject response = + executeJdbcRequest( + "SELECT radians(balance) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY firstname LIMIT 2"); verifySchema(response, schema("radians(balance)", null, "double")); } @@ -117,16 +133,22 @@ public void testRadiansWithLongFieldReturnsDouble() { */ @Test public void testAddWithIntReturnsInt() { - JSONObject response = executeJdbcRequest("SELECT (balance + 5) AS balance_add_five FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY firstname LIMIT 2"); + JSONObject response = + executeJdbcRequest( + "SELECT (balance + 5) AS balance_add_five FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY firstname LIMIT 2"); verifySchema(response, schema("(balance + 5)", "balance_add_five", "long")); } @Test public void 
testSubtractLongWithLongReturnsLong() { - JSONObject response = executeJdbcRequest("SELECT (balance - balance) FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + " ORDER BY firstname LIMIT 2"); + JSONObject response = + executeJdbcRequest( + "SELECT (balance - balance) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " ORDER BY firstname LIMIT 2"); verifySchema(response, schema("(balance - balance)", null, "long")); } @@ -136,17 +158,18 @@ public void testSubtractLongWithLongReturnsLong() { */ @Test public void testDayOfWeekWithKeywordReturnsText() { - JSONObject response = executeJdbcRequest("SELECT DAYOFWEEK(insert_time) FROM " - + TestsConstants.TEST_INDEX_ONLINE + " LIMIT 2"); + JSONObject response = + executeJdbcRequest( + "SELECT DAYOFWEEK(insert_time) FROM " + TestsConstants.TEST_INDEX_ONLINE + " LIMIT 2"); - verifySchema(response, - schema("DAYOFWEEK(insert_time)", null, "integer")); + verifySchema(response, schema("DAYOFWEEK(insert_time)", null, "integer")); } @Test public void testYearWithKeywordReturnsText() { - JSONObject response = executeJdbcRequest("SELECT YEAR(insert_time) FROM " - + TestsConstants.TEST_INDEX_ONLINE + " LIMIT 2"); + JSONObject response = + executeJdbcRequest( + "SELECT YEAR(insert_time) FROM " + TestsConstants.TEST_INDEX_ONLINE + " LIMIT 2"); verifySchema(response, schema("YEAR(insert_time)", null, "integer")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/ConvertTZFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/ConvertTZFunctionIT.java index 48cdb9684f..105669c7ca 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/ConvertTZFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/ConvertTZFunctionIT.java @@ -1,184 +1,190 @@ - /* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ package org.opensearch.sql.ppl; - import org.json.JSONObject; - import 
org.junit.Test; - import org.opensearch.sql.legacy.SQLIntegTestCase; - - import java.io.IOException; - - import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DATE; - import static org.opensearch.sql.util.MatcherUtils.rows; - import static org.opensearch.sql.util.MatcherUtils.schema; - import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; - import static org.opensearch.sql.util.MatcherUtils.verifySchema; - import static org.opensearch.sql.util.MatcherUtils.verifySome; - - public class ConvertTZFunctionIT extends PPLIntegTestCase { - - - @Override - public void init() throws IOException { - loadIndex(Index.DATE); - } - - - @Test - public void inRangeZeroToPositive() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2008-05-15 12:00:00','+00:00','+10:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2008-05-15 22:00:00")); - } - - @Test - public void inRangeZeroToZero() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 00:00:00','-00:00','+00:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2021-05-12 00:00:00")); - } - - @Test - public void inRangePositiveToPositive() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 00:00:00','+10:00','+11:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2021-05-12 01:00:00")); - } - - @Test - public void inRangeNegativeToPositive() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-08:00','+09:00') | fields f", - TEST_INDEX_DATE)); - 
verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2021-05-13 04:34:50")); - } - - @Test - public void inRangeNoTZChange() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 11:34:50','+09:00','+09:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2021-05-12 11:34:50")); - } - - @Test - public void inRangeTwentyFourHourChange() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+12:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2021-05-13 11:34:50")); - } - - @Test - public void inRangeFifteenMinuteTZ() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 13:00:00','+09:30','+05:45') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2021-05-12 09:15:00")); - } - - @Test - public void nullFromFieldUnder() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-30 11:34:50','-17:00','+08:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); - } - - @Test - public void nullToFieldOver() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+15:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); - } - - @Test - public void 
nullFromGarbageInput1() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','test') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); - } - - @Test - public void nullFromGarbageInput2() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021test','-12:00','+00:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); - } - - @Test - public void nullDateTimeInvalidDateValueFebruary() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-02-30 10:00:00','+00:00','+00:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); - } - - @Test - public void nullDateTimeInvalidDateValueApril() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-04-31 10:00:00','+00:00','+00:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); - } - - @Test - public void nullDateTimeInvalidDateValueMonth() throws IOException { - JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-13-03 10:00:00','+00:00','+00:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); - } +import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DATE; +import static org.opensearch.sql.util.MatcherUtils.rows; +import static 
org.opensearch.sql.util.MatcherUtils.schema; +import static org.opensearch.sql.util.MatcherUtils.verifySchema; +import static org.opensearch.sql.util.MatcherUtils.verifySome; +import java.io.IOException; +import org.json.JSONObject; +import org.junit.Test; + +public class ConvertTZFunctionIT extends PPLIntegTestCase { + + @Override + public void init() throws IOException { + loadIndex(Index.DATE); + } + + @Test + public void inRangeZeroToPositive() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2008-05-15 12:00:00','+00:00','+10:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows("2008-05-15 22:00:00")); + } + + @Test + public void inRangeZeroToZero() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 00:00:00','-00:00','+00:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows("2021-05-12 00:00:00")); + } + + @Test + public void inRangePositiveToPositive() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 00:00:00','+10:00','+11:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows("2021-05-12 01:00:00")); + } + + @Test + public void inRangeNegativeToPositive() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-08:00','+09:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows("2021-05-13 04:34:50")); + } + + @Test + public void inRangeNoTZChange() throws IOException { + JSONObject result = 
+ executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 11:34:50','+09:00','+09:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows("2021-05-12 11:34:50")); + } + + @Test + public void inRangeTwentyFourHourChange() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+12:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows("2021-05-13 11:34:50")); + } + + @Test + public void inRangeFifteenMinuteTZ() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 13:00:00','+09:30','+05:45') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows("2021-05-12 09:15:00")); + } + + @Test + public void nullFromFieldUnder() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-30 11:34:50','-17:00','+08:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); + } + + @Test + public void nullToFieldOver() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+15:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); + } + + @Test + public void nullFromGarbageInput1() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','test') | fields f", + 
TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); + } + + @Test + public void nullFromGarbageInput2() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021test','-12:00','+00:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); + } + + @Test + public void nullDateTimeInvalidDateValueFebruary() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-02-30 10:00:00','+00:00','+00:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); + } + + @Test + public void nullDateTimeInvalidDateValueApril() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-04-31 10:00:00','+00:00','+00:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); + } + + @Test + public void nullDateTimeInvalidDateValueMonth() throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-13-03 10:00:00','+00:00','+00:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } +} diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/CrossClusterSearchIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/CrossClusterSearchIT.java index a8e686a893..19e3debdf0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/CrossClusterSearchIT.java +++ 
b/integ-test/src/test/java/org/opensearch/sql/ppl/CrossClusterSearchIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -23,13 +22,13 @@ public class CrossClusterSearchIT extends PPLIntegTestCase { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = ExpectedException.none(); - private final static String TEST_INDEX_BANK_REMOTE = REMOTE_CLUSTER + ":" + TEST_INDEX_BANK; - private final static String TEST_INDEX_DOG_REMOTE = REMOTE_CLUSTER + ":" + TEST_INDEX_DOG; - private final static String TEST_INDEX_DOG_MATCH_ALL_REMOTE = MATCH_ALL_REMOTE_CLUSTER + ":" + TEST_INDEX_DOG; - private final static String TEST_INDEX_ACCOUNT_REMOTE = REMOTE_CLUSTER + ":" + TEST_INDEX_ACCOUNT; + private static final String TEST_INDEX_BANK_REMOTE = REMOTE_CLUSTER + ":" + TEST_INDEX_BANK; + private static final String TEST_INDEX_DOG_REMOTE = REMOTE_CLUSTER + ":" + TEST_INDEX_DOG; + private static final String TEST_INDEX_DOG_MATCH_ALL_REMOTE = + MATCH_ALL_REMOTE_CLUSTER + ":" + TEST_INDEX_DOG; + private static final String TEST_INDEX_ACCOUNT_REMOTE = REMOTE_CLUSTER + ":" + TEST_INDEX_ACCOUNT; @Override public void init() throws IOException { @@ -49,7 +48,8 @@ public void testCrossClusterSearchAllFields() throws IOException { @Test public void testMatchAllCrossClusterSearchAllFields() throws IOException { - JSONObject result = executeQuery(String.format("search source=%s", TEST_INDEX_DOG_MATCH_ALL_REMOTE)); + JSONObject result = + executeQuery(String.format("search source=%s", TEST_INDEX_DOG_MATCH_ALL_REMOTE)); verifyColumn(result, columnName("dog_name"), columnName("holdersName"), columnName("age")); } @@ -64,18 +64,21 @@ public void testCrossClusterSearchWithoutLocalFieldMappingShouldFail() throws IO @Test public void testCrossClusterSearchCommandWithLogicalExpression() throws IOException { - 
JSONObject result = executeQuery(String.format( - "search source=%s firstname='Hattie' | fields firstname", TEST_INDEX_BANK_REMOTE)); + JSONObject result = + executeQuery( + String.format( + "search source=%s firstname='Hattie' | fields firstname", TEST_INDEX_BANK_REMOTE)); verifyDataRows(result, rows("Hattie")); } @Test public void testCrossClusterSearchMultiClusters() throws IOException { - JSONObject result = executeQuery(String.format( - "search source=%s,%s firstname='Hattie' | fields firstname", TEST_INDEX_BANK_REMOTE, TEST_INDEX_BANK)); - verifyDataRows(result, - rows("Hattie"), - rows("Hattie")); + JSONObject result = + executeQuery( + String.format( + "search source=%s,%s firstname='Hattie' | fields firstname", + TEST_INDEX_BANK_REMOTE, TEST_INDEX_BANK)); + verifyDataRows(result, rows("Hattie"), rows("Hattie")); } @Test @@ -106,8 +109,7 @@ public void testCrossClusterDescribeAllFields() throws IOException { columnName("SCOPE_TABLE"), columnName("SOURCE_DATA_TYPE"), columnName("IS_AUTOINCREMENT"), - columnName("IS_GENERATEDCOLUMN") - ); + columnName("IS_GENERATEDCOLUMN")); } @Test @@ -138,7 +140,6 @@ public void testMatchAllCrossClusterDescribeAllFields() throws IOException { columnName("SCOPE_TABLE"), columnName("SOURCE_DATA_TYPE"), columnName("IS_AUTOINCREMENT"), - columnName("IS_GENERATEDCOLUMN") - ); + columnName("IS_GENERATEDCOLUMN")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/CsvFormatIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/CsvFormatIT.java index 430ae9a7b2..a9eb18c2a1 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/CsvFormatIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/CsvFormatIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK_CSV_SANITIZE; @@ -22,29 +21,40 @@ public void init() throws IOException { @Test public void sanitizeTest() throws IOException { - String 
result = executeCsvQuery( - String.format(Locale.ROOT, "source=%s | fields firstname, lastname", TEST_INDEX_BANK_CSV_SANITIZE)); - assertEquals(StringUtils.format( - "firstname,lastname%n" - + "'+Amber JOHnny,Duke Willmington+%n" - + "'-Hattie,Bond-%n" - + "'=Nanette,Bates=%n" - + "'@Dale,Adams@%n" - + "\",Elinor\",\"Ratliff,,,\"%n"), + String result = + executeCsvQuery( + String.format( + Locale.ROOT, + "source=%s | fields firstname, lastname", + TEST_INDEX_BANK_CSV_SANITIZE)); + assertEquals( + StringUtils.format( + "firstname,lastname%n" + + "'+Amber JOHnny,Duke Willmington+%n" + + "'-Hattie,Bond-%n" + + "'=Nanette,Bates=%n" + + "'@Dale,Adams@%n" + + "\",Elinor\",\"Ratliff,,,\"%n"), result); } @Test public void escapeSanitizeTest() throws IOException { - String result = executeCsvQuery( - String.format(Locale.ROOT, "source=%s | fields firstname, lastname", TEST_INDEX_BANK_CSV_SANITIZE), false); - assertEquals(StringUtils.format( - "firstname,lastname%n" - + "+Amber JOHnny,Duke Willmington+%n" - + "-Hattie,Bond-%n" - + "=Nanette,Bates=%n" - + "@Dale,Adams@%n" - + "\",Elinor\",\"Ratliff,,,\"%n"), + String result = + executeCsvQuery( + String.format( + Locale.ROOT, + "source=%s | fields firstname, lastname", + TEST_INDEX_BANK_CSV_SANITIZE), + false); + assertEquals( + StringUtils.format( + "firstname,lastname%n" + + "+Amber JOHnny,Duke Willmington+%n" + + "-Hattie,Bond-%n" + + "=Nanette,Bates=%n" + + "@Dale,Adams@%n" + + "\",Elinor\",\"Ratliff,,,\"%n"), result); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DataTypeIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DataTypeIT.java index 9911c35d8f..8b5a6d498e 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DataTypeIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DataTypeIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.SQLIntegTestCase.Index.DATA_TYPE_NONNUMERIC; @@ -27,9 
+26,9 @@ public void init() throws IOException { @Test public void test_numeric_data_types() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s", TEST_INDEX_DATATYPE_NUMERIC)); - verifySchema(result, + JSONObject result = executeQuery(String.format("source=%s", TEST_INDEX_DATATYPE_NUMERIC)); + verifySchema( + result, schema("long_number", "long"), schema("integer_number", "integer"), schema("short_number", "short"), @@ -42,9 +41,9 @@ public void test_numeric_data_types() throws IOException { @Test public void test_nonnumeric_data_types() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s", TEST_INDEX_DATATYPE_NONNUMERIC)); - verifySchema(result, + JSONObject result = executeQuery(String.format("source=%s", TEST_INDEX_DATATYPE_NONNUMERIC)); + verifySchema( + result, schema("boolean_value", "boolean"), schema("keyword_value", "string"), schema("text_value", "string"), @@ -58,15 +57,18 @@ public void test_nonnumeric_data_types() throws IOException { @Test public void test_long_integer_data_type() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval " - + " int1 = 2147483647," - + " int2 = -2147483648," - + " long1 = 2147483648," - + " long2 = -2147483649 | " - + "fields int1, int2, long1, long2 ", - TEST_INDEX_DATATYPE_NUMERIC)); - verifySchema(result, + JSONObject result = + executeQuery( + String.format( + "source=%s | eval " + + " int1 = 2147483647," + + " int2 = -2147483648," + + " long1 = 2147483648," + + " long2 = -2147483649 | " + + "fields int1, int2, long1, long2 ", + TEST_INDEX_DATATYPE_NUMERIC)); + verifySchema( + result, schema("int1", "integer"), schema("int2", "integer"), schema("long1", "long"), diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeComparisonIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeComparisonIT.java index b795977e7d..6f6b5cc297 100644 --- 
a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeComparisonIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeComparisonIT.java @@ -13,14 +13,12 @@ import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; import static org.opensearch.sql.util.MatcherUtils.verifySchema; +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import java.io.IOException; import java.time.LocalDate; import java.util.Arrays; import java.util.TimeZone; - -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.json.JSONObject; import org.junit.After; import org.junit.Before; import org.junit.jupiter.api.Test; @@ -49,9 +47,10 @@ public void resetTimeZone() { private String name; private Boolean expectedResult; - public DateTimeComparisonIT(@Name("functionCall") String functionCall, - @Name("name") String name, - @Name("expectedResult") Boolean expectedResult) { + public DateTimeComparisonIT( + @Name("functionCall") String functionCall, + @Name("name") String name, + @Name("expectedResult") Boolean expectedResult) { this.functionCall = functionCall; this.name = name; this.expectedResult = expectedResult; @@ -59,548 +58,707 @@ public DateTimeComparisonIT(@Name("functionCall") String functionCall, @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareTwoDates() { - return Arrays.asList($$( - $("DATE('2020-09-16') = DATE('2020-09-16')", "eq1", true), - $("DATE('2020-09-16') = DATE('1961-04-12')", "eq2", false), - $("DATE('2020-09-16') != DATE('1984-12-15')", "neq1", true), - $("DATE('1961-04-12') != DATE('1984-12-15')", "neq2", true), - $("DATE('1961-04-12') != DATE('1961-04-12')", "neq3", false), - $("DATE('1984-12-15') > DATE('1961-04-12')", "gt1", true), - $("DATE('1984-12-15') > DATE('2020-09-16')", "gt2", false), - $("DATE('1961-04-12') < 
DATE('1984-12-15')", "lt1", true), - $("DATE('1984-12-15') < DATE('1961-04-12')", "lt2", false), - $("DATE('1984-12-15') >= DATE('1961-04-12')", "gte1", true), - $("DATE('1984-12-15') >= DATE('1984-12-15')", "gte2", true), - $("DATE('1984-12-15') >= DATE('2020-09-16')", "gte3", false), - $("DATE('1961-04-12') <= DATE('1984-12-15')", "lte1", true), - $("DATE('1961-04-12') <= DATE('1961-04-12')", "lte2", true), - $("DATE('2020-09-16') <= DATE('1961-04-12')", "lte3", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') = DATE('2020-09-16')", "eq1", true), + $("DATE('2020-09-16') = DATE('1961-04-12')", "eq2", false), + $("DATE('2020-09-16') != DATE('1984-12-15')", "neq1", true), + $("DATE('1961-04-12') != DATE('1984-12-15')", "neq2", true), + $("DATE('1961-04-12') != DATE('1961-04-12')", "neq3", false), + $("DATE('1984-12-15') > DATE('1961-04-12')", "gt1", true), + $("DATE('1984-12-15') > DATE('2020-09-16')", "gt2", false), + $("DATE('1961-04-12') < DATE('1984-12-15')", "lt1", true), + $("DATE('1984-12-15') < DATE('1961-04-12')", "lt2", false), + $("DATE('1984-12-15') >= DATE('1961-04-12')", "gte1", true), + $("DATE('1984-12-15') >= DATE('1984-12-15')", "gte2", true), + $("DATE('1984-12-15') >= DATE('2020-09-16')", "gte3", false), + $("DATE('1961-04-12') <= DATE('1984-12-15')", "lte1", true), + $("DATE('1961-04-12') <= DATE('1961-04-12')", "lte2", true), + $("DATE('2020-09-16') <= DATE('1961-04-12')", "lte3", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareTwoTimes() { - return Arrays.asList($$( - $("TIME('09:16:37') = TIME('09:16:37')", "eq1", true), - $("TIME('09:16:37') = TIME('04:12:42')", "eq2", false), - $("TIME('09:16:37') != TIME('12:15:22')", "neq1", true), - $("TIME('04:12:42') != TIME('12:15:22')", "neq2", true), - $("TIME('04:12:42') != TIME('04:12:42')", "neq3", false), - $("TIME('12:15:22') > TIME('04:12:42')", "gt1", true), - $("TIME('12:15:22') > TIME('19:16:03')", "gt2", false), - 
$("TIME('04:12:42') < TIME('12:15:22')", "lt1", true), - $("TIME('14:12:38') < TIME('12:15:22')", "lt2", false), - $("TIME('12:15:22') >= TIME('04:12:42')", "gte1", true), - $("TIME('12:15:22') >= TIME('12:15:22')", "gte2", true), - $("TIME('12:15:22') >= TIME('19:16:03')", "gte3", false), - $("TIME('04:12:42') <= TIME('12:15:22')", "lte1", true), - $("TIME('04:12:42') <= TIME('04:12:42')", "lte2", true), - $("TIME('19:16:03') <= TIME('04:12:42')", "lte3", false) - )); + return Arrays.asList( + $$( + $("TIME('09:16:37') = TIME('09:16:37')", "eq1", true), + $("TIME('09:16:37') = TIME('04:12:42')", "eq2", false), + $("TIME('09:16:37') != TIME('12:15:22')", "neq1", true), + $("TIME('04:12:42') != TIME('12:15:22')", "neq2", true), + $("TIME('04:12:42') != TIME('04:12:42')", "neq3", false), + $("TIME('12:15:22') > TIME('04:12:42')", "gt1", true), + $("TIME('12:15:22') > TIME('19:16:03')", "gt2", false), + $("TIME('04:12:42') < TIME('12:15:22')", "lt1", true), + $("TIME('14:12:38') < TIME('12:15:22')", "lt2", false), + $("TIME('12:15:22') >= TIME('04:12:42')", "gte1", true), + $("TIME('12:15:22') >= TIME('12:15:22')", "gte2", true), + $("TIME('12:15:22') >= TIME('19:16:03')", "gte3", false), + $("TIME('04:12:42') <= TIME('12:15:22')", "lte1", true), + $("TIME('04:12:42') <= TIME('04:12:42')", "lte2", true), + $("TIME('19:16:03') <= TIME('04:12:42')", "lte3", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareTwoDateTimes() { - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", "eq1", true), - $("DATETIME('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", "eq2", false), - $("DATETIME('2020-09-16 10:20:30') != DATETIME('1984-12-15 22:15:07')", "neq1", true), - $("DATETIME('1984-12-15 22:15:08') != DATETIME('1984-12-15 22:15:07')", "neq2", true), - $("DATETIME('1961-04-12 09:07:00') != DATETIME('1961-04-12 09:07:00')", "neq3", false), - $("DATETIME('1984-12-15 22:15:07') > 
DATETIME('1961-04-12 22:15:07')", "gt1", true), - $("DATETIME('1984-12-15 22:15:07') > DATETIME('1984-12-15 22:15:06')", "gt2", true), - $("DATETIME('1984-12-15 22:15:07') > DATETIME('2020-09-16 10:20:30')", "gt3", false), - $("DATETIME('1961-04-12 09:07:00') < DATETIME('1984-12-15 09:07:00')", "lt1", true), - $("DATETIME('1984-12-15 22:15:07') < DATETIME('1984-12-15 22:15:08')", "lt2", true), - $("DATETIME('1984-12-15 22:15:07') < DATETIME('1961-04-12 09:07:00')", "lt3", false), - $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1961-04-12 09:07:00')", "gte1", true), - $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1984-12-15 22:15:07')", "gte2", true), - $("DATETIME('1984-12-15 22:15:07') >= DATETIME('2020-09-16 10:20:30')", "gte3", false), - $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", "lte1", true), - $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1961-04-12 09:07:00')", "lte2", true), - $("DATETIME('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", "lte3", false) - )); + return Arrays.asList( + $$( + $("DATETIME('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", "eq1", true), + $("DATETIME('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", "eq2", false), + $("DATETIME('2020-09-16 10:20:30') != DATETIME('1984-12-15 22:15:07')", "neq1", true), + $("DATETIME('1984-12-15 22:15:08') != DATETIME('1984-12-15 22:15:07')", "neq2", true), + $("DATETIME('1961-04-12 09:07:00') != DATETIME('1961-04-12 09:07:00')", "neq3", false), + $("DATETIME('1984-12-15 22:15:07') > DATETIME('1961-04-12 22:15:07')", "gt1", true), + $("DATETIME('1984-12-15 22:15:07') > DATETIME('1984-12-15 22:15:06')", "gt2", true), + $("DATETIME('1984-12-15 22:15:07') > DATETIME('2020-09-16 10:20:30')", "gt3", false), + $("DATETIME('1961-04-12 09:07:00') < DATETIME('1984-12-15 09:07:00')", "lt1", true), + $("DATETIME('1984-12-15 22:15:07') < DATETIME('1984-12-15 22:15:08')", "lt2", true), + $("DATETIME('1984-12-15 22:15:07') < DATETIME('1961-04-12 
09:07:00')", "lt3", false), + $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1961-04-12 09:07:00')", "gte1", true), + $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1984-12-15 22:15:07')", "gte2", true), + $("DATETIME('1984-12-15 22:15:07') >= DATETIME('2020-09-16 10:20:30')", "gte3", false), + $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", "lte1", true), + $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1961-04-12 09:07:00')", "lte2", true), + $( + "DATETIME('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", + "lte3", + false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareTwoTimestamps() { - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", "eq1", true), - $("TIMESTAMP('2020-09-16 10:20:30') = TIMESTAMP('1961-04-12 09:07:00')", "eq2", false), - $("TIMESTAMP('2020-09-16 10:20:30') != TIMESTAMP('1984-12-15 22:15:07')", "neq1", true), - $("TIMESTAMP('1984-12-15 22:15:08') != TIMESTAMP('1984-12-15 22:15:07')", "neq2", true), - $("TIMESTAMP('1961-04-12 09:07:00') != TIMESTAMP('1961-04-12 09:07:00')", "neq3", false), - $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('1961-04-12 22:15:07')", "gt1", true), - $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('1984-12-15 22:15:06')", "gt2", true), - $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('2020-09-16 10:20:30')", "gt3", false), - $("TIMESTAMP('1961-04-12 09:07:00') < TIMESTAMP('1984-12-15 09:07:00')", "lt1", true), - $("TIMESTAMP('1984-12-15 22:15:07') < TIMESTAMP('1984-12-15 22:15:08')", "lt2", true), - $("TIMESTAMP('1984-12-15 22:15:07') < TIMESTAMP('1961-04-12 09:07:00')", "lt3", false), - $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('1961-04-12 09:07:00')", "gte1", true), - $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('1984-12-15 22:15:07')", "gte2", true), - $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('2020-09-16 10:20:30')", "gte3", false), - $("TIMESTAMP('1961-04-12 
09:07:00') <= TIMESTAMP('1984-12-15 22:15:07')", "lte1", true), - $("TIMESTAMP('1961-04-12 09:07:00') <= TIMESTAMP('1961-04-12 09:07:00')", "lte2", true), - $("TIMESTAMP('2020-09-16 10:20:30') <= TIMESTAMP('1961-04-12 09:07:00')", "lte3", false) - )); + return Arrays.asList( + $$( + $("TIMESTAMP('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", "eq1", true), + $("TIMESTAMP('2020-09-16 10:20:30') = TIMESTAMP('1961-04-12 09:07:00')", "eq2", false), + $("TIMESTAMP('2020-09-16 10:20:30') != TIMESTAMP('1984-12-15 22:15:07')", "neq1", true), + $("TIMESTAMP('1984-12-15 22:15:08') != TIMESTAMP('1984-12-15 22:15:07')", "neq2", true), + $( + "TIMESTAMP('1961-04-12 09:07:00') != TIMESTAMP('1961-04-12 09:07:00')", + "neq3", + false), + $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('1961-04-12 22:15:07')", "gt1", true), + $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('1984-12-15 22:15:06')", "gt2", true), + $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('2020-09-16 10:20:30')", "gt3", false), + $("TIMESTAMP('1961-04-12 09:07:00') < TIMESTAMP('1984-12-15 09:07:00')", "lt1", true), + $("TIMESTAMP('1984-12-15 22:15:07') < TIMESTAMP('1984-12-15 22:15:08')", "lt2", true), + $("TIMESTAMP('1984-12-15 22:15:07') < TIMESTAMP('1961-04-12 09:07:00')", "lt3", false), + $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('1961-04-12 09:07:00')", "gte1", true), + $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('1984-12-15 22:15:07')", "gte2", true), + $( + "TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('2020-09-16 10:20:30')", + "gte3", + false), + $("TIMESTAMP('1961-04-12 09:07:00') <= TIMESTAMP('1984-12-15 22:15:07')", "lte1", true), + $("TIMESTAMP('1961-04-12 09:07:00') <= TIMESTAMP('1961-04-12 09:07:00')", "lte2", true), + $( + "TIMESTAMP('2020-09-16 10:20:30') <= TIMESTAMP('1961-04-12 09:07:00')", + "lte3", + false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareEqTimestampWithOtherTypes() { var today = LocalDate.now().toString(); 
- return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('1961-04-12 09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), - $("DATE('2020-09-16') = TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') = DATE('1961-04-12')", "ts_d_f", false), - $("DATE('1961-04-12') = TIMESTAMP('1984-12-15 22:15:07')", "d_ts_f", false), - $("TIMESTAMP('" + today + " 10:20:30') = TIME('10:20:30')", "ts_t_t", true), - $("TIME('10:20:30') = TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') = TIME('09:07:00')", "ts_t_f", false), - $("TIME('09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", + "ts_dt_f", + false), + $( + "DATETIME('1961-04-12 09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), + $("DATE('2020-09-16') = TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') = DATE('1961-04-12')", "ts_d_f", false), + $("DATE('1961-04-12') = TIMESTAMP('1984-12-15 22:15:07')", "d_ts_f", false), + $("TIMESTAMP('" + today + " 10:20:30') = TIME('10:20:30')", "ts_t_t", true), + $("TIME('10:20:30') = TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') = TIME('09:07:00')", "ts_t_f", 
false), + $("TIME('09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareEqDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('1961-04-12 09:07:00') = DATETIME('1984-12-15 22:15:07')", "ts_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), - $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = TIME('09:07:00')", "dt_t_f", false), - $("TIME('09:07:00') = DATETIME('1984-12-15 22:15:07')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') = TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_f", + false), + $( + "TIMESTAMP('1961-04-12 09:07:00') = DATETIME('1984-12-15 22:15:07')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), + $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), + $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", 
false), + $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), + $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('2020-09-16 10:20:30') = TIME('09:07:00')", "dt_t_f", false), + $("TIME('09:07:00') = DATETIME('1984-12-15 22:15:07')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareEqDateWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATE('2020-09-16') = TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), - $("DATE('2020-09-16') = TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), - $("TIMESTAMP('1984-12-15 09:07:00') = DATE('1984-12-15')", "ts_d_f", false), - $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), - $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), - $("DATE('" + today + "') = TIME('00:00:00')", "d_t_t", true), - $("TIME('00:00:00') = DATE('" + today + "')", "t_d_t", true), - $("DATE('2020-09-16') = TIME('09:07:00')", "d_t_f", false), - $("TIME('09:07:00') = DATE('" + today + "')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') = TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), + $("DATE('2020-09-16') = TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), + $("TIMESTAMP('1984-12-15 09:07:00') = DATE('1984-12-15')", "ts_d_f", false), + $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), + $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('1961-04-12 10:20:30') = 
DATE('1961-04-12')", "dt_d_f", false), + $("DATE('" + today + "') = TIME('00:00:00')", "d_t_t", true), + $("TIME('00:00:00') = DATE('" + today + "')", "t_d_t", true), + $("DATE('2020-09-16') = TIME('09:07:00')", "d_t_f", false), + $("TIME('09:07:00') = DATE('" + today + "')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareEqTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') = DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') = TIME('10:20:30')", "dt_t_f", false), - $("TIME('10:20:30') = TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 10:20:30') = TIME('10:20:30')", "ts_t_t", true), - $("TIME('22:15:07') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), - $("TIMESTAMP('1984-12-15 10:20:30') = TIME('10:20:30')", "ts_t_f", false), - $("TIME('00:00:00') = DATE('" + today + "')", "t_d_t", true), - $("DATE('" + today + "') = TIME('00:00:00')", "d_t_t", true), - $("TIME('09:07:00') = DATE('" + today + "')", "t_d_f", false), - $("DATE('2020-09-16') = TIME('09:07:00')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), + $("TIME('09:07:00') = DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), + $("DATETIME('" + today + " 09:07:00') = TIME('10:20:30')", "dt_t_f", false), + $("TIME('10:20:30') = TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 10:20:30') = TIME('10:20:30')", "ts_t_t", true), + $("TIME('22:15:07') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), + $("TIMESTAMP('1984-12-15 10:20:30') = TIME('10:20:30')", 
"ts_t_f", false), + $("TIME('00:00:00') = DATE('" + today + "')", "t_d_t", true), + $("DATE('" + today + "') = TIME('00:00:00')", "d_t_t", true), + $("TIME('09:07:00') = DATE('" + today + "')", "t_d_f", false), + $("DATE('2020-09-16') = TIME('09:07:00')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareNeqTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') != DATETIME('1961-04-12 09:07:00')", "ts_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') != TIMESTAMP('1984-12-15 22:15:07')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') != DATE('1961-04-12')", "ts_d_t", true), - $("DATE('1961-04-12') != TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), - $("DATE('2020-09-16') != TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') != TIME('09:07:00')", "ts_t_t", true), - $("TIME('09:07:00') != TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 10:20:30') != TIME('10:20:30')", "ts_t_f", false), - $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('1961-04-12 09:07:00')", + "ts_dt_t", + true), + $( + "DATETIME('1961-04-12 09:07:00') != TIMESTAMP('1984-12-15 22:15:07')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", + "ts_dt_f", + false), + $( + "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') != DATE('1961-04-12')", "ts_d_t", true), + 
$("DATE('1961-04-12') != TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), + $("DATE('2020-09-16') != TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('2020-09-16 10:20:30') != TIME('09:07:00')", "ts_t_t", true), + $("TIME('09:07:00') != TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 10:20:30') != TIME('10:20:30')", "ts_t_f", false), + $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareNeqDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') != TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') != DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), - $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), - $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') != TIME('09:07:00')", "dt_t_t", true), - $("TIME('09:07:00') != DATETIME('1984-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), - $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_t", + true), + $( + "TIMESTAMP('1961-04-12 09:07:00') != DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + 
"DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_f", + false), + $( + "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), + $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), + $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), + $("DATETIME('2020-09-16 10:20:30') != TIME('09:07:00')", "dt_t_t", true), + $("TIME('09:07:00') != DATETIME('1984-12-15 22:15:07')", "t_dt_t", true), + $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), + $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareNeqDateWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATE('2020-09-16') != TIMESTAMP('1961-04-12 09:07:00')", "d_ts_t", true), - $("TIMESTAMP('1984-12-15 09:07:00') != DATE('1984-12-15')", "ts_d_t", true), - $("DATE('2020-09-16') != TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), - $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), - $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), - $("DATE('2020-09-16') != TIME('09:07:00')", "d_t_t", true), - $("TIME('09:07:00') != DATE('" + today + "')", "t_d_t", true), - $("DATE('" + today + "') != TIME('00:00:00')", "d_t_f", false), - $("TIME('00:00:00') != DATE('" + today + "')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') != TIMESTAMP('1961-04-12 09:07:00')", 
"d_ts_t", true), + $("TIMESTAMP('1984-12-15 09:07:00') != DATE('1984-12-15')", "ts_d_t", true), + $("DATE('2020-09-16') != TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), + $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('1961-04-12 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), + $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), + $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), + $("DATE('2020-09-16') != TIME('09:07:00')", "d_t_t", true), + $("TIME('09:07:00') != DATE('" + today + "')", "t_d_t", true), + $("DATE('" + today + "') != TIME('00:00:00')", "d_t_f", false), + $("TIME('00:00:00') != DATE('" + today + "')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareNeqTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('09:07:00') != DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') != TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), - $("TIME('22:15:07') != TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('1984-12-15 10:20:30') != TIME('10:20:30')", "ts_t_t", true), - $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), - $("TIMESTAMP('" + today + " 10:20:30') != TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') != DATE('" + today + "')", "t_d_t", true), - $("DATE('2020-09-16') != TIME('09:07:00')", "d_t_t", true), - $("TIME('00:00:00') != DATE('" + today + "')", "t_d_f", false), - $("DATE('" + today + "') != TIME('00:00:00')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('09:07:00') != DATETIME('1961-04-12 09:07:00')", 
"t_dt_t", true), + $("DATETIME('" + today + " 09:07:00') != TIME('10:20:30')", "dt_t_t", true), + $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false), + $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), + $("TIME('22:15:07') != TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('1984-12-15 10:20:30') != TIME('10:20:30')", "ts_t_t", true), + $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), + $("TIMESTAMP('" + today + " 10:20:30') != TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') != DATE('" + today + "')", "t_d_t", true), + $("DATE('2020-09-16') != TIME('09:07:00')", "d_t_t", true), + $("TIME('00:00:00') != DATE('" + today + "')", "t_d_f", false), + $("DATE('" + today + "') != TIME('00:00:00')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLtTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2061-04-12 09:07:00')", "ts_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') < TIMESTAMP('1984-12-15 22:15:07')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2020-09-16 10:20:30')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') < TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') < DATE('2077-04-12')", "ts_d_t", true), - $("DATE('1961-04-12') < TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') < DATE('1961-04-12')", "ts_d_f", false), - $("DATE('2020-09-16') < TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') < TIME('09:07:00')", "ts_t_t", true), - $("TIME('09:07:00') < TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 10:20:30') < TIME('10:20:30')", "ts_t_f", false), - $("TIME('20:50:40') < TIMESTAMP('" + today + " 10:20:30')", 
"t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2061-04-12 09:07:00')", + "ts_dt_t", + true), + $( + "DATETIME('1961-04-12 09:07:00') < TIMESTAMP('1984-12-15 22:15:07')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2020-09-16 10:20:30')", + "ts_dt_f", + false), + $( + "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') < DATE('2077-04-12')", "ts_d_t", true), + $("DATE('1961-04-12') < TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') < DATE('1961-04-12')", "ts_d_f", false), + $("DATE('2020-09-16') < TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('2020-09-16 10:20:30') < TIME('09:07:00')", "ts_t_t", true), + $("TIME('09:07:00') < TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 10:20:30') < TIME('10:20:30')", "ts_t_f", false), + $("TIME('20:50:40') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLtDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2077-04-12 09:07:00')", "dt_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') < DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') < DATETIME('1984-12-15 22:15:07')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') < DATE('3077-04-12')", "dt_d_t", true), - $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') < DATE('2020-09-16')", "dt_d_f", false), - $("DATE('2020-09-16') < DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') < 
TIME('09:07:00')", "dt_t_t", true), - $("TIME('09:07:00') < DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') < TIME('10:20:30')", "dt_t_f", false), - $("TIME('20:40:50') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2077-04-12 09:07:00')", + "dt_ts_t", + true), + $( + "TIMESTAMP('1961-04-12 09:07:00') < DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_f", + false), + $( + "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('1984-12-15 22:15:07')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 10:20:30') < DATE('3077-04-12')", "dt_d_t", true), + $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') < DATE('2020-09-16')", "dt_d_f", false), + $("DATE('2020-09-16') < DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), + $("DATETIME('2020-09-16 10:20:30') < TIME('09:07:00')", "dt_t_t", true), + $("TIME('09:07:00') < DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), + $("DATETIME('" + today + " 10:20:30') < TIME('10:20:30')", "dt_t_f", false), + $("TIME('20:40:50') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLtDateWithOtherTypes() { - return Arrays.asList($$( - $("DATE('2020-09-16') < TIMESTAMP('3077-04-12 09:07:00')", "d_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') < DATE('1984-12-15')", "ts_d_t", true), - $("DATE('2020-09-16') < TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('2077-04-12 09:07:00') < DATE('2020-09-16')", "ts_d_f", false), - $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') < DATE('1984-11-15')", "dt_d_t", true), - $("DATE('2020-09-16') < DATETIME('2020-09-16 00:00:00')", "d_dt_f", 
false), - $("DATETIME('2020-09-16 00:00:00') < DATE('1984-03-22')", "dt_d_f", false), - $("DATE('2020-09-16') < TIME('09:07:00')", "d_t_t", true), - $("TIME('09:07:00') < DATE('3077-04-12')", "t_d_t", true), - $("DATE('3077-04-12') < TIME('00:00:00')", "d_t_f", false), - $("TIME('00:00:00') < DATE('2020-09-16')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') < TIMESTAMP('3077-04-12 09:07:00')", "d_ts_t", true), + $("TIMESTAMP('1961-04-12 09:07:00') < DATE('1984-12-15')", "ts_d_t", true), + $("DATE('2020-09-16') < TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('2077-04-12 09:07:00') < DATE('2020-09-16')", "ts_d_f", false), + $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('1961-04-12 10:20:30') < DATE('1984-11-15')", "dt_d_t", true), + $("DATE('2020-09-16') < DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), + $("DATETIME('2020-09-16 00:00:00') < DATE('1984-03-22')", "dt_d_f", false), + $("DATE('2020-09-16') < TIME('09:07:00')", "d_t_t", true), + $("TIME('09:07:00') < DATE('3077-04-12')", "t_d_t", true), + $("DATE('3077-04-12') < TIME('00:00:00')", "d_t_f", false), + $("TIME('00:00:00') < DATE('2020-09-16')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLtTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('09:07:00') < DATETIME('3077-04-12 09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') < TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 20:40:50') < TIME('10:20:30')", "dt_t_f", false), - $("TIME('22:15:07') < TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('1984-12-15 10:20:30') < TIME('10:20:30')", "ts_t_t", true), - $("TIME('10:20:30') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), - $("TIMESTAMP('" + today + " 
20:50:42') < TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') < DATE('3077-04-12')", "t_d_t", true), - $("DATE('2020-09-16') < TIME('09:07:00')", "d_t_t", true), - $("TIME('00:00:00') < DATE('1961-04-12')", "t_d_f", false), - $("DATE('3077-04-12') < TIME('10:20:30')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('09:07:00') < DATETIME('3077-04-12 09:07:00')", "t_dt_t", true), + $("DATETIME('" + today + " 09:07:00') < TIME('10:20:30')", "dt_t_t", true), + $("TIME('10:20:30') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false), + $("DATETIME('" + today + " 20:40:50') < TIME('10:20:30')", "dt_t_f", false), + $("TIME('22:15:07') < TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('1984-12-15 10:20:30') < TIME('10:20:30')", "ts_t_t", true), + $("TIME('10:20:30') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), + $("TIMESTAMP('" + today + " 20:50:42') < TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') < DATE('3077-04-12')", "t_d_t", true), + $("DATE('2020-09-16') < TIME('09:07:00')", "d_t_t", true), + $("TIME('00:00:00') < DATE('1961-04-12')", "t_d_f", false), + $("DATE('3077-04-12') < TIME('10:20:30')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGtTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2020-09-16 10:20:25')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') > TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2061-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('1961-04-12 09:07:00') > TIMESTAMP('1984-12-15 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') > DATE('1961-04-12')", "ts_d_t", true), - $("DATE('2020-09-16') > TIMESTAMP('2020-09-15 22:15:07')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') > DATE('2077-04-12')", "ts_d_f", false), - 
$("DATE('1961-04-12') > TIMESTAMP('1961-04-12 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('3077-07-08 20:20:30') > TIME('10:20:30')", "ts_t_t", true), - $("TIME('20:50:40') > TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 10:20:30') > TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') > TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2020-09-16 10:20:25')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') > TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2061-04-12 09:07:00')", + "ts_dt_f", + false), + $( + "DATETIME('1961-04-12 09:07:00') > TIMESTAMP('1984-12-15 09:07:00')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') > DATE('1961-04-12')", "ts_d_t", true), + $("DATE('2020-09-16') > TIMESTAMP('2020-09-15 22:15:07')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') > DATE('2077-04-12')", "ts_d_f", false), + $("DATE('1961-04-12') > TIMESTAMP('1961-04-12 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('3077-07-08 20:20:30') > TIME('10:20:30')", "ts_t_t", true), + $("TIME('20:50:40') > TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 10:20:30') > TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') > TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGtDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:31') > TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') > DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') > TIMESTAMP('2077-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('1961-04-12 09:07:00') > DATETIME('1961-04-12 09:07:00')", "ts_dt_f", 
false), - $("DATETIME('3077-04-12 10:20:30') > DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') > DATE('2020-09-16')", "dt_d_f", false), - $("DATE('1961-04-12') > DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('3077-04-12 10:20:30') > TIME('09:07:00')", "dt_t_t", true), - $("TIME('20:40:50') > DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') > TIME('10:20:30')", "dt_t_f", false), - $("TIME('09:07:00') > DATETIME('3077-12-15 22:15:07')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:31') > TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') > TIMESTAMP('2077-04-12 09:07:00')", + "dt_ts_f", + false), + $( + "TIMESTAMP('1961-04-12 09:07:00') > DATETIME('1961-04-12 09:07:00')", + "ts_dt_f", + false), + $("DATETIME('3077-04-12 10:20:30') > DATE('2020-09-16')", "dt_d_t", true), + $("DATE('2020-09-16') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') > DATE('2020-09-16')", "dt_d_f", false), + $("DATE('1961-04-12') > DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('3077-04-12 10:20:30') > TIME('09:07:00')", "dt_t_t", true), + $("TIME('20:40:50') > DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('" + today + " 10:20:30') > TIME('10:20:30')", "dt_t_f", false), + $("TIME('09:07:00') > DATETIME('3077-12-15 22:15:07')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGtDateWithOtherTypes() { - return Arrays.asList($$( - $("DATE('2020-09-16') > TIMESTAMP('1961-04-12 09:07:00')", "d_ts_t", true), - $("TIMESTAMP('2077-04-12 09:07:00') > DATE('2020-09-16')", "ts_d_t", true), - $("DATE('2020-09-16') 
> TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('1961-04-12 09:07:00') > DATE('1984-12-15')", "ts_d_f", false), - $("DATE('1984-12-15') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') > DATE('1984-03-22')", "dt_d_t", true), - $("DATE('2020-09-16') > DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') > DATE('1984-11-15')", "dt_d_f", false), - $("DATE('3077-04-12') > TIME('00:00:00')", "d_t_t", true), - $("TIME('00:00:00') > DATE('2020-09-16')", "t_d_t", true), - $("DATE('2020-09-16') > TIME('09:07:00')", "d_t_f", false), - $("TIME('09:07:00') > DATE('3077-04-12')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') > TIMESTAMP('1961-04-12 09:07:00')", "d_ts_t", true), + $("TIMESTAMP('2077-04-12 09:07:00') > DATE('2020-09-16')", "ts_d_t", true), + $("DATE('2020-09-16') > TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('1961-04-12 09:07:00') > DATE('1984-12-15')", "ts_d_f", false), + $("DATE('1984-12-15') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') > DATE('1984-03-22')", "dt_d_t", true), + $("DATE('2020-09-16') > DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), + $("DATETIME('1961-04-12 10:20:30') > DATE('1984-11-15')", "dt_d_f", false), + $("DATE('3077-04-12') > TIME('00:00:00')", "d_t_t", true), + $("TIME('00:00:00') > DATE('2020-09-16')", "t_d_t", true), + $("DATE('2020-09-16') > TIME('09:07:00')", "d_t_f", false), + $("TIME('09:07:00') > DATE('3077-04-12')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGtTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('09:07:00') > DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 20:40:50') > TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') > DATETIME('" + today + " 10:20:30')", 
"t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') > TIME('10:20:30')", "dt_t_f", false), - $("TIME('22:15:07') > TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 20:50:42') > TIME('10:20:30')", "ts_t_t", true), - $("TIME('10:20:30') > TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), - $("TIMESTAMP('1984-12-15 10:20:30') > TIME('10:20:30')", "ts_t_f", false), - $("TIME('00:00:00') > DATE('1961-04-12')", "t_d_t", true), - $("DATE('3077-04-12') > TIME('10:20:30')", "d_t_t", true), - $("TIME('09:07:00') > DATE('3077-04-12')", "t_d_f", false), - $("DATE('2020-09-16') > TIME('09:07:00')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('09:07:00') > DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), + $("DATETIME('" + today + " 20:40:50') > TIME('10:20:30')", "dt_t_t", true), + $("TIME('10:20:30') > DATETIME('" + today + " 10:20:30')", "t_dt_f", false), + $("DATETIME('" + today + " 09:07:00') > TIME('10:20:30')", "dt_t_f", false), + $("TIME('22:15:07') > TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 20:50:42') > TIME('10:20:30')", "ts_t_t", true), + $("TIME('10:20:30') > TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), + $("TIMESTAMP('1984-12-15 10:20:30') > TIME('10:20:30')", "ts_t_f", false), + $("TIME('00:00:00') > DATE('1961-04-12')", "t_d_t", true), + $("DATE('3077-04-12') > TIME('10:20:30')", "d_t_t", true), + $("TIME('09:07:00') > DATE('3077-04-12')", "t_d_f", false), + $("DATE('2020-09-16') > TIME('09:07:00')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLteTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('2020-09-16 10:20:30')", "ts_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') <= TIMESTAMP('1984-12-15 22:15:07')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1961-04-12 
09:07:00')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('2077-04-12')", "ts_d_t", true), - $("DATE('2020-09-16') <= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('1961-04-12')", "ts_d_f", false), - $("DATE('2077-04-12') <= TIMESTAMP('1984-12-15 22:15:07')", "d_ts_f", false), - $("TIMESTAMP('" + today + " 10:20:30') <= TIME('10:20:30')", "ts_t_t", true), - $("TIME('09:07:00') <= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('3077-09-16 10:20:30') <= TIME('09:07:00')", "ts_t_f", false), - $("TIME('20:50:40') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('2020-09-16 10:20:30')", + "ts_dt_t", + true), + $( + "DATETIME('1961-04-12 09:07:00') <= TIMESTAMP('1984-12-15 22:15:07')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", + "ts_dt_f", + false), + $( + "DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('2077-04-12')", "ts_d_t", true), + $("DATE('2020-09-16') <= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('1961-04-12')", "ts_d_f", false), + $("DATE('2077-04-12') <= TIMESTAMP('1984-12-15 22:15:07')", "d_ts_f", false), + $("TIMESTAMP('" + today + " 10:20:30') <= TIME('10:20:30')", "ts_t_t", true), + $("TIME('09:07:00') <= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('3077-09-16 10:20:30') <= TIME('09:07:00')", "ts_t_f", false), + $("TIME('20:50:40') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLteDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return 
Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('3077-09-16 10:20:30') <= TIMESTAMP('2077-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1984-12-15 22:15:07')", "ts_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') <= DATE('2020-09-16')", "dt_d_t", true), - $("DATE('1961-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') <= DATE('1984-04-12')", "dt_d_f", false), - $("DATE('2020-09-16') <= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') <= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') <= DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('3077-09-16 10:20:30') <= TIME('19:07:00')", "dt_t_f", false), - $("TIME('20:40:50') <= DATETIME('" + today + " 10:20:30')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('3077-09-16 10:20:30') <= TIMESTAMP('2077-04-12 09:07:00')", + "dt_ts_f", + false), + $( + "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1984-12-15 22:15:07')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 00:00:00') <= DATE('2020-09-16')", "dt_d_t", true), + $("DATE('1961-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('2020-09-16 10:20:30') <= DATE('1984-04-12')", "dt_d_f", false), + $("DATE('2020-09-16') <= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), + $("DATETIME('" + today + " 10:20:30') <= TIME('10:20:30')", "dt_t_t", true), + $("TIME('09:07:00') <= DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), + $("DATETIME('3077-09-16 10:20:30') <= TIME('19:07:00')", "dt_t_f", 
false), + $("TIME('20:40:50') <= DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLteDateWithOtherTypes() { - return Arrays.asList($$( - $("DATE('2020-09-16') <= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') <= DATE('1984-12-15')", "ts_d_t", true), - $("DATE('2020-09-16') <= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), - $("TIMESTAMP('2077-04-12 09:07:00') <= DATE('2020-09-16')", "ts_d_f", false), - $("DATE('2020-09-16') <= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') <= DATE('1984-11-15')", "dt_d_t", true), - $("DATE('2077-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') <= DATE('1984-03-22')", "dt_d_f", false), - $("DATE('2020-09-16') <= TIME('09:07:00')", "d_t_t", true), - $("TIME('09:07:00') <= DATE('3077-04-12')", "t_d_t", true), - $("DATE('3077-04-12') <= TIME('00:00:00')", "d_t_f", false), - $("TIME('00:00:00') <= DATE('2020-09-16')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') <= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('1961-04-12 09:07:00') <= DATE('1984-12-15')", "ts_d_t", true), + $("DATE('2020-09-16') <= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), + $("TIMESTAMP('2077-04-12 09:07:00') <= DATE('2020-09-16')", "ts_d_f", false), + $("DATE('2020-09-16') <= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), + $("DATETIME('1961-04-12 10:20:30') <= DATE('1984-11-15')", "dt_d_t", true), + $("DATE('2077-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('2020-09-16 00:00:00') <= DATE('1984-03-22')", "dt_d_f", false), + $("DATE('2020-09-16') <= TIME('09:07:00')", "d_t_t", true), + $("TIME('09:07:00') <= DATE('3077-04-12')", "t_d_t", true), + $("DATE('3077-04-12') <= TIME('00:00:00')", "d_t_f", false), + $("TIME('00:00:00') <= 
DATE('2020-09-16')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLteTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('10:20:30') <= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') <= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') <= DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), - $("DATETIME('" + today + " 20:40:50') <= TIME('10:20:30')", "dt_t_f", false), - $("TIME('10:20:30') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('1984-12-15 10:20:30') <= TIME('10:20:30')", "ts_t_t", true), - $("TIME('22:15:07') <= TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), - $("TIMESTAMP('" + today + " 20:50:42') <= TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') <= DATE('3077-04-12')", "t_d_t", true), - $("DATE('2020-09-16') <= TIME('09:07:00')", "d_t_t", true), - $("TIME('00:00:00') <= DATE('1961-04-12')", "t_d_f", false), - $("DATE('3077-04-12') <= TIME('10:20:30')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('10:20:30') <= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('" + today + " 09:07:00') <= TIME('10:20:30')", "dt_t_t", true), + $("TIME('09:07:00') <= DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), + $("DATETIME('" + today + " 20:40:50') <= TIME('10:20:30')", "dt_t_f", false), + $("TIME('10:20:30') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('1984-12-15 10:20:30') <= TIME('10:20:30')", "ts_t_t", true), + $("TIME('22:15:07') <= TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), + $("TIMESTAMP('" + today + " 20:50:42') <= TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') <= DATE('3077-04-12')", "t_d_t", true), + $("DATE('2020-09-16') <= TIME('09:07:00')", "d_t_t", true), + $("TIME('00:00:00') <= DATE('1961-04-12')", "t_d_f", false), + $("DATE('3077-04-12') <= 
TIME('10:20:30')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGteTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2020-09-16 10:20:30')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2061-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('1961-04-12 09:07:00') >= TIMESTAMP('1984-12-15 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('1961-04-12')", "ts_d_t", true), - $("DATE('2020-09-16') >= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('2077-04-12')", "ts_d_f", false), - $("DATE('1961-04-11') >= TIMESTAMP('1961-04-12 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('" + today + " 10:20:30') >= TIME('10:20:30')", "ts_t_t", true), - $("TIME('20:50:40') >= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('1977-07-08 10:20:30') >= TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2020-09-16 10:20:30')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2061-04-12 09:07:00')", + "ts_dt_f", + false), + $( + "DATETIME('1961-04-12 09:07:00') >= TIMESTAMP('1984-12-15 09:07:00')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('1961-04-12')", "ts_d_t", true), + $("DATE('2020-09-16') >= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('2077-04-12')", "ts_d_f", false), + $("DATE('1961-04-11') >= TIMESTAMP('1961-04-12 00:00:00')", "d_ts_f", false), + 
$("TIMESTAMP('" + today + " 10:20:30') >= TIME('10:20:30')", "ts_t_t", true), + $("TIME('20:50:40') >= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('1977-07-08 10:20:30') >= TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGteDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2077-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('1961-04-12 00:00:00') >= DATETIME('1961-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') >= DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') >= DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') >= DATE('2020-09-16')", "dt_d_f", false), - $("DATE('1961-04-12') >= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') >= TIME('10:20:30')", "dt_t_t", true), - $("TIME('20:40:50') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') >= TIME('09:07:00')", "dt_t_f", false), - $("TIME('09:07:00') >= DATETIME('3077-12-15 22:15:07')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2077-04-12 09:07:00')", + "dt_ts_f", + false), + $( + "TIMESTAMP('1961-04-12 00:00:00') >= DATETIME('1961-04-12 09:07:00')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 00:00:00') >= DATE('2020-09-16')", 
"dt_d_t", true), + $("DATE('2020-09-16') >= DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), + $("DATETIME('1961-04-12 09:07:00') >= DATE('2020-09-16')", "dt_d_f", false), + $("DATE('1961-04-12') >= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('" + today + " 10:20:30') >= TIME('10:20:30')", "dt_t_t", true), + $("TIME('20:40:50') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('1961-04-12 09:07:00') >= TIME('09:07:00')", "dt_t_f", false), + $("TIME('09:07:00') >= DATETIME('3077-12-15 22:15:07')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGteDateWithOtherTypes() { - return Arrays.asList($$( - $("DATE('2020-09-16') >= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2077-04-12 09:07:00') >= DATE('2020-09-16')", "ts_d_t", true), - $("DATE('1961-04-12') >= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), - $("TIMESTAMP('1961-04-12 09:07:00') >= DATE('1984-12-15')", "ts_d_f", false), - $("DATE('2020-09-16') >= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') >= DATE('1984-03-22')", "dt_d_t", true), - $("DATE('1960-12-15') >= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') >= DATE('1984-11-15')", "dt_d_f", false), - $("DATE('3077-04-12') >= TIME('00:00:00')", "d_t_t", true), - $("TIME('00:00:00') >= DATE('2020-09-16')", "t_d_t", true), - $("DATE('2020-09-16') >= TIME('09:07:00')", "d_t_f", false), - $("TIME('09:07:00') >= DATE('3077-04-12')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') >= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2077-04-12 09:07:00') >= DATE('2020-09-16')", "ts_d_t", true), + $("DATE('1961-04-12') >= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), + $("TIMESTAMP('1961-04-12 09:07:00') >= DATE('1984-12-15')", "ts_d_f", false), + $("DATE('2020-09-16') >= DATETIME('2020-09-16 
00:00:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') >= DATE('1984-03-22')", "dt_d_t", true), + $("DATE('1960-12-15') >= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), + $("DATETIME('1961-04-12 10:20:30') >= DATE('1984-11-15')", "dt_d_f", false), + $("DATE('3077-04-12') >= TIME('00:00:00')", "d_t_t", true), + $("TIME('00:00:00') >= DATE('2020-09-16')", "t_d_t", true), + $("DATE('2020-09-16') >= TIME('09:07:00')", "d_t_f", false), + $("TIME('09:07:00') >= DATE('3077-04-12')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGteTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('10:20:30') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 20:40:50') >= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') >= DATETIME('3077-04-12 09:07:00')", "t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') >= TIME('10:20:30')", "dt_t_f", false), - $("TIME('10:20:30') >= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 20:50:42') >= TIME('10:20:30')", "ts_t_t", true), - $("TIME('22:15:07') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false), - $("TIMESTAMP('1984-12-15 10:20:30') >= TIME('10:20:30')", "ts_t_f", false), - $("TIME('00:00:00') >= DATE('1961-04-12')", "t_d_t", true), - $("DATE('3077-04-12') >= TIME('10:20:30')", "d_t_t", true), - $("TIME('09:07:00') >= DATE('3077-04-12')", "t_d_f", false), - $("DATE('2020-09-16') >= TIME('09:07:00')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('10:20:30') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('" + today + " 20:40:50') >= TIME('10:20:30')", "dt_t_t", true), + $("TIME('09:07:00') >= DATETIME('3077-04-12 09:07:00')", "t_dt_f", false), + $("DATETIME('" + today + " 09:07:00') >= TIME('10:20:30')", "dt_t_f", false), + $("TIME('10:20:30') >= TIMESTAMP('" + today + " 10:20:30')", 
"t_ts_t", true), + $("TIMESTAMP('" + today + " 20:50:42') >= TIME('10:20:30')", "ts_t_t", true), + $("TIME('22:15:07') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false), + $("TIMESTAMP('1984-12-15 10:20:30') >= TIME('10:20:30')", "ts_t_f", false), + $("TIME('00:00:00') >= DATE('1961-04-12')", "t_d_t", true), + $("DATE('3077-04-12') >= TIME('10:20:30')", "d_t_t", true), + $("TIME('09:07:00') >= DATE('3077-04-12')", "t_d_f", false), + $("DATE('2020-09-16') >= TIME('09:07:00')", "d_t_f", false))); } @Test public void testCompare() throws IOException { - var result = executeQuery(String.format("source=%s | eval `%s` = %s | fields `%s`", - TEST_INDEX_DATATYPE_NONNUMERIC, name, functionCall, name)); + var result = + executeQuery( + String.format( + "source=%s | eval `%s` = %s | fields `%s`", + TEST_INDEX_DATATYPE_NONNUMERIC, name, functionCall, name)); verifySchema(result, schema(name, null, "boolean")); verifyDataRows(result, rows(expectedResult)); } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeFunctionIT.java index b75b0ecaef..1df87a87b3 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeFunctionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -53,826 +52,1151 @@ public void resetTimeZone() { @Test public void testAddDateWithDays() throws IOException { - var result = executeQuery(String.format("source=%s | eval " - + " f = adddate(date('2020-09-16'), 1)" - + " | fields f", TEST_INDEX_DATE)); + var result = + executeQuery( + String.format( + "source=%s | eval " + " f = adddate(date('2020-09-16'), 1)" + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "date")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17")); - 
result = executeQuery(String.format("source=%s | eval " - + " f = adddate(timestamp('2020-09-16 17:30:00'), 1)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = adddate(timestamp('2020-09-16 17:30:00'), 1)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = adddate(DATETIME('2020-09-16 07:40:00'), 1)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = adddate(DATETIME('2020-09-16 07:40:00'), 1)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 07:40:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = adddate(TIME('07:40:00'), 0)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + " f = adddate(TIME('07:40:00'), 0)" + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows(LocalDate.now() + " 07:40:00")); } @Test public void testAddDateWithInterval() throws IOException { - JSONObject result = executeQuery(String.format("source=%s | eval " - + " f = adddate(timestamp('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval " + + " f = adddate(timestamp('2020-09-16 17:30:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = adddate(DATETIME('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", 
TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = adddate(DATETIME('2020-09-16 17:30:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = adddate(date('2020-09-16'), interval 1 day) " - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = adddate(date('2020-09-16'), interval 1 day) " + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 00:00:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = adddate(date('2020-09-16'), interval 1 hour)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = adddate(date('2020-09-16'), interval 1 hour)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-16 01:00:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = adddate(TIME('07:40:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = adddate(TIME('07:40:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), - rows(LocalDate.now().plusDays(1).atTime(LocalTime.of(7, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySome( + result.getJSONArray("datarows"), + rows( + LocalDate.now() + .plusDays(1) + .atTime(LocalTime.of(7, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); - result = 
executeQuery(String.format("source=%s | eval " - + " f = adddate(TIME('07:40:00'), interval 1 hour)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = adddate(TIME('07:40:00'), interval 1 hour)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), - rows(LocalDate.now().atTime(LocalTime.of(8, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySome( + result.getJSONArray("datarows"), + rows( + LocalDate.now() + .atTime(LocalTime.of(8, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); } @Test public void testConvertTZ() throws IOException { JSONObject result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2008-05-15 12:00:00','+00:00','+10:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2008-05-15 12:00:00','+00:00','+10:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-05-15 22:00:00")); result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 00:00:00','-00:00','+00:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 00:00:00','-00:00','+00:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2021-05-12 00:00:00")); result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 00:00:00','+10:00','+11:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + 
executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 00:00:00','+10:00','+11:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2021-05-12 01:00:00")); result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-08:00','+09:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-08:00','+09:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2021-05-13 04:34:50")); result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 11:34:50','+09:00','+09:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 11:34:50','+09:00','+09:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2021-05-12 11:34:50")); result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+12:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+12:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2021-05-13 11:34:50")); result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 13:00:00','+09:30','+05:45') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = 
convert_tz('2021-05-12 13:00:00','+09:30','+05:45') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2021-05-12 09:15:00")); result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-30 11:34:50','-17:00','+08:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-30 11:34:50','-17:00','+08:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); result = - executeQuery(String.format( - "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+15:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + executeQuery( + String.format( + "source=%s | eval f = convert_tz('2021-05-12 11:34:50','-12:00','+15:00') | fields" + + " f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @Test public void testDateAdd() throws IOException { - JSONObject result = executeQuery(String.format("source=%s | eval " - + " f = date_add(timestamp('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_add(timestamp('2020-09-16 17:30:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = date_add(DATETIME('2020-09-16 17:30:00'), 
interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_add(DATETIME('2020-09-16 17:30:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = date_add(date('2020-09-16'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_add(date('2020-09-16'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-17 00:00:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = date_add(date('2020-09-16'), interval 1 hour)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_add(date('2020-09-16'), interval 1 hour)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-16 01:00:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = date_add(TIME('07:40:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_add(TIME('07:40:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), - rows(LocalDate.now().plusDays(1).atTime(LocalTime.of(7, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySome( + result.getJSONArray("datarows"), + rows( + LocalDate.now() + .plusDays(1) + .atTime(LocalTime.of(7, 40)) + .atZone(systemTz.toZoneId()) + 
.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); - result = executeQuery(String.format("source=%s | eval " - + " f = date_add(TIME('07:40:00'), interval 1 hour)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_add(TIME('07:40:00'), interval 1 hour)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), - rows(LocalDate.now().atTime(LocalTime.of(8, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySome( + result.getJSONArray("datarows"), + rows( + LocalDate.now() + .atTime(LocalTime.of(8, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); - result = executeQuery(String.format("source=%s | eval " - + " f = DATE_ADD(birthdate, INTERVAL 1 YEAR)" - + " | fields f", TEST_INDEX_BANK)); + result = + executeQuery( + String.format( + "source=%s | eval " + " f = DATE_ADD(birthdate, INTERVAL 1 YEAR)" + " | fields f", + TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "datetime")); - verifyDataRows(result, - rows("2018-10-23 00:00:00"), - rows("2018-11-20 00:00:00"), - rows("2019-06-23 00:00:00"), - rows("2019-11-13 23:33:20"), - rows("2019-06-27 00:00:00"), - rows("2019-08-19 00:00:00"), - rows("2019-08-11 00:00:00")); + verifyDataRows( + result, + rows("2018-10-23 00:00:00"), + rows("2018-11-20 00:00:00"), + rows("2019-06-23 00:00:00"), + rows("2019-11-13 23:33:20"), + rows("2019-06-27 00:00:00"), + rows("2019-08-19 00:00:00"), + rows("2019-08-11 00:00:00")); } @Test public void testDateTime() throws IOException { JSONObject result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = 
DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles')" + + " | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-12-24 21:30:00")); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+01:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+01:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 06:30:00")); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-12-25 05:30:00-05:00', '+05:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-12-25 05:30:00-05:00', '+05:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 15:30:00")); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2004-02-28 23:00:00-10:00', '+10:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2004-02-28 23:00:00-10:00', '+10:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2004-02-29 19:00:00")); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2003-02-28 23:00:00-10:00', '+10:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2003-02-28 23:00:00-10:00', '+10:00') | fields f", + TEST_INDEX_DATE)); + 
verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2003-03-01 19:00:00")); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+14:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+14:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 19:30:00")); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-10:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-10:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2007-12-31 06:00:00")); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-01-01 02:00:00")); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-01-01 02:00:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-01-01 02:00:00")); result = - executeQuery(String.format( - 
"source=%s | eval f = DATETIME('2008-01-01 02:00:00+15:00', '-12:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00+15:00', '-12:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-14:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-14:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); result = - executeQuery(String.format( - "source=%s | eval f = DATETIME('2008-01-01 02:00:00', '-14:00') | fields f", - TEST_INDEX_DATE)); - verifySchema(result, - schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00', '-14:00') | fields f", + TEST_INDEX_DATE)); + verifySchema(result, schema("f", null, "datetime")); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @Test public void testDateSub() throws IOException { - JSONObject result = executeQuery(String.format("source=%s | eval " - + " f = date_sub(timestamp('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_sub(timestamp('2020-09-16 17:30:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); 
verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = date_sub(DATETIME('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_sub(DATETIME('2020-09-16 17:30:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = date_sub(date('2020-09-16'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_sub(date('2020-09-16'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 00:00:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = date_sub(date('2020-09-16'), interval 1 hour)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_sub(date('2020-09-16'), interval 1 hour)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 23:00:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = date_sub(TIME('07:40:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_sub(TIME('07:40:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), - rows(LocalDate.now().plusDays(-1).atTime(LocalTime.of(7, 40)).atZone(systemTz.toZoneId()) - 
.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySome( + result.getJSONArray("datarows"), + rows( + LocalDate.now() + .plusDays(-1) + .atTime(LocalTime.of(7, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); - result = executeQuery(String.format("source=%s | eval " - + " f = date_sub(TIME('07:40:00'), interval 1 hour)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = date_sub(TIME('07:40:00'), interval 1 hour)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), - rows(LocalDate.now().atTime(LocalTime.of(6, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySome( + result.getJSONArray("datarows"), + rows( + LocalDate.now() + .atTime(LocalTime.of(6, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); } @Test public void testDay() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = day(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = day(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(16)); - result = executeQuery(String.format( - "source=%s | eval f = day('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format("source=%s | eval f = day('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(16)); } @Test public void testDay_of_week() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = day_of_week(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + 
JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = day_of_week(date('2020-09-16')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(4)); - result = executeQuery(String.format( - "source=%s | eval f = day_of_week('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = day_of_week('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(4)); } @Test public void testDay_of_month() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = day_of_month(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = day_of_month(date('2020-09-16')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(16)); - result = executeQuery(String.format( - "source=%s | eval f = day_of_month('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = day_of_month('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(16)); } @Test public void testDay_of_year() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = day_of_year(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = day_of_year(date('2020-09-16')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(260)); - result = executeQuery(String.format( - "source=%s | eval f = day_of_year('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + 
executeQuery( + String.format( + "source=%s | eval f = day_of_year('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(260)); } @Test public void testDayName() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = dayname(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = dayname(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "string")); verifySome(result.getJSONArray("datarows"), rows("Wednesday")); - result = executeQuery(String.format( - "source=%s | eval f = dayname('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = dayname('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "string")); verifySome(result.getJSONArray("datarows"), rows("Wednesday")); } @Test public void testDayOfMonth() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = dayofmonth(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = dayofmonth(date('2020-09-16')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(16)); - result = executeQuery(String.format( - "source=%s | eval f = dayofmonth('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = dayofmonth('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(16)); } @Test public void testDayOfWeek() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = dayofweek(date('2020-09-16')) | fields f", 
TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = dayofweek(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(4)); - result = executeQuery(String.format( - "source=%s | eval f = dayofweek('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = dayofweek('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(4)); } @Test public void testDayOfYear() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = dayofyear(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = dayofyear(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(260)); - result = executeQuery(String.format( - "source=%s | eval f = dayofyear('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = dayofyear('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(260)); } @Test public void testFromDays() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = from_days(738049) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format("source=%s | eval f = from_days(738049) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "date")); verifySome(result.getJSONArray("datarows"), rows("2020-09-16")); } @Test public void testHour() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = hour(timestamp('2020-09-16 17:30:00')) | 
fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = hour(timestamp('2020-09-16 17:30:00')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(17)); - result = executeQuery(String.format( - "source=%s | eval f = hour(time('17:30:00')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = hour(time('17:30:00')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(17)); - result = executeQuery(String.format( - "source=%s | eval f = hour('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = hour('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(17)); - result = executeQuery(String.format( - "source=%s | eval f = hour('17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format("source=%s | eval f = hour('17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(17)); } @Test public void testHour_of_day() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = hour_of_day(timestamp('2020-09-16 17:30:00')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = hour_of_day(timestamp('2020-09-16 17:30:00')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(17)); - result = executeQuery(String.format( - "source=%s | eval f = hour_of_day(time('17:30:00')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = 
hour_of_day(time('17:30:00')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(17)); - result = executeQuery(String.format( - "source=%s | eval f = hour_of_day('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = hour_of_day('2020-09-16 17:30:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(17)); - result = executeQuery(String.format( - "source=%s | eval f = hour_of_day('17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = hour_of_day('17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(17)); } @Test public void testMicrosecond() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = microsecond(timestamp('2020-09-16 17:30:00.123456')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = microsecond(timestamp('2020-09-16 17:30:00.123456')) |" + + " fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(123456)); // Explicit timestamp value with less than 6 microsecond digits - result = executeQuery(String.format( - "source=%s | eval f = microsecond(timestamp('2020-09-16 17:30:00.1234')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = microsecond(timestamp('2020-09-16 17:30:00.1234')) | fields" + + " f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(123400)); - result = executeQuery(String.format( - "source=%s | eval f = microsecond(time('17:30:00.000010')) | fields f", 
TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = microsecond(time('17:30:00.000010')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(10)); // Explicit time value with less than 6 microsecond digits - result = executeQuery(String.format( - "source=%s | eval f = microsecond(time('17:30:00.1234')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = microsecond(time('17:30:00.1234')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(123400)); - result = executeQuery(String.format( - "source=%s | eval f = microsecond('2020-09-16 17:30:00.123456') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = microsecond('2020-09-16 17:30:00.123456') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(123456)); // Implicit timestamp value with less than 6 microsecond digits - result = executeQuery(String.format( - "source=%s | eval f = microsecond('2020-09-16 17:30:00.1234') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = microsecond('2020-09-16 17:30:00.1234') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(123400)); - result = executeQuery(String.format( - "source=%s | eval f = microsecond('17:30:00.000010') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = microsecond('17:30:00.000010') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(10)); // Implicit time value with less than 6 microsecond digits - result = 
executeQuery(String.format( - "source=%s | eval f = microsecond('17:30:00.1234') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = microsecond('17:30:00.1234') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(123400)); } @Test public void testMinute() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = minute(timestamp('2020-09-16 17:30:00')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = minute(timestamp('2020-09-16 17:30:00')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(30)); - result = executeQuery(String.format( - "source=%s | eval f = minute(time('17:30:00')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = minute(time('17:30:00')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(30)); - result = executeQuery(String.format( - "source=%s | eval f = minute('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = minute('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(30)); - result = executeQuery(String.format( - "source=%s | eval f = minute('17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format("source=%s | eval f = minute('17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(30)); } @Test public void testMinute_of_hour() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f 
= minute_of_hour(timestamp('2020-09-16 17:30:00')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = minute_of_hour(timestamp('2020-09-16 17:30:00')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(30)); - result = executeQuery(String.format( - "source=%s | eval f = minute_of_hour(time('17:30:00')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = minute_of_hour(time('17:30:00')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(30)); - result = executeQuery(String.format( - "source=%s | eval f = minute_of_hour('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = minute_of_hour('2020-09-16 17:30:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(30)); - result = executeQuery(String.format( - "source=%s | eval f = minute_of_hour('17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = minute_of_hour('17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(30)); } @Test public void testMinute_of_day() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = minute_of_day(timestamp('2020-09-16 17:30:00')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = minute_of_day(timestamp('2020-09-16 17:30:00')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(1050)); - result = executeQuery(String.format( - "source=%s | 
eval f = minute_of_day(time('17:30:00')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = minute_of_day(time('17:30:00')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(1050)); - result = executeQuery(String.format( - "source=%s | eval f = minute_of_day('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = minute_of_day('2020-09-16 17:30:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(1050)); - result = executeQuery(String.format( - "source=%s | eval f = minute_of_day('17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = minute_of_day('17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(1050)); } @Test public void testMonth() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = month(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = month(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(9)); - result = executeQuery(String.format( - "source=%s | eval f = month('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format("source=%s | eval f = month('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(9)); } @Test public void testMonth_of_year() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = month_of_year(date('2020-09-16')) | fields f", 
TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = month_of_year(date('2020-09-16')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(9)); - result = executeQuery(String.format( - "source=%s | eval f = month_of_year('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = month_of_year('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(9)); } @Test public void testMonthName() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = monthname(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = monthname(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "string")); verifySome(result.getJSONArray("datarows"), rows("September")); - result = executeQuery(String.format( - "source=%s | eval f = monthname('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = monthname('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "string")); verifySome(result.getJSONArray("datarows"), rows("September")); } @Test public void testQuarter() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = quarter(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = quarter(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(3)); - result = executeQuery(String.format( - "source=%s | eval f = quarter('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + 
executeQuery( + String.format( + "source=%s | eval f = quarter('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(3)); } @Test public void testSecond() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = second(timestamp('2020-09-16 17:30:00')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = second(timestamp('2020-09-16 17:30:00')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(0)); - result = executeQuery(String.format( - "source=%s | eval f = second(time('17:30:00')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = second(time('17:30:00')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(0)); - result = executeQuery(String.format( - "source=%s | eval f = second('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = second('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(0)); - result = executeQuery(String.format( - "source=%s | eval f = second('17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format("source=%s | eval f = second('17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(0)); } @Test public void testSecond_of_minute() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = second_of_minute(timestamp('2020-09-16 17:30:00')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( 
+ "source=%s | eval f = second_of_minute(timestamp('2020-09-16 17:30:00')) | fields" + + " f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(0)); - result = executeQuery(String.format( - "source=%s | eval f = second_of_minute(time('17:30:00')) | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = second_of_minute(time('17:30:00')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(0)); - result = executeQuery(String.format( - "source=%s | eval f = second_of_minute('2020-09-16 17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = second_of_minute('2020-09-16 17:30:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(0)); - result = executeQuery(String.format( - "source=%s | eval f = second_of_minute('17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = second_of_minute('17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(0)); } @Test public void testSubDateDays() throws IOException { - var result = executeQuery(String.format("source=%s | eval " - + " f = subdate(date('2020-09-16'), 1)" - + " | fields f", TEST_INDEX_DATE)); + var result = + executeQuery( + String.format( + "source=%s | eval " + " f = subdate(date('2020-09-16'), 1)" + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "date")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15")); - result = executeQuery(String.format("source=%s | eval " - + " f = subdate(timestamp('2020-09-16 17:30:00'), 1)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + 
"source=%s | eval " + + " f = subdate(timestamp('2020-09-16 17:30:00'), 1)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = subdate(date('2020-09-16'), 1)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + " f = subdate(date('2020-09-16'), 1)" + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "date")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15")); - result = executeQuery(String.format("source=%s | eval " - + " f = subdate(TIME('07:40:00'), 0)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + " f = subdate(TIME('07:40:00'), 0)" + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows(LocalDate.now() + " 07:40:00")); } @Test public void testSubDateInterval() throws IOException { - JSONObject result = executeQuery(String.format("source=%s | eval " - + " f = subdate(timestamp('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval " + + " f = subdate(timestamp('2020-09-16 17:30:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = subdate(DATETIME('2020-09-16 17:30:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = subdate(DATETIME('2020-09-16 17:30:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); 
verifySome(result.getJSONArray("datarows"), rows("2020-09-15 17:30:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = subdate(date('2020-09-16'), interval 1 day) " - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = subdate(date('2020-09-16'), interval 1 day) " + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 00:00:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = subdate(date('2020-09-16'), interval 1 hour)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = subdate(date('2020-09-16'), interval 1 hour)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2020-09-15 23:00:00")); - result = executeQuery(String.format("source=%s | eval " - + " f = subdate(TIME('07:40:00'), interval 1 day)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = subdate(TIME('07:40:00'), interval 1 day)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), - rows(LocalDate.now().plusDays(-1).atTime(LocalTime.of(7, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySome( + result.getJSONArray("datarows"), + rows( + LocalDate.now() + .plusDays(-1) + .atTime(LocalTime.of(7, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); - result = executeQuery(String.format("source=%s | eval " - + " f = subdate(TIME('07:40:00'), interval 1 hour)" - + " | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval " + + " f = subdate(TIME('07:40:00'), interval 
1 hour)" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), - rows(LocalDate.now().atTime(LocalTime.of(6, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySome( + result.getJSONArray("datarows"), + rows( + LocalDate.now() + .atTime(LocalTime.of(6, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); } @Test public void testTimeToSec() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = time_to_sec(time('17:30:00')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = time_to_sec(time('17:30:00')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "long")); verifySome(result.getJSONArray("datarows"), rows(63000)); - result = executeQuery(String.format( - "source=%s | eval f = time_to_sec('17:30:00') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = time_to_sec('17:30:00') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "long")); verifySome(result.getJSONArray("datarows"), rows(63000)); } @Test public void testToDays() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = to_days(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = to_days(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "long")); verifySome(result.getJSONArray("datarows"), rows(738049)); - result = executeQuery(String.format( - "source=%s | eval f = to_days('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format( + "source=%s | eval f = to_days('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, 
"long")); verifySome(result.getJSONArray("datarows"), rows(738049)); } private void week(String date, int mode, int expectedResult) throws IOException { - JSONObject result = executeQuery(StringUtils.format( - "source=%s | eval f = week(date('%s'), %d) | fields f", TEST_INDEX_DATE, date, mode)); + JSONObject result = + executeQuery( + StringUtils.format( + "source=%s | eval f = week(date('%s'), %d) | fields f", + TEST_INDEX_DATE, date, mode)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(expectedResult)); } @Test public void testWeek() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = week(date('2008-02-20')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = week(date('2008-02-20')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(7)); @@ -885,35 +1209,46 @@ public void testWeek() throws IOException { @Test public void testWeek_of_year() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = week_of_year(date('2008-02-20')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = week_of_year(date('2008-02-20')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(7)); } @Test public void testYear() throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = year(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = year(date('2020-09-16')) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(2020)); - result = executeQuery(String.format( - "source=%s | eval f = 
year('2020-09-16') | fields f", TEST_INDEX_DATE)); + result = + executeQuery( + String.format("source=%s | eval f = year('2020-09-16') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(2020)); } - void verifyDateFormat(String date, String type, String format, String formatted) throws IOException { - JSONObject result = executeQuery(String.format( - "source=%s | eval f = date_format(%s('%s'), '%s') | fields f", - TEST_INDEX_DATE, type, date, format)); + void verifyDateFormat(String date, String type, String format, String formatted) + throws IOException { + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = date_format(%s('%s'), '%s') | fields f", + TEST_INDEX_DATE, type, date, format)); verifySchema(result, schema("f", null, "string")); verifySome(result.getJSONArray("datarows"), rows(formatted)); - result = executeQuery(String.format( - "source=%s | eval f = date_format('%s', '%s') | fields f", - TEST_INDEX_DATE, date, format)); + result = + executeQuery( + String.format( + "source=%s | eval f = date_format('%s', '%s') | fields f", + TEST_INDEX_DATE, date, format)); verifySchema(result, schema("f", null, "string")); verifySome(result.getJSONArray("datarows"), rows(formatted)); } @@ -921,10 +1256,11 @@ void verifyDateFormat(String date, String type, String format, String formatted) @Test public void testDateFormat() throws IOException { String timestamp = "1998-01-31 13:14:15.012345"; - String timestampFormat = "%a %b %c %D %d %e %f %H %h %I %i %j %k %l %M " - + "%m %p %r %S %s %T %% %P"; - String timestampFormatted = "Sat Jan 01 31st 31 31 012345 13 01 01 14 031 13 1 " - + "January 01 PM 01:14:15 PM 15 15 13:14:15 % P"; + String timestampFormat = + "%a %b %c %D %d %e %f %H %h %I %i %j %k %l %M " + "%m %p %r %S %s %T %% %P"; + String timestampFormatted = + "Sat Jan 01 31st 31 31 012345 13 01 01 14 031 13 1 " + + "January 01 PM 01:14:15 PM 15 15 13:14:15 % 
P"; verifyDateFormat(timestamp, "timestamp", timestampFormat, timestampFormatted); String date = "1998-01-31"; @@ -948,76 +1284,119 @@ public void testDateFormatISO8601() throws IOException { @Test public void testMakeTime() throws IOException { - var result = executeQuery(String.format( - "source=%s | eval f1 = MAKETIME(20, 30, 40), f2 = MAKETIME(20.2, 49.5, 42.100502) | fields f1, f2", TEST_INDEX_DATE)); + var result = + executeQuery( + String.format( + "source=%s | eval f1 = MAKETIME(20, 30, 40), f2 = MAKETIME(20.2, 49.5, 42.100502) |" + + " fields f1, f2", + TEST_INDEX_DATE)); verifySchema(result, schema("f1", null, "time"), schema("f2", null, "time")); verifySome(result.getJSONArray("datarows"), rows("20:30:40", "20:50:42.100502")); } @Test public void testMakeDate() throws IOException { - var result = executeQuery(String.format( - "source=%s | eval f1 = MAKEDATE(1945, 5.9), f2 = MAKEDATE(1984, 1984) | fields f1, f2", TEST_INDEX_DATE)); + var result = + executeQuery( + String.format( + "source=%s | eval f1 = MAKEDATE(1945, 5.9), f2 = MAKEDATE(1984, 1984) | fields f1," + + " f2", + TEST_INDEX_DATE)); verifySchema(result, schema("f1", null, "date"), schema("f2", null, "date")); verifySome(result.getJSONArray("datarows"), rows("1945-01-06", "1989-06-06")); } @Test public void testAddTime() throws IOException { - var result = executeQuery(String.format("source=%s | eval" - + " `'2008-12-12' + 0` = ADDTIME(DATE('2008-12-12'), DATE('2008-11-15'))," - + " `'23:59:59' + 0` = ADDTIME(TIME('23:59:59'), DATE('2004-01-01'))," - + " `'2004-01-01' + '23:59:59'` = ADDTIME(DATE('2004-01-01'), TIME('23:59:59'))," - + " `'10:20:30' + '00:05:42'` = ADDTIME(TIME('10:20:30'), TIME('00:05:42'))," - + " `'15:42:13' + '09:07:00'` = ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))" - + " | fields `'2008-12-12' + 0`, `'23:59:59' + 0`, `'2004-01-01' + '23:59:59'`, `'10:20:30' + '00:05:42'`, `'15:42:13' + '09:07:00'`", TEST_INDEX_DATE)); - verifySchema(result, + 
var result = + executeQuery( + String.format( + "source=%s | eval `'2008-12-12' + 0` = ADDTIME(DATE('2008-12-12')," + + " DATE('2008-11-15')), `'23:59:59' + 0` = ADDTIME(TIME('23:59:59')," + + " DATE('2004-01-01')), `'2004-01-01' + '23:59:59'` =" + + " ADDTIME(DATE('2004-01-01'), TIME('23:59:59')), `'10:20:30' + '00:05:42'` =" + + " ADDTIME(TIME('10:20:30'), TIME('00:05:42')), `'15:42:13' + '09:07:00'` =" + + " ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))" + + " | fields `'2008-12-12' + 0`, `'23:59:59' + 0`, `'2004-01-01' + '23:59:59'`," + + " `'10:20:30' + '00:05:42'`, `'15:42:13' + '09:07:00'`", + TEST_INDEX_DATE)); + verifySchema( + result, schema("'2008-12-12' + 0", null, "datetime"), schema("'23:59:59' + 0", null, "time"), schema("'2004-01-01' + '23:59:59'", null, "datetime"), schema("'10:20:30' + '00:05:42'", null, "time"), schema("'15:42:13' + '09:07:00'", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2008-12-12 00:00:00", "23:59:59", "2004-01-01 23:59:59", "10:26:12", "2000-01-01 00:49:13")); + verifySome( + result.getJSONArray("datarows"), + rows( + "2008-12-12 00:00:00", + "23:59:59", + "2004-01-01 23:59:59", + "10:26:12", + "2000-01-01 00:49:13")); } @Test public void testSubTime() throws IOException { - var result = executeQuery(String.format("source=%s | eval" - + " `'2008-12-12' - 0` = SUBTIME(DATE('2008-12-12'), DATE('2008-11-15'))," - + " `'23:59:59' - 0` = SUBTIME(TIME('23:59:59'), DATE('2004-01-01'))," - + " `'2004-01-01' - '23:59:59'` = SUBTIME(DATE('2004-01-01'), TIME('23:59:59'))," - + " `'10:20:30' - '00:05:42'` = SUBTIME(TIME('10:20:30'), TIME('00:05:42'))," - + " `'15:42:13' - '09:07:00'` = SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))" - + " | fields `'2008-12-12' - 0`, `'23:59:59' - 0`, `'2004-01-01' - '23:59:59'`, `'10:20:30' - '00:05:42'`, `'15:42:13' - '09:07:00'`", TEST_INDEX_DATE)); - verifySchema(result, + var result = + executeQuery( + 
String.format( + "source=%s | eval `'2008-12-12' - 0` = SUBTIME(DATE('2008-12-12')," + + " DATE('2008-11-15')), `'23:59:59' - 0` = SUBTIME(TIME('23:59:59')," + + " DATE('2004-01-01')), `'2004-01-01' - '23:59:59'` =" + + " SUBTIME(DATE('2004-01-01'), TIME('23:59:59')), `'10:20:30' - '00:05:42'` =" + + " SUBTIME(TIME('10:20:30'), TIME('00:05:42')), `'15:42:13' - '09:07:00'` =" + + " SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))" + + " | fields `'2008-12-12' - 0`, `'23:59:59' - 0`, `'2004-01-01' - '23:59:59'`," + + " `'10:20:30' - '00:05:42'`, `'15:42:13' - '09:07:00'`", + TEST_INDEX_DATE)); + verifySchema( + result, schema("'2008-12-12' - 0", null, "datetime"), schema("'23:59:59' - 0", null, "time"), schema("'2004-01-01' - '23:59:59'", null, "datetime"), schema("'10:20:30' - '00:05:42'", null, "time"), schema("'15:42:13' - '09:07:00'", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows("2008-12-12 00:00:00", "23:59:59", "2003-12-31 00:00:01", "10:14:48", "1999-12-31 06:35:13")); + verifySome( + result.getJSONArray("datarows"), + rows( + "2008-12-12 00:00:00", + "23:59:59", + "2003-12-31 00:00:01", + "10:14:48", + "1999-12-31 06:35:13")); } @Test public void testFromUnixTime() throws IOException { - var result = executeQuery(String.format( - "source=%s | eval f1 = FROM_UNIXTIME(200300400), f2 = FROM_UNIXTIME(12224.12), " - + "f3 = FROM_UNIXTIME(1662601316, '%%T') | fields f1, f2, f3", TEST_INDEX_DATE)); - verifySchema(result, + var result = + executeQuery( + String.format( + "source=%s | eval f1 = FROM_UNIXTIME(200300400), f2 = FROM_UNIXTIME(12224.12), " + + "f3 = FROM_UNIXTIME(1662601316, '%%T') | fields f1, f2, f3", + TEST_INDEX_DATE)); + verifySchema( + result, schema("f1", null, "datetime"), schema("f2", null, "datetime"), schema("f3", null, "string")); - verifySome(result.getJSONArray("datarows"), + verifySome( + result.getJSONArray("datarows"), rows("1976-05-07 07:00:00", "1970-01-01 03:23:44.12", "01:41:56")); } 
@Test public void testUnixTimeStamp() throws IOException { - var result = executeQuery(String.format( - "source=%s | eval f1 = UNIX_TIMESTAMP(MAKEDATE(1984, 1984)), " - + "f2 = UNIX_TIMESTAMP(TIMESTAMP('2003-12-31 12:00:00')), " - + "f3 = UNIX_TIMESTAMP(20771122143845) | fields f1, f2, f3", TEST_INDEX_DATE)); - verifySchema(result, + var result = + executeQuery( + String.format( + "source=%s | eval f1 = UNIX_TIMESTAMP(MAKEDATE(1984, 1984)), " + + "f2 = UNIX_TIMESTAMP(TIMESTAMP('2003-12-31 12:00:00')), " + + "f3 = UNIX_TIMESTAMP(20771122143845) | fields f1, f2, f3", + TEST_INDEX_DATE)); + verifySchema( + result, schema("f1", null, "double"), schema("f2", null, "double"), schema("f3", null, "double")); @@ -1026,28 +1405,43 @@ public void testUnixTimeStamp() throws IOException { @Test public void testPeriodAdd() throws IOException { - var result = executeQuery(String.format( - "source=%s | eval f1 = PERIOD_ADD(200801, 2), f2 = PERIOD_ADD(200801, -12) | fields f1, f2", TEST_INDEX_DATE)); + var result = + executeQuery( + String.format( + "source=%s | eval f1 = PERIOD_ADD(200801, 2), f2 = PERIOD_ADD(200801, -12) | fields" + + " f1, f2", + TEST_INDEX_DATE)); verifySchema(result, schema("f1", null, "integer"), schema("f2", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(200803, 200701)); } @Test public void testPeriodDiff() throws IOException { - var result = executeQuery(String.format( - "source=%s | eval f1 = PERIOD_DIFF(200802, 200703), f2 = PERIOD_DIFF(200802, 201003) | fields f1, f2", TEST_INDEX_DATE)); + var result = + executeQuery( + String.format( + "source=%s | eval f1 = PERIOD_DIFF(200802, 200703), f2 = PERIOD_DIFF(200802," + + " 201003) | fields f1, f2", + TEST_INDEX_DATE)); verifySchema(result, schema("f1", null, "integer"), schema("f2", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(11, -25)); } public void testDateDiff() throws IOException { - var result = executeQuery(String.format("source=%s | eval" - + " 
`'2000-01-02' - '2000-01-01'` = DATEDIFF(TIMESTAMP('2000-01-02 00:00:00'), TIMESTAMP('2000-01-01 23:59:59'))," - + " `'2001-02-01' - '2004-01-01'` = DATEDIFF(DATE('2001-02-01'), TIMESTAMP('2004-01-01 00:00:00'))," - + " `'2004-01-01' - '2002-02-01'` = DATEDIFF(TIMESTAMP('2004-01-01 00:00:00'), DATETIME('2002-02-01 14:25:30'))," - + " `today - today` = DATEDIFF(TIME('23:59:59'), TIME('00:00:00'))" - + " | fields `'2000-01-02' - '2000-01-01'`, `'2001-02-01' - '2004-01-01'`, `'2004-01-01' - '2002-02-01'`, `today - today`", TEST_INDEX_DATE)); - verifySchema(result, + var result = + executeQuery( + String.format( + "source=%s | eval `'2000-01-02' - '2000-01-01'` = DATEDIFF(TIMESTAMP('2000-01-02" + + " 00:00:00'), TIMESTAMP('2000-01-01 23:59:59')), `'2001-02-01' -" + + " '2004-01-01'` = DATEDIFF(DATE('2001-02-01'), TIMESTAMP('2004-01-01" + + " 00:00:00')), `'2004-01-01' - '2002-02-01'` = DATEDIFF(TIMESTAMP('2004-01-01" + + " 00:00:00'), DATETIME('2002-02-01 14:25:30')), `today - today` =" + + " DATEDIFF(TIME('23:59:59'), TIME('00:00:00')) | fields `'2000-01-02' -" + + " '2000-01-01'`, `'2001-02-01' - '2004-01-01'`, `'2004-01-01' -" + + " '2002-02-01'`, `today - today`", + TEST_INDEX_DATE)); + verifySchema( + result, schema("'2000-01-02' - '2000-01-01'", null, "long"), schema("'2001-02-01' - '2004-01-01'", null, "long"), schema("'2004-01-01' - '2002-02-01'", null, "long"), @@ -1057,90 +1451,124 @@ public void testDateDiff() throws IOException { @Test public void testTimeDiff() throws IOException { - var result = executeQuery(String.format( - "source=%s | eval f = TIMEDIFF('23:59:59', '13:00:00') | fields f", TEST_INDEX_DATE)); + var result = + executeQuery( + String.format( + "source=%s | eval f = TIMEDIFF('23:59:59', '13:00:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "time")); verifySome(result.getJSONArray("datarows"), rows("10:59:59")); } @Test - public void testGetFormat() throws IOException{ - var result = 
executeQuery(String.format("source=%s | eval f = date_format('2003-10-03', get_format(DATE,'USA')) | fields f", TEST_INDEX_DATE)); + public void testGetFormat() throws IOException { + var result = + executeQuery( + String.format( + "source=%s | eval f = date_format('2003-10-03', get_format(DATE,'USA')) | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "string")); verifySome(result.getJSONArray("datarows"), rows("10.03.2003")); } @Test - public void testLastDay() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f = last_day('2003-10-03') | fields f", TEST_INDEX_DATE)); + public void testLastDay() throws IOException { + var result = + executeQuery( + String.format( + "source=%s | eval f = last_day('2003-10-03') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "date")); verifySome(result.getJSONArray("datarows"), rows("2003-10-31")); } @Test - public void testSecToTime() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f = sec_to_time(123456) | fields f", TEST_INDEX_DATE)); + public void testSecToTime() throws IOException { + var result = + executeQuery( + String.format("source=%s | eval f = sec_to_time(123456) | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "time")); verifySome(result.getJSONArray("datarows"), rows("10:17:36")); } @Test - public void testYearWeek() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f1 = yearweek('2003-10-03') | eval f2 = yearweek('2003-10-03', 3) | fields f1, f2", TEST_INDEX_DATE)); - verifySchema(result, - schema("f1", null, "integer"), - schema("f2", null, "integer")); + public void testYearWeek() throws IOException { + var result = + executeQuery( + String.format( + "source=%s | eval f1 = yearweek('2003-10-03') | eval f2 = yearweek('2003-10-03', 3)" + + " | fields f1, f2", + TEST_INDEX_DATE)); + verifySchema(result, schema("f1", null, "integer"), schema("f2", null, 
"integer")); verifySome(result.getJSONArray("datarows"), rows(200339, 200340)); } @Test - public void testWeekDay() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f = weekday('2003-10-03') | fields f", TEST_INDEX_DATE)); + public void testWeekDay() throws IOException { + var result = + executeQuery( + String.format( + "source=%s | eval f = weekday('2003-10-03') | fields f", TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "integer")); verifySome(result.getJSONArray("datarows"), rows(4)); } @Test - public void testToSeconds() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f1 = to_seconds(date('2008-10-07')) | " + - "eval f2 = to_seconds('2020-09-16 07:40:00') | " + - "eval f3 = to_seconds(DATETIME('2020-09-16 07:40:00')) | fields f1, f2, f3", TEST_INDEX_DATE)); - verifySchema(result, - schema("f1", null, "long"), - schema("f2", null, "long"), - schema("f3", null, "long")); + public void testToSeconds() throws IOException { + var result = + executeQuery( + String.format( + "source=%s | eval f1 = to_seconds(date('2008-10-07')) | " + + "eval f2 = to_seconds('2020-09-16 07:40:00') | " + + "eval f3 = to_seconds(DATETIME('2020-09-16 07:40:00')) | fields f1, f2, f3", + TEST_INDEX_DATE)); + verifySchema( + result, schema("f1", null, "long"), schema("f2", null, "long"), schema("f3", null, "long")); verifySome(result.getJSONArray("datarows"), rows(63390556800L, 63767461200L, 63767461200L)); } @Test - public void testStrToDate() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f = str_to_date('01,5,2013', '%s') | fields f", TEST_INDEX_DATE, "%d,%m,%Y")); + public void testStrToDate() throws IOException { + var result = + executeQuery( + String.format( + "source=%s | eval f = str_to_date('01,5,2013', '%s') | fields f", + TEST_INDEX_DATE, "%d,%m,%Y")); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2013-05-01 
00:00:00")); } @Test - public void testTimeStampAdd() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f = timestampadd(YEAR, 15, '2001-03-06 00:00:00') | fields f", TEST_INDEX_DATE)); + public void testTimeStampAdd() throws IOException { + var result = + executeQuery( + String.format( + "source=%s | eval f = timestampadd(YEAR, 15, '2001-03-06 00:00:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2016-03-06 00:00:00")); } @Test - public void testTimestampDiff() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f = timestampdiff(YEAR, '1997-01-01 00:00:00', '2001-03-06 00:00:00') | fields f", TEST_INDEX_DATE)); + public void testTimestampDiff() throws IOException { + var result = + executeQuery( + String.format( + "source=%s | eval f = timestampdiff(YEAR, '1997-01-01 00:00:00', '2001-03-06" + + " 00:00:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows(4)); } @Test - public void testExtract() throws IOException{ - var result = executeQuery(String.format("source=%s | eval f1 = extract(YEAR FROM '1997-01-01 00:00:00') | eval f2 = extract(MINUTE FROM time('10:17:36')) | fields f1, f2", TEST_INDEX_DATE)); - verifySchema(result, - schema("f1", null, "long"), - schema("f2", null, "long")); + public void testExtract() throws IOException { + var result = + executeQuery( + String.format( + "source=%s | eval f1 = extract(YEAR FROM '1997-01-01 00:00:00') | eval f2 =" + + " extract(MINUTE FROM time('10:17:36')) | fields f1, f2", + TEST_INDEX_DATE)); + verifySchema(result, schema("f1", null, "long"), schema("f2", null, "long")); verifySome(result.getJSONArray("datarows"), rows(1997L, 17L)); } - - } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java 
b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java index 158f25aadf..dd86470a39 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DateTimeImplementationIT.java @@ -5,145 +5,175 @@ package org.opensearch.sql.ppl; -import org.json.JSONObject; -import org.junit.Test; - -import java.io.IOException; - import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DATE; import static org.opensearch.sql.util.MatcherUtils.rows; import static org.opensearch.sql.util.MatcherUtils.schema; -import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; import static org.opensearch.sql.util.MatcherUtils.verifySchema; import static org.opensearch.sql.util.MatcherUtils.verifySome; -public class DateTimeImplementationIT extends PPLIntegTestCase { +import java.io.IOException; +import org.json.JSONObject; +import org.junit.Test; +public class DateTimeImplementationIT extends PPLIntegTestCase { @Override public void init() throws IOException { loadIndex(Index.DATE); } - @Test public void inRangeZeroToStringTZ() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles') | fields f", - TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles')" + + " | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-12-24 21:30:00")); } @Test public void inRangeZeroToPositive() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+01:00') | fields f", - TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+01:00') | fields f", + 
TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 06:30:00")); } @Test public void inRangeNegativeToPositive() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-12-25 05:30:00-05:00', '+05:00') | fields f", - TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-12-25 05:30:00-05:00', '+05:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 15:30:00")); } @Test public void inRangeTwentyHourOffset() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2004-02-28 23:00:00-10:00', '+10:00') | fields f", - TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2004-02-28 23:00:00-10:00', '+10:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2004-02-29 19:00:00")); } - @Test public void inRangeYearChange() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-10:00') | fields f", - TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-10:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2007-12-31 06:00:00")); } @Test public void inRangeZeroToMax() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-12-25 05:30:00+00:00', '+14:00') | fields f", - TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-12-25 
05:30:00+00:00', '+14:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-12-25 19:30:00")); } @Test public void inRangeNoToTZ() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00') | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-01-01 02:00:00")); } @Test public void inRangeNoTZ() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-01-01 02:00:00') | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); verifySome(result.getJSONArray("datarows"), rows("2008-01-01 02:00:00")); } @Test public void nullField3Over() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-01-01 02:00:00+15:00', '-12:00') | fields f", - TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00+15:00', '-12:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @Test public void nullField2Under() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-01-01 02:00:00+10:00', '-14:00') | fields f", - TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f 
= DATETIME('2008-01-01 02:00:00+10:00', '-14:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @Test public void nullTField3Over() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2008-01-01 02:00:00', '+15:00') | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2008-01-01 02:00:00', '+15:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueFebruary() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2021-02-30 10:00:00') | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2021-02-30 10:00:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueApril() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2021-04-31 10:00:00') | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2021-04-31 10:00:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } @Test public void 
nullDateTimeInvalidDateValueMonth() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | eval f = DATETIME('2021-13-03 10:00:00') | fields f", TEST_INDEX_DATE)); + JSONObject result = + executeQuery( + String.format( + "source=%s | eval f = DATETIME('2021-13-03 10:00:00') | fields f", + TEST_INDEX_DATE)); verifySchema(result, schema("f", null, "datetime")); - verifySome(result.getJSONArray("datarows"), rows(new Object[]{null})); + verifySome(result.getJSONArray("datarows"), rows(new Object[] {null})); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DedupCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DedupCommandIT.java index bd4fadb57f..7a6cf16bb4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DedupCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DedupCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DescribeCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DescribeCommandIT.java index 23bea69a52..aee32e08d1 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DescribeCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DescribeCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DOG; @@ -52,19 +51,17 @@ public void testDescribeAllFields() throws IOException { columnName("SCOPE_TABLE"), columnName("SOURCE_DATA_TYPE"), columnName("IS_AUTOINCREMENT"), - columnName("IS_GENERATEDCOLUMN") - ); + columnName("IS_GENERATEDCOLUMN")); } @Test public void testDescribeFilterFields() throws IOException { - JSONObject result = executeQuery(String.format("describe %s | fields TABLE_NAME, COLUMN_NAME, TYPE_NAME", TEST_INDEX_DOG)); + 
JSONObject result = + executeQuery( + String.format( + "describe %s | fields TABLE_NAME, COLUMN_NAME, TYPE_NAME", TEST_INDEX_DOG)); verifyColumn( - result, - columnName("TABLE_NAME"), - columnName("COLUMN_NAME"), - columnName("TYPE_NAME") - ); + result, columnName("TABLE_NAME"), columnName("COLUMN_NAME"), columnName("TYPE_NAME")); } @Test diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/ExplainIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/ExplainIT.java index 1a785e9074..fce975ef92 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/ExplainIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/ExplainIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.util.MatcherUtils.assertJsonEquals; @@ -35,8 +34,7 @@ public void testExplain() throws Exception { + "| fields - city " + "| eval age2 = avg_age + 2 " + "| dedup age2 " - + "| fields age2") - ); + + "| fields age2")); } @Test @@ -50,8 +48,7 @@ public void testFilterPushDownExplain() throws Exception { + "| where age > 30 " + "| where age < 40 " + "| where balance > 10000 " - + "| fields age") - ); + + "| fields age")); } @Test @@ -63,8 +60,7 @@ public void testFilterAndAggPushDownExplain() throws Exception { explainQueryToString( "source=opensearch-sql_test_index_account" + "| where age > 30 " - + "| stats avg(age) AS avg_age by state, city") - ); + + "| stats avg(age) AS avg_age by state, city")); } @Test @@ -77,8 +73,7 @@ public void testSortPushDownExplain() throws Exception { "source=opensearch-sql_test_index_account" + "| sort age " + "| where age > 30" - + "| fields age") - ); + + "| fields age")); } String loadFromFile(String filename) throws Exception { diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/FieldsCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/FieldsCommandIT.java index 4eb99e8b04..e8a287c80e 100644 --- 
a/integ-test/src/test/java/org/opensearch/sql/ppl/FieldsCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/FieldsCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -43,7 +42,9 @@ public void testFieldsWithMultiFields() throws IOException { verifyColumn(result, columnName("firstname"), columnName("lastname")); } - @Ignore("Cannot resolve wildcard yet. Enable once https://github.com/opensearch-project/sql/issues/787 is resolved.") + @Ignore( + "Cannot resolve wildcard yet. Enable once" + + " https://github.com/opensearch-project/sql/issues/787 is resolved.") @Test public void testFieldsWildCard() throws IOException { JSONObject result = @@ -57,14 +58,14 @@ public void testSelectDateTypeField() throws IOException { executeQuery(String.format("source=%s | fields birthdate", TEST_INDEX_BANK)); verifySchema(result, schema("birthdate", null, "timestamp")); - verifyDataRows(result, + verifyDataRows( + result, rows("2017-10-23 00:00:00"), rows("2017-11-20 00:00:00"), rows("2018-06-23 00:00:00"), rows("2018-11-13 23:33:20"), rows("2018-06-27 00:00:00"), rows("2018-08-19 00:00:00"), - rows("2018-08-11 00:00:00") - ); + rows("2018-08-11 00:00:00")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/HeadCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/HeadCommandIT.java index 48c489ce10..8a96620fe0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/HeadCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/HeadCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -39,7 +38,8 @@ public void init() throws IOException { public void testHead() throws IOException { JSONObject result = executeQuery(String.format("source=%s | fields 
firstname, age | head", TEST_INDEX_ACCOUNT)); - verifyDataRows(result, + verifyDataRows( + result, rows("Amber", 32), rows("Hattie", 36), rows("Nanette", 28), @@ -55,11 +55,9 @@ public void testHead() throws IOException { @Test public void testHeadWithNumber() throws IOException { JSONObject result = - executeQuery(String.format("source=%s | fields firstname, age | head 3", TEST_INDEX_ACCOUNT)); - verifyDataRows(result, - rows("Amber", 32), - rows("Hattie", 36), - rows("Nanette", 28)); + executeQuery( + String.format("source=%s | fields firstname, age | head 3", TEST_INDEX_ACCOUNT)); + verifyDataRows(result, rows("Amber", 32), rows("Hattie", 36), rows("Nanette", 28)); } @Ignore("Fix https://github.com/opensearch-project/sql/issues/703#issuecomment-1211422130") @@ -67,9 +65,10 @@ public void testHeadWithNumber() throws IOException { public void testHeadWithNumberLargerThanQuerySizeLimit() throws IOException { setQuerySizeLimit(5); JSONObject result = - executeQuery(String.format( - "source=%s | fields firstname, age | head 10", TEST_INDEX_ACCOUNT)); - verifyDataRows(result, + executeQuery( + String.format("source=%s | fields firstname, age | head 10", TEST_INDEX_ACCOUNT)); + verifyDataRows( + result, rows("Amber", 32), rows("Hattie", 36), rows("Nanette", 28), @@ -86,9 +85,10 @@ public void testHeadWithNumberLargerThanQuerySizeLimit() throws IOException { public void testHeadWithNumberLargerThanMaxResultWindow() throws IOException { setMaxResultWindow(TEST_INDEX_ACCOUNT, 10); JSONObject result = - executeQuery(String.format( - "source=%s | fields firstname, age | head 15", TEST_INDEX_ACCOUNT)); - verifyDataRows(result, + executeQuery( + String.format("source=%s | fields firstname, age | head 15", TEST_INDEX_ACCOUNT)); + verifyDataRows( + result, rows("Amber", 32), rows("Hattie", 36), rows("Nanette", 28), @@ -112,9 +112,10 @@ public void testHeadWithLargeNumber() throws IOException { setQuerySizeLimit(5); setMaxResultWindow(TEST_INDEX_ACCOUNT, 10); JSONObject 
result = - executeQuery(String.format( - "source=%s | fields firstname, age | head 15", TEST_INDEX_ACCOUNT)); - verifyDataRows(result, + executeQuery( + String.format("source=%s | fields firstname, age | head 15", TEST_INDEX_ACCOUNT)); + verifyDataRows( + result, rows("Amber", 32), rows("Hattie", 36), rows("Nanette", 28), @@ -135,10 +136,8 @@ public void testHeadWithLargeNumber() throws IOException { @Test public void testHeadWithNumberAndFrom() throws IOException { JSONObject result = - executeQuery(String.format("source=%s | fields firstname, age | head 3 from 4", TEST_INDEX_ACCOUNT)); - verifyDataRows(result, - rows("Elinor", 36), - rows("Virginia", 39), - rows("Dillard", 34)); + executeQuery( + String.format("source=%s | fields firstname, age | head 3 from 4", TEST_INDEX_ACCOUNT)); + verifyDataRows(result, rows("Elinor", 36), rows("Virginia", 39), rows("Dillard", 34)); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/InformationSchemaCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/InformationSchemaCommandIT.java index 448bebd377..cf7cfcdb39 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/InformationSchemaCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/InformationSchemaCommandIT.java @@ -7,7 +7,6 @@ package org.opensearch.sql.ppl; -import static org.opensearch.sql.legacy.TestUtils.getResponseBody; import static org.opensearch.sql.util.MatcherUtils.columnName; import static org.opensearch.sql.util.MatcherUtils.rows; import static org.opensearch.sql.util.MatcherUtils.verifyColumn; @@ -18,12 +17,8 @@ import java.io.IOException; import org.json.JSONObject; import org.junit.After; -import org.junit.AfterClass; import org.junit.Assert; -import org.junit.Before; import org.junit.BeforeClass; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import org.opensearch.client.Request; import org.opensearch.client.Response; @@ -33,10 +28,10 @@ 
public class InformationSchemaCommandIT extends PPLIntegTestCase { /** - * Integ tests are dependent on self generated metrics in prometheus instance. - * When running individual integ tests there - * is no time for generation of metrics in the test prometheus instance. - * This method gives prometheus time to generate metrics on itself. + * Integ tests are dependent on self generated metrics in prometheus instance. When running + * individual integ tests there is no time for generation of metrics in the test prometheus + * instance. This method gives prometheus time to generate metrics on itself. + * * @throws InterruptedException */ @BeforeClass @@ -47,8 +42,11 @@ protected static void metricGenerationWait() throws InterruptedException { @Override protected void init() throws InterruptedException, IOException { DataSourceMetadata createDSM = - new DataSourceMetadata("my_prometheus", DataSourceType.PROMETHEUS, - ImmutableList.of(), ImmutableMap.of("prometheus.uri", "http://localhost:9090")); + new DataSourceMetadata( + "my_prometheus", + DataSourceType.PROMETHEUS, + ImmutableList.of(), + ImmutableMap.of("prometheus.uri", "http://localhost:9090")); Request createRequest = getCreateDataSourceRequest(createDSM); Response response = client().performRequest(createRequest); Assert.assertEquals(201, response.getStatusLine().getStatusCode()); @@ -64,8 +62,9 @@ protected void deleteDataSourceMetadata() throws IOException { @Test public void testSearchTablesFromPrometheusCatalog() throws IOException { JSONObject result = - executeQuery("source=my_prometheus.information_schema.tables " - + "| where LIKE(TABLE_NAME, '%http%')"); + executeQuery( + "source=my_prometheus.information_schema.tables " + + "| where LIKE(TABLE_NAME, '%http%')"); this.logger.error(result.toString()); verifyColumn( result, @@ -74,24 +73,53 @@ public void testSearchTablesFromPrometheusCatalog() throws IOException { columnName("TABLE_NAME"), columnName("TABLE_TYPE"), columnName("UNIT"), - 
columnName("REMARKS") - ); - verifyDataRows(result, - rows("my_prometheus", "default", "promhttp_metric_handler_requests_in_flight", - "gauge", "", "Current number of scrapes being served."), - rows("my_prometheus", "default", "prometheus_sd_http_failures_total", - "counter", "", "Number of HTTP service discovery refresh failures."), - rows("my_prometheus", "default", "promhttp_metric_handler_requests_total", - "counter", "", "Total number of scrapes by HTTP status code."), - rows("my_prometheus", "default", "prometheus_http_request_duration_seconds", - "histogram", "", "Histogram of latencies for HTTP requests."), - rows("my_prometheus", "default", "prometheus_http_requests_total", - "counter", "", "Counter of HTTP requests."), - rows("my_prometheus", "default", "prometheus_http_response_size_bytes", - "histogram", "", "Histogram of response size for HTTP requests.")); + columnName("REMARKS")); + verifyDataRows( + result, + rows( + "my_prometheus", + "default", + "promhttp_metric_handler_requests_in_flight", + "gauge", + "", + "Current number of scrapes being served."), + rows( + "my_prometheus", + "default", + "prometheus_sd_http_failures_total", + "counter", + "", + "Number of HTTP service discovery refresh failures."), + rows( + "my_prometheus", + "default", + "promhttp_metric_handler_requests_total", + "counter", + "", + "Total number of scrapes by HTTP status code."), + rows( + "my_prometheus", + "default", + "prometheus_http_request_duration_seconds", + "histogram", + "", + "Histogram of latencies for HTTP requests."), + rows( + "my_prometheus", + "default", + "prometheus_http_requests_total", + "counter", + "", + "Counter of HTTP requests."), + rows( + "my_prometheus", + "default", + "prometheus_http_response_size_bytes", + "histogram", + "", + "Histogram of response size for HTTP requests.")); } - @Test public void testTablesFromPrometheusCatalog() throws IOException { JSONObject result = @@ -106,15 +134,18 @@ public void testTablesFromPrometheusCatalog() 
throws IOException { columnName("TABLE_NAME"), columnName("TABLE_TYPE"), columnName("UNIT"), - columnName("REMARKS") - ); - verifyDataRows(result, - rows("my_prometheus", - "default", "prometheus_http_requests_total", - "counter", "", "Counter of HTTP requests.")); + columnName("REMARKS")); + verifyDataRows( + result, + rows( + "my_prometheus", + "default", + "prometheus_http_requests_total", + "counter", + "", + "Counter of HTTP requests.")); } - // Moved this IT from DescribeCommandIT to segregate Datasource Integ Tests. @Test public void testDescribeCommandWithPrometheusCatalog() throws IOException { @@ -125,16 +156,19 @@ public void testDescribeCommandWithPrometheusCatalog() throws IOException { columnName("TABLE_SCHEMA"), columnName("TABLE_NAME"), columnName("COLUMN_NAME"), - columnName("DATA_TYPE") - ); - verifyDataRows(result, + columnName("DATA_TYPE")); + verifyDataRows( + result, rows("my_prometheus", "default", "prometheus_http_requests_total", "handler", "keyword"), rows("my_prometheus", "default", "prometheus_http_requests_total", "code", "keyword"), rows("my_prometheus", "default", "prometheus_http_requests_total", "instance", "keyword"), rows("my_prometheus", "default", "prometheus_http_requests_total", "@value", "double"), - rows("my_prometheus", "default", "prometheus_http_requests_total", "@timestamp", + rows( + "my_prometheus", + "default", + "prometheus_http_requests_total", + "@timestamp", "timestamp"), rows("my_prometheus", "default", "prometheus_http_requests_total", "job", "keyword")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/LegacyAPICompatibilityIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/LegacyAPICompatibilityIT.java index 4bf9a37a9f..c14b9baa35 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/LegacyAPICompatibilityIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/LegacyAPICompatibilityIT.java @@ -16,9 +16,7 @@ import org.opensearch.client.RequestOptions; import 
org.opensearch.client.Response; -/** - * For backward compatibility, check if legacy API endpoints are accessible. - */ +/** For backward compatibility, check if legacy API endpoints are accessible. */ public class LegacyAPICompatibilityIT extends PPLIntegTestCase { @Override @@ -51,22 +49,20 @@ public void stats() throws IOException { @Test public void legacySettingNewEndpoint() throws IOException { - String requestBody = "{" - + " \"persistent\": {" - + " \"opendistro.ppl.query.memory_limit\": \"80%\"" - + " }" - + "}"; + String requestBody = + "{" + + " \"persistent\": {" + + " \"opendistro.ppl.query.memory_limit\": \"80%\"" + + " }" + + "}"; Response response = updateSetting(SETTINGS_API_ENDPOINT, requestBody); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); } @Test public void newSettingNewEndpoint() throws IOException { - String requestBody = "{" - + " \"persistent\": {" - + " \"plugins.query.size_limit\": \"100\"" - + " }" - + "}"; + String requestBody = + "{" + " \"persistent\": {" + " \"plugins.query.size_limit\": \"100\"" + " }" + "}"; Response response = updateSetting(SETTINGS_API_ENDPOINT, requestBody); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); } @@ -83,5 +79,4 @@ private RequestOptions.Builder buildJsonOption() { restOptionsBuilder.addHeader("Content-Type", "application/json"); return restOptionsBuilder; } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/LikeQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/LikeQueryIT.java index 67ad553689..75dd6aa268 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/LikeQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/LikeQueryIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_WILDCARD; @@ -23,9 +22,13 @@ public void init() throws IOException { @Test public void test_like_with_percent() throws 
IOException { - String query = "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(KeywordBody, 'test wildcard%') | fields KeywordBody"; + String query = + "source=" + + TEST_INDEX_WILDCARD + + " | WHERE Like(KeywordBody, 'test wildcard%') | fields KeywordBody"; JSONObject result = executeQuery(query); - verifyDataRows(result, + verifyDataRows( + result, rows("test wildcard"), rows("test wildcard in the end of the text%"), rows("test wildcard in % the middle of the text"), @@ -37,51 +40,66 @@ public void test_like_with_percent() throws IOException { @Test public void test_like_with_escaped_percent() throws IOException { - String query = "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(KeywordBody, '\\\\%test wildcard%') | fields KeywordBody"; + String query = + "source=" + + TEST_INDEX_WILDCARD + + " | WHERE Like(KeywordBody, '\\\\%test wildcard%') | fields KeywordBody"; JSONObject result = executeQuery(query); - verifyDataRows(result, - rows("%test wildcard in the beginning of the text")); + verifyDataRows(result, rows("%test wildcard in the beginning of the text")); } @Test public void test_like_in_where_with_escaped_underscore() throws IOException { - String query = "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(KeywordBody, '\\\\_test wildcard%') | fields KeywordBody"; + String query = + "source=" + + TEST_INDEX_WILDCARD + + " | WHERE Like(KeywordBody, '\\\\_test wildcard%') | fields KeywordBody"; JSONObject result = executeQuery(query); - verifyDataRows(result, - rows("_test wildcard in the beginning of the text")); + verifyDataRows(result, rows("_test wildcard in the beginning of the text")); } @Test public void test_like_on_text_field_with_one_word() throws IOException { - String query = "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(TextBody, 'test*') | fields TextBody"; + String query = + "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(TextBody, 'test*') | fields TextBody"; JSONObject result = executeQuery(query); assertEquals(9, result.getInt("total")); 
} @Test public void test_like_on_text_keyword_field_with_one_word() throws IOException { - String query = "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(TextKeywordBody, 'test*') | fields TextKeywordBody"; + String query = + "source=" + + TEST_INDEX_WILDCARD + + " | WHERE Like(TextKeywordBody, 'test*') | fields TextKeywordBody"; JSONObject result = executeQuery(query); assertEquals(8, result.getInt("total")); } @Test public void test_like_on_text_keyword_field_with_greater_than_one_word() throws IOException { - String query = "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(TextKeywordBody, 'test wild*') | fields TextKeywordBody"; + String query = + "source=" + + TEST_INDEX_WILDCARD + + " | WHERE Like(TextKeywordBody, 'test wild*') | fields TextKeywordBody"; JSONObject result = executeQuery(query); assertEquals(7, result.getInt("total")); } @Test public void test_like_on_text_field_with_greater_than_one_word() throws IOException { - String query = "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(TextBody, 'test wild*') | fields TextBody"; + String query = + "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(TextBody, 'test wild*') | fields TextBody"; JSONObject result = executeQuery(query); assertEquals(0, result.getInt("total")); } @Test public void test_convert_field_text_to_keyword() throws IOException { - String query = "source=" + TEST_INDEX_WILDCARD + " | WHERE Like(TextKeywordBody, '*') | fields TextKeywordBody"; + String query = + "source=" + + TEST_INDEX_WILDCARD + + " | WHERE Like(TextKeywordBody, '*') | fields TextKeywordBody"; String result = explainQueryToString(query); assertTrue(result.contains("TextKeywordBody.keyword")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/MatchBoolPrefixIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/MatchBoolPrefixIT.java index 42ba8bea53..67e6fac04d 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/MatchBoolPrefixIT.java +++ 
b/integ-test/src/test/java/org/opensearch/sql/ppl/MatchBoolPrefixIT.java @@ -28,9 +28,7 @@ public void valid_query_match_test() throws IOException { "source=%s | where match_bool_prefix(phrase, 'qui') | fields phrase", TEST_INDEX_PHRASE)); - verifyDataRows(result, - rows("quick fox"), - rows("quick fox here")); + verifyDataRows(result, rows("quick fox"), rows("quick fox here")); } @Test @@ -38,12 +36,11 @@ public void optional_parameter_match_test() throws IOException { JSONObject result = executeQuery( String.format( - "source=%s | where match_bool_prefix(phrase, '2 tes', minimum_should_match=1, fuzziness=2) | fields phrase", + "source=%s | where match_bool_prefix(phrase, '2 tes', minimum_should_match=1," + + " fuzziness=2) | fields phrase", TEST_INDEX_PHRASE)); - verifyDataRows(result, - rows("my test"), - rows("my test 2")); + verifyDataRows(result, rows("my test"), rows("my test 2")); } @Test diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/MatchIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/MatchIT.java index 808be2334d..908f7a621c 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/MatchIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/MatchIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhraseIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhraseIT.java index 5b9fd07e31..5efc2108b9 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhraseIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhraseIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_PHRASE; @@ -12,8 +11,6 @@ import java.io.IOException; import org.json.JSONObject; -import org.junit.Ignore; -import 
org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; public class MatchPhraseIT extends PPLIntegTestCase { @@ -26,18 +23,20 @@ public void init() throws IOException { @Test public void test_match_phrase_function() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | where match_phrase(phrase, 'quick fox') | fields phrase", TEST_INDEX_PHRASE)); + executeQuery( + String.format( + "source=%s | where match_phrase(phrase, 'quick fox') | fields phrase", + TEST_INDEX_PHRASE)); verifyDataRows(result, rows("quick fox"), rows("quick fox here")); } @Test public void test_match_phrase_with_slop() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | where match_phrase(phrase, 'brown fox', slop = 2) | fields phrase", TEST_INDEX_PHRASE)); + executeQuery( + String.format( + "source=%s | where match_phrase(phrase, 'brown fox', slop = 2) | fields phrase", + TEST_INDEX_PHRASE)); verifyDataRows(result, rows("brown fox"), rows("fox brown")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhrasePrefixIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhrasePrefixIT.java index 0f827692a5..e9e9aa5cae 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhrasePrefixIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/MatchPhrasePrefixIT.java @@ -24,46 +24,48 @@ public void init() throws IOException { public void required_parameters() throws IOException { String query = "source = %s | WHERE match_phrase_prefix(Title, 'champagne be') | fields Title"; JSONObject result = executeQuery(String.format(query, TEST_INDEX_BEER)); - verifyDataRows(result, + verifyDataRows( + result, rows("Can old flat champagne be used for vinegar?"), rows("Elder flower champagne best to use natural yeast or add a wine yeast?")); } - @Test public void all_optional_parameters() throws IOException { // The values for optional parameters are valid but arbitrary. 
- String query = "source = %s " + - "| WHERE match_phrase_prefix(Title, 'flat champ', boost = 1.0, " + - "zero_terms_query='ALL', max_expansions = 2, analyzer=standard, slop=0) " + - "| fields Title"; + String query = + "source = %s " + + "| WHERE match_phrase_prefix(Title, 'flat champ', boost = 1.0, " + + "zero_terms_query='ALL', max_expansions = 2, analyzer=standard, slop=0) " + + "| fields Title"; JSONObject result = executeQuery(String.format(query, TEST_INDEX_BEER)); verifyDataRows(result, rows("Can old flat champagne be used for vinegar?")); } - @Test public void max_expansions_is_3() throws IOException { // max_expansions applies to the last term in the query -- 'bottl' // It tells OpenSearch to consider only the first 3 terms that start with 'bottl' // In this dataset these are 'bottle-conditioning', 'bottling', 'bottles'. - String query = "source = %s " + - "| WHERE match_phrase_prefix(Tags, 'draught bottl', max_expansions=3) | fields Tags"; + String query = + "source = %s " + + "| WHERE match_phrase_prefix(Tags, 'draught bottl', max_expansions=3) | fields Tags"; JSONObject result = executeQuery(String.format(query, TEST_INDEX_BEER)); - verifyDataRows(result, rows("brewing draught bottling"), - rows("draught bottles")); + verifyDataRows(result, rows("brewing draught bottling"), rows("draught bottles")); } @Test public void analyzer_english() throws IOException { // English analyzer removes 'in' and 'to' as they are common words. // This results in an empty query. 
- String query = "source = %s " + - "| WHERE match_phrase_prefix(Title, 'in to', analyzer=english)" + - "| fields Title"; + String query = + "source = %s " + + "| WHERE match_phrase_prefix(Title, 'in to', analyzer=english)" + + "| fields Title"; JSONObject result = executeQuery(String.format(query, TEST_INDEX_BEER)); - assertTrue("Expect English analyzer to filter out common words 'in' and 'to'", + assertTrue( + "Expect English analyzer to filter out common words 'in' and 'to'", result.getInt("total") == 0); } @@ -71,9 +73,10 @@ public void analyzer_english() throws IOException { public void analyzer_standard() throws IOException { // Standard analyzer does not treat 'in' and 'to' as special terms. // This results in 'to' being used as a phrase prefix given us 'Tokyo'. - String query = "source = %s " + - "| WHERE match_phrase_prefix(Title, 'in to', analyzer=standard)" + - "| fields Title"; + String query = + "source = %s " + + "| WHERE match_phrase_prefix(Title, 'in to', analyzer=standard)" + + "| fields Title"; JSONObject result = executeQuery(String.format(query, TEST_INDEX_BEER)); verifyDataRows(result, rows("Local microbreweries and craft beer in Tokyo")); } @@ -83,21 +86,19 @@ public void zero_term_query_all() throws IOException { // English analyzer removes 'in' and 'to' as they are common words. // zero_terms_query of 'ALL' causes all rows to be returned. // ORDER BY ... LIMIT helps make the test understandable. 
- String query = "source = %s" + - "| WHERE match_phrase_prefix(Title, 'in to', analyzer=english, zero_terms_query='ALL') " + - "| sort -Title | head 1 | fields Title"; + String query = + "source = %s| WHERE match_phrase_prefix(Title, 'in to', analyzer=english," + + " zero_terms_query='ALL') | sort -Title | head 1 | fields Title"; JSONObject result = executeQuery(String.format(query, TEST_INDEX_BEER)); verifyDataRows(result, rows("was working great, now all foam")); } - @Test public void slop_is_2() throws IOException { // When slop is 2, the terms are matched exactly in the order specified. // 'open' is used to match prefix of the next term. - String query = "source = %s" + - "| where match_phrase_prefix(Tags, 'gas ta', slop=2) " + - "| fields Tags"; + String query = + "source = %s" + "| where match_phrase_prefix(Tags, 'gas ta', slop=2) " + "| fields Tags"; JSONObject result = executeQuery(String.format(query, TEST_INDEX_BEER)); verifyDataRows(result, rows("taste gas")); } @@ -105,12 +106,9 @@ public void slop_is_2() throws IOException { @Test public void slop_is_3() throws IOException { // When slop is 3, results will include phrases where the query terms are transposed. 
- String query = "source = %s" + - "| where match_phrase_prefix(Tags, 'gas ta', slop=3)" + - "| fields Tags"; + String query = + "source = %s" + "| where match_phrase_prefix(Tags, 'gas ta', slop=3)" + "| fields Tags"; JSONObject result = executeQuery(String.format(query, TEST_INDEX_BEER)); - verifyDataRows(result, - rows("taste draught gas"), - rows("taste gas")); + verifyDataRows(result, rows("taste draught gas"), rows("taste gas")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/MathematicalFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/MathematicalFunctionIT.java index 6dd2d3916f..2d6a52c12b 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/MathematicalFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/MathematicalFunctionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -31,138 +30,146 @@ public void init() throws IOException { @Test public void testAbs() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = abs(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = abs(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "integer")); - verifyDataRows( - result, - rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); + verifyDataRows(result, rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); } @Test public void testCeil() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = ceil(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = ceil(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "long")); - verifyDataRows( - result, - rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); + verifyDataRows(result, rows(32), 
rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); } @Test public void testCeiling() throws IOException { JSONObject result = executeQuery( - String.format( - "source=%s | eval f = ceiling(age) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = ceiling(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "long")); - verifyDataRows( - result, - rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); + verifyDataRows(result, rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); } @Test public void testE() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = e() | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = e() | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifyDataRows( - result, rows(Math.E), rows(Math.E), rows(Math.E), rows(Math.E), - rows(Math.E), rows(Math.E), rows(Math.E)); + result, + rows(Math.E), + rows(Math.E), + rows(Math.E), + rows(Math.E), + rows(Math.E), + rows(Math.E), + rows(Math.E)); } @Test public void testExp() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = exp(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = exp(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifyDataRows( - result, rows(Math.exp(32)), rows(Math.exp(36)), rows(Math.exp(28)), rows(Math.exp(33)), - rows(Math.exp(36)), rows(Math.exp(39)), rows(Math.exp(34))); + result, + rows(Math.exp(32)), + rows(Math.exp(36)), + rows(Math.exp(28)), + rows(Math.exp(33)), + rows(Math.exp(36)), + rows(Math.exp(39)), + rows(Math.exp(34))); } @Test public void testFloor() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = floor(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = floor(age) | 
fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "long")); - verifyDataRows( - result, - rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); + verifyDataRows(result, rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); } @Test public void testLn() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = ln(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = ln(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifyDataRows( - result, rows(Math.log(32)), rows(Math.log(36)), rows(Math.log(28)), rows(Math.log(33)), - rows(Math.log(36)), rows(Math.log(39)), rows(Math.log(34))); + result, + rows(Math.log(32)), + rows(Math.log(36)), + rows(Math.log(28)), + rows(Math.log(33)), + rows(Math.log(36)), + rows(Math.log(39)), + rows(Math.log(34))); } @Test public void testLogOneArg() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = log(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = log(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); - verifyDataRows(result, - rows(Math.log(28)), rows(Math.log(32)), rows(Math.log(33)), rows(Math.log(34)), - rows(Math.log(36)), rows(Math.log(36)), rows(Math.log(39)) - ); + verifyDataRows( + result, + rows(Math.log(28)), + rows(Math.log(32)), + rows(Math.log(33)), + rows(Math.log(34)), + rows(Math.log(36)), + rows(Math.log(36)), + rows(Math.log(39))); } @Test public void testLogTwoArgs() throws IOException { JSONObject result = executeQuery( - String.format( - "source=%s | eval f = log(age, balance) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = log(age, balance) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifyDataRows( - result, closeTo(Math.log(39225) / Math.log(32)), 
closeTo(Math.log(5686) / Math.log(36)), - closeTo(Math.log(32838) / Math.log(28)), closeTo(Math.log(4180) / Math.log(33)), - closeTo(Math.log(16418) / Math.log(36)), closeTo(Math.log(40540) / Math.log(39)), + result, + closeTo(Math.log(39225) / Math.log(32)), + closeTo(Math.log(5686) / Math.log(36)), + closeTo(Math.log(32838) / Math.log(28)), + closeTo(Math.log(4180) / Math.log(33)), + closeTo(Math.log(16418) / Math.log(36)), + closeTo(Math.log(40540) / Math.log(39)), closeTo(Math.log(48086) / Math.log(34))); } @Test public void testLog10() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = log10(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = log10(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifyDataRows( - result, rows(Math.log10(32)), rows(Math.log10(36)), rows(Math.log10(28)), - rows(Math.log10(33)), rows(Math.log10(36)), rows(Math.log10(39)), rows(Math.log10(34))); + result, + rows(Math.log10(32)), + rows(Math.log10(36)), + rows(Math.log10(28)), + rows(Math.log10(33)), + rows(Math.log10(36)), + rows(Math.log10(39)), + rows(Math.log10(34))); } @Test public void testLog2() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = log2(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = log2(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifyDataRows( result, - closeTo(Math.log(32) / Math.log(2)), closeTo(Math.log(36) / Math.log(2)), - closeTo(Math.log(28) / Math.log(2)), closeTo(Math.log(33) / Math.log(2)), - closeTo(Math.log(36) / Math.log(2)), closeTo(Math.log(39) / Math.log(2)), + closeTo(Math.log(32) / Math.log(2)), + closeTo(Math.log(36) / Math.log(2)), + closeTo(Math.log(28) / Math.log(2)), + closeTo(Math.log(33) / Math.log(2)), + closeTo(Math.log(36) / Math.log(2)), + closeTo(Math.log(39) / Math.log(2)), 
closeTo(Math.log(34) / Math.log(2))); } @@ -170,168 +177,178 @@ public void testLog2() throws IOException { public void testConv() throws IOException { JSONObject result = executeQuery( - String.format( - "source=%s | eval f = conv(age, 10, 16) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = conv(age, 10, 16) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "string")); verifyDataRows( - result, rows("20"), rows("24"), rows("1c"), rows("21"), - rows("24"), rows("27"), rows("22")); + result, rows("20"), rows("24"), rows("1c"), rows("21"), rows("24"), rows("27"), rows("22")); } @Test public void testCrc32() throws IOException { JSONObject result = executeQuery( - String.format( - "source=%s | eval f = crc32(firstname) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = crc32(firstname) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "long")); verifyDataRows( - result, rows(324249283), rows(3369714977L), rows(1165568529), rows(2293694493L), - rows(3936131563L), rows(256963594), rows(824319315)); + result, + rows(324249283), + rows(3369714977L), + rows(1165568529), + rows(2293694493L), + rows(3936131563L), + rows(256963594), + rows(824319315)); } @Test public void testMod() throws IOException { JSONObject result = executeQuery( - String.format( - "source=%s | eval f = mod(age, 10) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = mod(age, 10) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "integer")); - verifyDataRows( - result, rows(2), rows(6), rows(8), rows(3), rows(6), rows(9), rows(4)); + verifyDataRows(result, rows(2), rows(6), rows(8), rows(3), rows(6), rows(9), rows(4)); } @Test public void testPow() throws IOException { JSONObject pow = - executeQuery( - String.format( - "source=%s | eval f = pow(age, 2) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = pow(age, 2) | fields f", TEST_INDEX_BANK)); 
verifySchema(pow, schema("f", null, "double")); verifyDataRows( - pow, rows(1024.0), rows(1296.0), rows(784.0), rows(1089.0), rows(1296.0), rows(1521.0), rows(1156.0)); + pow, + rows(1024.0), + rows(1296.0), + rows(784.0), + rows(1089.0), + rows(1296.0), + rows(1521.0), + rows(1156.0)); JSONObject power = executeQuery( - String.format( - "source=%s | eval f = power(age, 2) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = power(age, 2) | fields f", TEST_INDEX_BANK)); verifySchema(power, schema("f", null, "double")); verifyDataRows( - power, rows(1024.0), rows(1296.0), rows(784.0), rows(1089.0), rows(1296.0), rows(1521.0), rows(1156.0)); - + power, + rows(1024.0), + rows(1296.0), + rows(784.0), + rows(1089.0), + rows(1296.0), + rows(1521.0), + rows(1156.0)); } @Test public void testRound() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = round(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = round(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "long")); - verifyDataRows(result, - rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); + verifyDataRows(result, rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); result = executeQuery( - String.format( - "source=%s | eval f = round(age, -1) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = round(age, -1) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "long")); - verifyDataRows(result, - rows(30), rows(40), rows(30), rows(30), rows(40), rows(40), rows(30)); + verifyDataRows(result, rows(30), rows(40), rows(30), rows(30), rows(40), rows(40), rows(30)); } @Test public void testSign() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = sign(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = sign(age) | fields f", 
TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "integer")); - verifyDataRows( - result, rows(1), rows(1), rows(1), rows(1), rows(1), rows(1), rows(1)); + verifyDataRows(result, rows(1), rows(1), rows(1), rows(1), rows(1), rows(1), rows(1)); } @Test public void testSqrt() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = sqrt(age) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = sqrt(age) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); - verifyDataRows(result, - rows(5.656854249492381), rows(6.0), rows(5.291502622129181), - rows(5.744562646538029), rows(6.0), rows(6.244997998398398), + verifyDataRows( + result, + rows(5.656854249492381), + rows(6.0), + rows(5.291502622129181), + rows(5.744562646538029), + rows(6.0), + rows(6.244997998398398), rows(5.830951894845301)); } @Test public void testCbrt() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = cbrt(num3) | fields f", TEST_INDEX_CALCS)); + executeQuery(String.format("source=%s | eval f = cbrt(num3) | fields f", TEST_INDEX_CALCS)); verifySchema(result, schema("f", null, "double")); - verifyDataRows(result, - closeTo(Math.cbrt(-11.52)), closeTo(Math.cbrt(-9.31)), closeTo(Math.cbrt(-12.17)), - closeTo(Math.cbrt(-7.25)), closeTo(Math.cbrt(12.93)), closeTo(Math.cbrt(-19.96)), - closeTo(Math.cbrt(10.93)), closeTo(Math.cbrt(3.64)), closeTo(Math.cbrt(-13.38)), - closeTo(Math.cbrt(-10.56)), closeTo(Math.cbrt(-4.79)), closeTo(Math.cbrt(-10.81)), - closeTo(Math.cbrt(-6.62)), closeTo(Math.cbrt(-18.43)), closeTo(Math.cbrt(6.84)), - closeTo(Math.cbrt(-10.98)), closeTo(Math.cbrt(-2.6))); + verifyDataRows( + result, + closeTo(Math.cbrt(-11.52)), + closeTo(Math.cbrt(-9.31)), + closeTo(Math.cbrt(-12.17)), + closeTo(Math.cbrt(-7.25)), + closeTo(Math.cbrt(12.93)), + closeTo(Math.cbrt(-19.96)), + closeTo(Math.cbrt(10.93)), + closeTo(Math.cbrt(3.64)), + 
closeTo(Math.cbrt(-13.38)), + closeTo(Math.cbrt(-10.56)), + closeTo(Math.cbrt(-4.79)), + closeTo(Math.cbrt(-10.81)), + closeTo(Math.cbrt(-6.62)), + closeTo(Math.cbrt(-18.43)), + closeTo(Math.cbrt(6.84)), + closeTo(Math.cbrt(-10.98)), + closeTo(Math.cbrt(-2.6))); } @Test public void testTruncate() throws IOException { JSONObject result = executeQuery( - String.format( - "source=%s | eval f = truncate(age, 1) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = truncate(age, 1) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "long")); - verifyDataRows(result, - rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); + verifyDataRows(result, rows(32), rows(36), rows(28), rows(33), rows(36), rows(39), rows(34)); result = executeQuery( - String.format( - "source=%s | eval f = truncate(age, -1) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = truncate(age, -1) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "long")); - verifyDataRows(result, - rows(30), rows(30), rows(20), rows(30), rows(30), rows(30), rows(30)); + verifyDataRows(result, rows(30), rows(30), rows(20), rows(30), rows(30), rows(30), rows(30)); } @Test public void testPi() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = pi() | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = pi() | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifyDataRows( - result, rows(Math.PI), rows(Math.PI), rows(Math.PI), rows(Math.PI), - rows(Math.PI), rows(Math.PI), rows(Math.PI)); + result, + rows(Math.PI), + rows(Math.PI), + rows(Math.PI), + rows(Math.PI), + rows(Math.PI), + rows(Math.PI), + rows(Math.PI)); } @Test public void testRand() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = rand() | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval 
f = rand() | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "float")); result = - executeQuery( - String.format( - "source=%s | eval f = rand(5) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = rand(5) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "float")); } @Test public void testAcos() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = acos(0) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = acos(0) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), rows(Math.acos(0))); } @@ -339,9 +356,7 @@ public void testAcos() throws IOException { @Test public void testAsin() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = asin(1) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = asin(1) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), rows(Math.asin(1))); } @@ -349,16 +364,12 @@ public void testAsin() throws IOException { @Test public void testAtan() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = atan(2) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = atan(2) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), rows(Math.atan(2))); result = - executeQuery( - String.format( - "source=%s | eval f = atan(2, 3) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = atan(2, 3) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), rows(Math.atan2(2, 3))); } @@ -366,9 +377,7 @@ public void testAtan() throws IOException { @Test public 
void testAtan2() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = atan2(2, 3) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = atan2(2, 3) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), rows(Math.atan2(2, 3))); } @@ -376,9 +385,7 @@ public void testAtan2() throws IOException { @Test public void testCos() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = cos(1.57) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = cos(1.57) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), rows(Math.cos(1.57))); } @@ -386,9 +393,7 @@ public void testCos() throws IOException { @Test public void testCot() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = cot(2) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = cot(2) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), closeTo(1 / Math.tan(2))); } @@ -397,8 +402,7 @@ public void testCot() throws IOException { public void testDegrees() throws IOException { JSONObject result = executeQuery( - String.format( - "source=%s | eval f = degrees(1.57) | fields f", TEST_INDEX_BANK)); + String.format("source=%s | eval f = degrees(1.57) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), rows(Math.toDegrees(1.57))); } @@ -406,9 +410,7 @@ public void testDegrees() throws IOException { @Test public void testRadians() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = radians(90) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = 
radians(90) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), rows(Math.toRadians(90))); } @@ -416,9 +418,7 @@ public void testRadians() throws IOException { @Test public void testSin() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | eval f = sin(1.57) | fields f", TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | eval f = sin(1.57) | fields f", TEST_INDEX_BANK)); verifySchema(result, schema("f", null, "double")); verifySome(result.getJSONArray("datarows"), rows(Math.sin(1.57))); } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/MetricsIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/MetricsIT.java index 41373afdc6..73882a4036 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/MetricsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/MetricsIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.hamcrest.Matchers.equalTo; @@ -44,9 +43,7 @@ private void multiQueries(int n) throws IOException { } private Request makeStatRequest() { - return new Request( - "GET", "/_plugins/_ppl/stats" - ); + return new Request("GET", "/_plugins/_ppl/stats"); } private int pplRequestTotal() throws IOException { @@ -70,5 +67,4 @@ private String executeStatRequest(final Request request) throws IOException { return sb.toString(); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/MultiMatchIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/MultiMatchIT.java index 5bfd34e984..d760a8fe39 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/MultiMatchIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/MultiMatchIT.java @@ -3,13 +3,11 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BEER; import java.io.IOException; - import 
org.json.JSONObject; import org.junit.Test; @@ -22,36 +20,41 @@ public void init() throws IOException { @Test public void test_multi_match() throws IOException { - String query = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE multi_match([\\\"Tags\\\" ^ 1.5, Title, `Body` 4.2], 'taste') | fields Id"; + String query = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE multi_match([\\\"Tags\\\" ^ 1.5, Title, `Body` 4.2], 'taste') | fields Id"; var result = executeQuery(query); assertEquals(16, result.getInt("total")); } @Test public void test_multi_match_all_params() throws IOException { - String query = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE multi_match(['Body', Tags], 'taste beer', operator='and', analyzer=english," - + "auto_generate_synonyms_phrase_query=true, boost = 0.77, cutoff_frequency=0.33," - + "fuzziness = 'AUTO:1,5', fuzzy_transpositions = false, lenient = true, max_expansions = 25," - + "minimum_should_match = '2<-25% 9<-3', prefix_length = 7, tie_breaker = 0.3," - + "type = most_fields, slop = 2, zero_terms_query = 'ALL') | fields Id"; + String query = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE multi_match(['Body', Tags], 'taste beer', operator='and'," + + " analyzer=english,auto_generate_synonyms_phrase_query=true, boost = 0.77," + + " cutoff_frequency=0.33,fuzziness = 'AUTO:1,5', fuzzy_transpositions = false, lenient" + + " = true, max_expansions = 25,minimum_should_match = '2<-25% 9<-3', prefix_length =" + + " 7, tie_breaker = 0.3,type = most_fields, slop = 2, zero_terms_query = 'ALL') |" + + " fields Id"; var result = executeQuery(query); assertEquals(10, result.getInt("total")); } @Test public void test_wildcard_multi_match() throws IOException { - String query1 = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE multi_match(['Tags'], 'taste') | fields Id"; + String query1 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE multi_match(['Tags'], 'taste') | fields Id"; var result1 = executeQuery(query1); - String query2 = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE 
multi_match(['T*'], 'taste') | fields Id"; + String query2 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE multi_match(['T*'], 'taste') | fields Id"; var result2 = executeQuery(query2); assertNotEquals(result2.getInt("total"), result1.getInt("total")); - String query3 = "source=" + TEST_INDEX_BEER - + " | where simple_query_string(['*Date'], '2014-01-22')"; + String query3 = + "source=" + TEST_INDEX_BEER + " | where simple_query_string(['*Date'], '2014-01-22')"; JSONObject result3 = executeQuery(query3); assertEquals(10, result3.getInt("total")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/NowLikeFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/NowLikeFunctionIT.java index a330614d21..2d94dc6a3b 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/NowLikeFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/NowLikeFunctionIT.java @@ -72,8 +72,7 @@ public NowLikeFunctionIT( @Name("constValue") Boolean constValue, @Name("referenceGetter") Supplier referenceGetter, @Name("parser") BiFunction parser, - @Name("serializationPatternStr") String serializationPatternStr - ) { + @Name("serializationPatternStr") String serializationPatternStr) { this.name = name; this.hasFsp = hasFsp; this.hasShortcut = hasShortcut; @@ -85,56 +84,104 @@ public NowLikeFunctionIT( @ParametersFactory(argumentFormatting = "%1$s") public static Iterable compareTwoDates() { - return Arrays.asList($$( - $("now", false, false, true, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("current_timestamp", false, false, true, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("localtimestamp", false, false, true, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("localtime", false, false, true, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("sysdate", 
true, false, false, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("curtime", false, false, false, - (Supplier) LocalTime::now, - (BiFunction) LocalTime::parse, - "HH:mm:ss"), - $("current_time", false, false, false, - (Supplier) LocalTime::now, - (BiFunction) LocalTime::parse, - "HH:mm:ss"), - $("curdate", false, false, false, - (Supplier) LocalDate::now, - (BiFunction) LocalDate::parse, - "uuuu-MM-dd"), - $("current_date", false, false, false, - (Supplier) LocalDate::now, - (BiFunction) LocalDate::parse, - "uuuu-MM-dd"), - $("utc_date", false, false, true, - (Supplier) (() -> utcDateTimeNow().toLocalDate()), - (BiFunction) LocalDate::parse, - "uuuu-MM-dd"), - $("utc_time", false, false, true, - (Supplier) (() -> utcDateTimeNow().toLocalTime()), - (BiFunction) LocalTime::parse, - "HH:mm:ss"), - $("utc_timestamp", false, false, true, - (Supplier) (org.opensearch.sql.sql.NowLikeFunctionIT::utcDateTimeNow), - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss") - )); + return Arrays.asList( + $$( + $( + "now", + false, + false, + true, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "current_timestamp", + false, + false, + true, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "localtimestamp", + false, + false, + true, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "localtime", + false, + false, + true, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "sysdate", + true, + false, + false, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "curtime", + false, + false, + false, + (Supplier) LocalTime::now, + (BiFunction) LocalTime::parse, + "HH:mm:ss"), + $( + "current_time", + false, + false, + false, + (Supplier) LocalTime::now, + 
(BiFunction) LocalTime::parse, + "HH:mm:ss"), + $( + "curdate", + false, + false, + false, + (Supplier) LocalDate::now, + (BiFunction) LocalDate::parse, + "uuuu-MM-dd"), + $( + "current_date", + false, + false, + false, + (Supplier) LocalDate::now, + (BiFunction) LocalDate::parse, + "uuuu-MM-dd"), + $( + "utc_date", + false, + false, + true, + (Supplier) (() -> utcDateTimeNow().toLocalDate()), + (BiFunction) LocalDate::parse, + "uuuu-MM-dd"), + $( + "utc_time", + false, + false, + true, + (Supplier) (() -> utcDateTimeNow().toLocalTime()), + (BiFunction) LocalTime::parse, + "HH:mm:ss"), + $( + "utc_timestamp", + false, + false, + true, + (Supplier) (org.opensearch.sql.sql.NowLikeFunctionIT::utcDateTimeNow), + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"))); } private long getDiff(Temporal sample, Temporal reference) { @@ -146,7 +193,8 @@ private long getDiff(Temporal sample, Temporal reference) { @Test public void testNowLikeFunctions() throws IOException { - var serializationPattern = new DateTimeFormatterBuilder() + var serializationPattern = + new DateTimeFormatterBuilder() .appendPattern(serializationPatternStr) .optionalStart() .appendFraction(ChronoField.NANO_OF_SECOND, 0, 9, true) @@ -156,42 +204,57 @@ public void testNowLikeFunctions() throws IOException { double delta = 2d; // acceptable time diff, secs if (reference instanceof LocalDate) delta = 1d; // Max date delta could be 1 if test runs on the very edge of two days - // We ignore probability of a test run on edge of month or year to simplify the checks + // We ignore probability of a test run on edge of month or year to simplify the checks - var calls = new ArrayList() {{ - add(name + "()"); - }}; - if (hasShortcut) - calls.add(name); - if (hasFsp) - calls.add(name + "(0)"); + var calls = + new ArrayList() { + { + add(name + "()"); + } + }; + if (hasShortcut) calls.add(name); + if (hasFsp) calls.add(name + "(0)"); // Column order is: func(), func, func(0) // shortcut ^ fsp ^ // Query 
looks like: // source=people2 | eval `now()`=now() | fields `now()`; - JSONObject result = executeQuery("source=" + TEST_INDEX_PEOPLE2 - + " | eval " + calls.stream().map(c -> String.format("`%s`=%s", c, c)).collect(Collectors.joining(",")) - + " | fields " + calls.stream().map(c -> String.format("`%s`", c)).collect(Collectors.joining(","))); + JSONObject result = + executeQuery( + "source=" + + TEST_INDEX_PEOPLE2 + + " | eval " + + calls.stream() + .map(c -> String.format("`%s`=%s", c, c)) + .collect(Collectors.joining(",")) + + " | fields " + + calls.stream() + .map(c -> String.format("`%s`", c)) + .collect(Collectors.joining(","))); var rows = result.getJSONArray("datarows"); JSONArray firstRow = rows.getJSONArray(0); for (int i = 0; i < rows.length(); i++) { var row = rows.getJSONArray(i); - if (constValue) - assertTrue(firstRow.similar(row)); + if (constValue) assertTrue(firstRow.similar(row)); int column = 0; - assertEquals(0, - getDiff(reference, parser.apply(row.getString(column++), serializationPattern)), delta); + assertEquals( + 0, + getDiff(reference, parser.apply(row.getString(column++), serializationPattern)), + delta); if (hasShortcut) { - assertEquals(0, - getDiff(reference, parser.apply(row.getString(column++), serializationPattern)), delta); + assertEquals( + 0, + getDiff(reference, parser.apply(row.getString(column++), serializationPattern)), + delta); } if (hasFsp) { - assertEquals(0, - getDiff(reference, parser.apply(row.getString(column), serializationPattern)), delta); + assertEquals( + 0, + getDiff(reference, parser.apply(row.getString(column), serializationPattern)), + delta); } } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/ObjectFieldOperateIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/ObjectFieldOperateIT.java index 6178552728..cc836b1896 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/ObjectFieldOperateIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/ObjectFieldOperateIT.java @@ 
-3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.SQLIntegTestCase.Index.DEEP_NESTED; @@ -26,55 +25,45 @@ public void init() throws IOException { @Test public void select_object_field() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | " - + "fields city.name, city.location.latitude", - TEST_INDEX_DEEP_NESTED)); - verifySchema(result, - schema("city.name", "string"), - schema("city.location.latitude", "double")); - verifyDataRows(result, - rows("Seattle", 10.5)); + JSONObject result = + executeQuery( + String.format( + "source=%s | " + "fields city.name, city.location.latitude", + TEST_INDEX_DEEP_NESTED)); + verifySchema(result, schema("city.name", "string"), schema("city.location.latitude", "double")); + verifyDataRows(result, rows("Seattle", 10.5)); } @Test public void compare_object_field_in_where() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s " - + "| where city.name = 'Seattle' " - + "| fields city.name, city.location.latitude", - TEST_INDEX_DEEP_NESTED)); - verifySchema(result, - schema("city.name", "string"), - schema("city.location.latitude", "double")); - verifyDataRows(result, - rows("Seattle", 10.5)); + JSONObject result = + executeQuery( + String.format( + "source=%s " + + "| where city.name = 'Seattle' " + + "| fields city.name, city.location.latitude", + TEST_INDEX_DEEP_NESTED)); + verifySchema(result, schema("city.name", "string"), schema("city.location.latitude", "double")); + verifyDataRows(result, rows("Seattle", 10.5)); } @Test public void group_object_field_in_stats() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s " - + "| stats count() by city.name", - TEST_INDEX_DEEP_NESTED)); - verifySchema(result, - schema("count()", "integer"), - schema("city.name", "string")); - verifyDataRows(result, - rows(1, "Seattle")); + JSONObject result = + executeQuery( + 
String.format("source=%s " + "| stats count() by city.name", TEST_INDEX_DEEP_NESTED)); + verifySchema(result, schema("count()", "integer"), schema("city.name", "string")); + verifyDataRows(result, rows(1, "Seattle")); } @Test public void sort_by_object_field() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s " - + "| sort city.name" - + "| fields city.name, city.location.latitude", - TEST_INDEX_DEEP_NESTED)); - verifySchema(result, - schema("city.name", "string"), - schema("city.location.latitude", "double")); - verifyDataRows(result, - rows("Seattle", 10.5)); + JSONObject result = + executeQuery( + String.format( + "source=%s " + "| sort city.name" + "| fields city.name, city.location.latitude", + TEST_INDEX_DEEP_NESTED)); + verifySchema(result, schema("city.name", "string"), schema("city.location.latitude", "double")); + verifyDataRows(result, rows("Seattle", 10.5)); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/OperatorIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/OperatorIT.java index e6ca958991..42ed08b00c 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/OperatorIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/OperatorIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -26,40 +25,28 @@ public void init() throws IOException { @Test public void testAddOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | where age = 31 + 1 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | where age = 31 + 1 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(32)); } @Test public void testSubtractOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | where age = 33 - 1 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s 
| where age = 33 - 1 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(32)); } @Test public void testMultiplyOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | where age = 16 * 2 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | where age = 16 * 2 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(32)); } @Test public void testDivideOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s | where age / 2 = 16 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s | where age / 2 = 16 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(32), rows(33)); } @@ -67,9 +54,7 @@ public void testDivideOperator() throws IOException { public void testModuleOperator() throws IOException { JSONObject result = executeQuery( - String.format( - "source=%s | where age %s 32 = 0 | fields age", - TEST_INDEX_BANK, "%")); + String.format("source=%s | where age %s 32 = 0 | fields age", TEST_INDEX_BANK, "%")); verifyDataRows(result, rows(32)); } @@ -78,11 +63,9 @@ public void testArithmeticOperatorWithNullValue() throws IOException { JSONObject result = executeQuery( String.format( - "source=%s | eval f = age + 0 | fields f", - TEST_INDEX_BANK_WITH_NULL_VALUES)); + "source=%s | eval f = age + 0 | fields f", TEST_INDEX_BANK_WITH_NULL_VALUES)); verifyDataRows( - result, rows(32), rows(36), rows(28), rows(33), rows(36), rows(JSONObject.NULL), - rows(34)); + result, rows(32), rows(36), rows(28), rows(33), rows(36), rows(JSONObject.NULL), rows(34)); } @Test @@ -92,8 +75,14 @@ public void testArithmeticOperatorWithMissingValue() throws IOException { String.format( "source=%s | eval f = balance * 1 | fields f", TEST_INDEX_BANK_WITH_NULL_VALUES)); verifyDataRows( - result, rows(39225), rows(32838), rows(4180), rows(48086), rows(JSONObject.NULL), - rows(JSONObject.NULL), rows(JSONObject.NULL)); + result, + rows(39225), + 
rows(32838), + rows(4180), + rows(48086), + rows(JSONObject.NULL), + rows(JSONObject.NULL), + rows(JSONObject.NULL)); } @Test @@ -101,8 +90,7 @@ public void testMultipleArithmeticOperators() throws IOException { JSONObject result = executeQuery( String.format( - "source=%s | where (age+2) * 3 / 2 - 1 = 50 | fields age", - TEST_INDEX_BANK)); + "source=%s | where (age+2) * 3 / 2 - 1 = 50 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(32)); } @@ -127,14 +115,12 @@ public void testAndOperator() throws IOException { public void testOrOperator() throws IOException { JSONObject result = executeQuery( - String.format( - "source=%s | where age=32 or age=34 | fields age", TEST_INDEX_BANK)); + String.format("source=%s | where age=32 or age=34 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(32), rows(34)); result = executeQuery( - String.format( - "source=%s | where age=34 or age=32| fields age", TEST_INDEX_BANK)); + String.format("source=%s | where age=34 or age=32| fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(32), rows(34)); } @@ -158,92 +144,64 @@ public void testXorOperator() throws IOException { @Test public void testNotOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s not age > 32 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s not age > 32 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(28), rows(32)); } @Test public void testEqualOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s age = 32 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s age = 32 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(32)); - result = - executeQuery( - String.format( - "source=%s 32 = age | fields age", - TEST_INDEX_BANK)); + result = executeQuery(String.format("source=%s 32 = age | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(32)); } @Test public void 
testNotEqualOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s age != 32 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s age != 32 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(28), rows(33), rows(34), rows(36), rows(36), rows(39)); - result = - executeQuery( - String.format( - "source=%s 32 != age | fields age", - TEST_INDEX_BANK)); + result = executeQuery(String.format("source=%s 32 != age | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(28), rows(33), rows(34), rows(36), rows(36), rows(39)); } @Test public void testLessOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s age < 32 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s age < 32 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(28)); } @Test public void testLteOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s age <= 32 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s age <= 32 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(28), rows(32)); } @Test public void testGreaterOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s age > 36 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s age > 36 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(39)); } @Test public void testGteOperator() throws IOException { JSONObject result = - executeQuery( - String.format( - "source=%s age >= 36 | fields age", - TEST_INDEX_BANK)); + executeQuery(String.format("source=%s age >= 36 | fields age", TEST_INDEX_BANK)); verifyDataRows(result, rows(36), rows(36), rows(39)); } @Test public void testLikeFunction() throws IOException { JSONObject result = - executeQuery(String.format("source=%s like(firstname, 'Hatti_') | fields firstname", - TEST_INDEX_BANK)); + executeQuery( + 
String.format( + "source=%s like(firstname, 'Hatti_') | fields firstname", TEST_INDEX_BANK)); verifyDataRows(result, rows("Hattie")); } @@ -251,8 +209,8 @@ public void testLikeFunction() throws IOException { public void testBinaryPredicateWithNullValue() throws IOException { JSONObject result = executeQuery( - String.format("source=%s | where age >= 36 | fields age", - TEST_INDEX_BANK_WITH_NULL_VALUES)); + String.format( + "source=%s | where age >= 36 | fields age", TEST_INDEX_BANK_WITH_NULL_VALUES)); verifyDataRows(result, rows(36), rows(36)); } @@ -260,7 +218,8 @@ public void testBinaryPredicateWithNullValue() throws IOException { public void testBinaryPredicateWithMissingValue() throws IOException { JSONObject result = executeQuery( - String.format("source=%s | where balance > 40000 | fields balance", + String.format( + "source=%s | where balance > 40000 | fields balance", TEST_INDEX_BANK_WITH_NULL_VALUES)); verifyDataRows(result, rows(48086)); } @@ -269,12 +228,13 @@ private void queryExecutionShouldThrowExceptionDueToNullOrMissingValue( String query, String... 
errorMsgs) { try { executeQuery(query); - fail("Expected to throw ExpressionEvaluationException, but none was thrown for query: " - + query); + fail( + "Expected to throw ExpressionEvaluationException, but none was thrown for query: " + + query); } catch (ResponseException e) { String errorMsg = e.getMessage(); assertTrue(errorMsg.contains("ExpressionEvaluationException")); - for (String msg: errorMsgs) { + for (String msg : errorMsgs) { assertTrue(errorMsg.contains(msg)); } } catch (IOException e) { diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/PPLIntegTestCase.java b/integ-test/src/test/java/org/opensearch/sql/ppl/PPLIntegTestCase.java index bcf183e9c6..5f82af9bce 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/PPLIntegTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/PPLIntegTestCase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestUtils.getResponseBody; @@ -20,9 +19,7 @@ import org.opensearch.client.Response; import org.opensearch.sql.legacy.SQLIntegTestCase; -/** - * OpenSearch Rest integration test base for PPL testing. - */ +/** OpenSearch Rest integration test base for PPL testing. 
*/ public abstract class PPLIntegTestCase extends SQLIntegTestCase { protected JSONObject executeQuery(String query) throws IOException { @@ -42,8 +39,10 @@ protected String explainQueryToString(String query) throws IOException { } protected String executeCsvQuery(String query, boolean sanitize) throws IOException { - Request request = buildRequest(query, - QUERY_API_ENDPOINT + String.format(Locale.ROOT, "?format=csv&sanitize=%b", sanitize)); + Request request = + buildRequest( + query, + QUERY_API_ENDPOINT + String.format(Locale.ROOT, "?format=csv&sanitize=%b", sanitize)); Response response = client().performRequest(request); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); return getResponseBody(response, true); @@ -65,8 +64,9 @@ protected Request buildRequest(String query, String endpoint) { protected static JSONObject updateClusterSettings(ClusterSetting setting) throws IOException { Request request = new Request("PUT", "/_cluster/settings"); - String persistentSetting = String.format(Locale.ROOT, - "{\"%s\": {\"%s\": %s}}", setting.type, setting.name, setting.value); + String persistentSetting = + String.format( + Locale.ROOT, "{\"%s\": {\"%s\": %s}}", setting.type, setting.name, setting.value); request.setJsonEntity(persistentSetting); RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); restOptionsBuilder.addHeader("Content-Type", "application/json"); @@ -92,9 +92,15 @@ SQLIntegTestCase.ClusterSetting nullify() { @Override public String toString() { return "ClusterSetting{" - + "type='" + type + '\'' - + ", path='" + name + '\'' - + ", value='" + value + '\'' + + "type='" + + type + + '\'' + + ", path='" + + name + + '\'' + + ", value='" + + value + + '\'' + '}'; } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/PPLPluginIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/PPLPluginIT.java index df7b464118..0c638be1e7 100644 --- 
a/integ-test/src/test/java/org/opensearch/sql/ppl/PPLPluginIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/PPLPluginIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.hamcrest.Matchers.equalTo; @@ -28,8 +27,7 @@ import org.opensearch.sql.util.TestUtils; public class PPLPluginIT extends PPLIntegTestCase { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = ExpectedException.none(); private static final String PERSISTENT = "persistent"; @@ -86,9 +84,11 @@ public void sqlEnableSettingsTest() throws IOException { assertThat(result.getInt("status"), equalTo(400)); JSONObject error = result.getJSONObject("error"); assertThat(error.getString("reason"), equalTo("Invalid Query")); - assertThat(error.getString("details"), equalTo( - "Either plugins.ppl.enabled or rest.action.multi.allow_explicit_index setting is " - + "false")); + assertThat( + error.getString("details"), + equalTo( + "Either plugins.ppl.enabled or rest.action.multi.allow_explicit_index setting is " + + "false")); assertThat(error.getString("type"), equalTo("IllegalAccessException")); // reset the setting diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/ParseCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/ParseCommandIT.java index 36fcb4bf3b..7f25f6f160 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/ParseCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/ParseCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -23,9 +22,10 @@ public void init() throws IOException { @Test public void testParseCommand() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | parse email '.+@(?.+)' | fields email, host", - TEST_INDEX_BANK)); + JSONObject result 
= + executeQuery( + String.format( + "source=%s | parse email '.+@(?.+)' | fields email, host", TEST_INDEX_BANK)); verifyOrder( result, rows("amberduke@pyrami.com", "pyrami.com"), @@ -39,8 +39,10 @@ public void testParseCommand() throws IOException { @Test public void testParseCommandReplaceOriginalField() throws IOException { - JSONObject result = executeQuery( - String.format("source=%s | parse email '.+@(?.+)' | fields email", TEST_INDEX_BANK)); + JSONObject result = + executeQuery( + String.format( + "source=%s | parse email '.+@(?.+)' | fields email", TEST_INDEX_BANK)); verifyOrder( result, rows("pyrami.com"), @@ -54,8 +56,12 @@ public void testParseCommandReplaceOriginalField() throws IOException { @Test public void testParseCommandWithOtherRunTimeFields() throws IOException { - JSONObject result = executeQuery(String.format("source=%s | parse email '.+@(?.+)' | " - + "eval eval_result=1 | fields host, eval_result", TEST_INDEX_BANK)); + JSONObject result = + executeQuery( + String.format( + "source=%s | parse email '.+@(?.+)' | " + + "eval eval_result=1 | fields host, eval_result", + TEST_INDEX_BANK)); verifyOrder( result, rows("pyrami.com", 1), diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/PositionFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/PositionFunctionIT.java index 24319a0cb8..a7f638b3dd 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/PositionFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/PositionFunctionIT.java @@ -5,96 +5,91 @@ package org.opensearch.sql.ppl; -import org.junit.Test; - -import java.io.IOException; - import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_CALCS; import static org.opensearch.sql.util.MatcherUtils.rows; import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; +import java.io.IOException; +import org.junit.Test; + public class PositionFunctionIT extends PPLIntegTestCase { - @Override - public void init() throws IOException { - 
loadIndex(Index.CALCS); - } - - @Test - public void test_position_function() throws IOException { - String query = "source=" + TEST_INDEX_CALCS - + " | eval f=position('ON', str1) | fields f"; - - var result = executeQuery(query); - - assertEquals(17, result.getInt("total")); - verifyDataRows(result, - rows(7), rows(7), - rows(2), rows(0), - rows(0), rows(0), - rows(0), rows(0), - rows(0), rows(0), - rows(0), rows(0), - rows(0), rows(0), - rows(0), rows(0), - rows(0)); - } - - @Test - public void test_position_function_with_fields_only() throws IOException { - String query = "source=" + TEST_INDEX_CALCS - + " | eval f=position(str3 IN str2) | where str2 IN ('one', 'two', 'three')| fields f"; - - var result = executeQuery(query); - - assertEquals(3, result.getInt("total")); - verifyDataRows(result, rows(3), rows(0), rows(4)); - } - - @Test - public void test_position_function_with_string_literals() throws IOException { - String query = "source=" + TEST_INDEX_CALCS - + " | eval f=position('world' IN 'hello world') | where str2='one' | fields f"; - - var result = executeQuery(query); - - assertEquals(1, result.getInt("total")); - verifyDataRows(result, rows(7)); - } - - @Test - public void test_position_function_with_nulls() throws IOException { - String query = "source=" + TEST_INDEX_CALCS - + " | eval f=position('ee' IN str2) | where isnull(str2) | fields str2,f"; - - var result = executeQuery(query); - - assertEquals(4, result.getInt("total")); - verifyDataRows(result, - rows(null, null), - rows(null, null), - rows(null, null), - rows(null, null)); - } - - @Test - public void test_position_function_with_function_as_arg() throws IOException { - String query = "source=" + TEST_INDEX_CALCS - + " | eval f=position(upper(str3) IN str1) | where like(str1, 'BINDING SUPPLIES') | fields f"; - - var result = executeQuery(query); - - assertEquals(1, result.getInt("total")); - verifyDataRows(result, rows(15)); - } - - @Test - public void 
test_position_function_with_function_in_where_clause() throws IOException { - String query = "source=" + TEST_INDEX_CALCS - + " | where position(str3 IN str2)=1 | fields str2"; - - var result = executeQuery(query); - - assertEquals(2, result.getInt("total")); - verifyDataRows(result, rows("eight"), rows("eleven")); - } + @Override + public void init() throws IOException { + loadIndex(Index.CALCS); + } + + @Test + public void test_position_function() throws IOException { + String query = "source=" + TEST_INDEX_CALCS + " | eval f=position('ON', str1) | fields f"; + + var result = executeQuery(query); + + assertEquals(17, result.getInt("total")); + verifyDataRows( + result, rows(7), rows(7), rows(2), rows(0), rows(0), rows(0), rows(0), rows(0), rows(0), + rows(0), rows(0), rows(0), rows(0), rows(0), rows(0), rows(0), rows(0)); + } + + @Test + public void test_position_function_with_fields_only() throws IOException { + String query = + "source=" + + TEST_INDEX_CALCS + + " | eval f=position(str3 IN str2) | where str2 IN ('one', 'two', 'three')| fields f"; + + var result = executeQuery(query); + + assertEquals(3, result.getInt("total")); + verifyDataRows(result, rows(3), rows(0), rows(4)); + } + + @Test + public void test_position_function_with_string_literals() throws IOException { + String query = + "source=" + + TEST_INDEX_CALCS + + " | eval f=position('world' IN 'hello world') | where str2='one' | fields f"; + + var result = executeQuery(query); + + assertEquals(1, result.getInt("total")); + verifyDataRows(result, rows(7)); + } + + @Test + public void test_position_function_with_nulls() throws IOException { + String query = + "source=" + + TEST_INDEX_CALCS + + " | eval f=position('ee' IN str2) | where isnull(str2) | fields str2,f"; + + var result = executeQuery(query); + + assertEquals(4, result.getInt("total")); + verifyDataRows(result, rows(null, null), rows(null, null), rows(null, null), rows(null, null)); + } + + @Test + public void 
test_position_function_with_function_as_arg() throws IOException { + String query = + "source=" + + TEST_INDEX_CALCS + + " | eval f=position(upper(str3) IN str1) | where like(str1, 'BINDING SUPPLIES') |" + + " fields f"; + + var result = executeQuery(query); + + assertEquals(1, result.getInt("total")); + verifyDataRows(result, rows(15)); + } + + @Test + public void test_position_function_with_function_in_where_clause() throws IOException { + String query = "source=" + TEST_INDEX_CALCS + " | where position(str3 IN str2)=1 | fields str2"; + + var result = executeQuery(query); + + assertEquals(2, result.getInt("total")); + verifyDataRows(result, rows("eight"), rows("eleven")); + } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/PrometheusDataSourceCommandsIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/PrometheusDataSourceCommandsIT.java index 1dbb0028f2..8d72f02e29 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/PrometheusDataSourceCommandsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/PrometheusDataSourceCommandsIT.java @@ -23,7 +23,6 @@ import java.nio.file.Paths; import java.text.SimpleDateFormat; import java.util.Date; -import lombok.Data; import lombok.SneakyThrows; import org.apache.commons.lang3.StringUtils; import org.json.JSONArray; @@ -41,10 +40,10 @@ public class PrometheusDataSourceCommandsIT extends PPLIntegTestCase { /** - * Integ tests are dependent on self generated metrics in prometheus instance. - * When running individual integ tests there - * is no time for generation of metrics in the test prometheus instance. - * This method gives prometheus time to generate metrics on itself. + * Integ tests are dependent on self generated metrics in prometheus instance. When running + * individual integ tests there is no time for generation of metrics in the test prometheus + * instance. This method gives prometheus time to generate metrics on itself. 
+ * * @throws InterruptedException */ @BeforeClass @@ -55,8 +54,11 @@ protected static void metricGenerationWait() throws InterruptedException { @Override protected void init() throws InterruptedException, IOException { DataSourceMetadata createDSM = - new DataSourceMetadata("my_prometheus", DataSourceType.PROMETHEUS, - ImmutableList.of(), ImmutableMap.of("prometheus.uri", "http://localhost:9090")); + new DataSourceMetadata( + "my_prometheus", + DataSourceType.PROMETHEUS, + ImmutableList.of(), + ImmutableMap.of("prometheus.uri", "http://localhost:9090")); Request createRequest = getCreateDataSourceRequest(createDSM); Response response = client().performRequest(createRequest); Assert.assertEquals(201, response.getStatusLine().getStatusCode()); @@ -72,15 +74,15 @@ protected void deleteDataSourceMetadata() throws IOException { @Test @SneakyThrows public void testSourceMetricCommand() { - JSONObject response = - executeQuery("source=my_prometheus.prometheus_http_requests_total"); - verifySchema(response, + JSONObject response = executeQuery("source=my_prometheus.prometheus_http_requests_total"); + verifySchema( + response, schema(VALUE, "double"), - schema(TIMESTAMP, "timestamp"), - schema("handler", "string"), - schema("code", "string"), - schema("instance", "string"), - schema("job", "string")); + schema(TIMESTAMP, "timestamp"), + schema("handler", "string"), + schema("code", "string"), + schema("instance", "string"), + schema("job", "string")); Assertions.assertTrue(response.getInt("size") > 0); Assertions.assertEquals(6, response.getJSONArray("datarows").getJSONArray(0).length()); JSONArray firstRow = response.getJSONArray("datarows").getJSONArray(0); @@ -94,19 +96,20 @@ public void testSourceMetricCommand() { @SneakyThrows public void testSourceMetricCommandWithTimestamp() { SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - String query = "source=my_prometheus.prometheus_http_requests_total | where @timestamp > '" - + format.format(new 
Date(System.currentTimeMillis() - 3600 * 1000)) - + "' | sort + @timestamp | head 5"; + String query = + "source=my_prometheus.prometheus_http_requests_total | where @timestamp > '" + + format.format(new Date(System.currentTimeMillis() - 3600 * 1000)) + + "' | sort + @timestamp | head 5"; - JSONObject response = - executeQuery(query); - verifySchema(response, + JSONObject response = executeQuery(query); + verifySchema( + response, schema(VALUE, "double"), - schema(TIMESTAMP, "timestamp"), - schema("handler", "string"), - schema("code", "string"), - schema("instance", "string"), - schema("job", "string")); + schema(TIMESTAMP, "timestamp"), + schema("handler", "string"), + schema("code", "string"), + schema("instance", "string"), + schema("job", "string")); // Currently, data is not injected into prometheus, // so asserting on result is not possible. Verifying only schema. } @@ -115,9 +118,12 @@ public void testSourceMetricCommandWithTimestamp() { @SneakyThrows public void testMetricAvgAggregationCommand() { JSONObject response = - executeQuery("source=`my_prometheus`.`prometheus_http_requests_total` | stats avg(@value) as `agg` by span(@timestamp, 15s), `handler`, `job`"); - verifySchema(response, - schema("agg", "double"), + executeQuery( + "source=`my_prometheus`.`prometheus_http_requests_total` | stats avg(@value) as `agg`" + + " by span(@timestamp, 15s), `handler`, `job`"); + verifySchema( + response, + schema("agg", "double"), schema("span(@timestamp,15s)", "timestamp"), schema("handler", "string"), schema("job", "string")); @@ -134,9 +140,12 @@ public void testMetricAvgAggregationCommand() { @SneakyThrows public void testMetricAvgAggregationCommandWithAlias() { JSONObject response = - executeQuery("source=my_prometheus.prometheus_http_requests_total | stats avg(@value) as agg by span(@timestamp, 15s), `handler`, job"); - verifySchema(response, - schema("agg", "double"), + executeQuery( + "source=my_prometheus.prometheus_http_requests_total | stats avg(@value) 
as agg by" + + " span(@timestamp, 15s), `handler`, job"); + verifySchema( + response, + schema("agg", "double"), schema("span(@timestamp,15s)", "timestamp"), schema("handler", "string"), schema("job", "string")); @@ -149,15 +158,15 @@ public void testMetricAvgAggregationCommandWithAlias() { } } - @Test @SneakyThrows public void testMetricMaxAggregationCommand() { JSONObject response = - executeQuery("source=my_prometheus.prometheus_http_requests_total | stats max(@value) by span(@timestamp, 15s)"); - verifySchema(response, - schema("max(@value)", "double"), - schema("span(@timestamp,15s)", "timestamp")); + executeQuery( + "source=my_prometheus.prometheus_http_requests_total | stats max(@value) by" + + " span(@timestamp, 15s)"); + verifySchema( + response, schema("max(@value)", "double"), schema("span(@timestamp,15s)", "timestamp")); Assertions.assertTrue(response.getInt("size") > 0); Assertions.assertEquals(2, response.getJSONArray("datarows").getJSONArray(0).length()); JSONArray firstRow = response.getJSONArray("datarows").getJSONArray(0); @@ -167,14 +176,16 @@ public void testMetricMaxAggregationCommand() { } } - @Test @SneakyThrows public void testMetricMinAggregationCommand() { JSONObject response = - executeQuery("source=my_prometheus.prometheus_http_requests_total | stats min(@value) by span(@timestamp, 15s), handler"); - verifySchema(response, - schema("min(@value)", "double"), + executeQuery( + "source=my_prometheus.prometheus_http_requests_total | stats min(@value) by" + + " span(@timestamp, 15s), handler"); + verifySchema( + response, + schema("min(@value)", "double"), schema("span(@timestamp,15s)", "timestamp"), schema("handler", "string")); Assertions.assertTrue(response.getInt("size") > 0); @@ -190,9 +201,12 @@ public void testMetricMinAggregationCommand() { @SneakyThrows public void testMetricCountAggregationCommand() { JSONObject response = - executeQuery("source=my_prometheus.prometheus_http_requests_total | stats count() by span(@timestamp, 15s), 
handler, job"); - verifySchema(response, - schema("count()", "integer"), + executeQuery( + "source=my_prometheus.prometheus_http_requests_total | stats count() by" + + " span(@timestamp, 15s), handler, job"); + verifySchema( + response, + schema("count()", "integer"), schema("span(@timestamp,15s)", "timestamp"), schema("handler", "string"), schema("job", "string")); @@ -209,9 +223,12 @@ public void testMetricCountAggregationCommand() { @SneakyThrows public void testMetricSumAggregationCommand() { JSONObject response = - executeQuery("source=my_prometheus.prometheus_http_requests_total | stats sum(@value) by span(@timestamp, 15s), handler, job"); - verifySchema(response, - schema("sum(@value)", "double"), + executeQuery( + "source=my_prometheus.prometheus_http_requests_total | stats sum(@value) by" + + " span(@timestamp, 15s), handler, job"); + verifySchema( + response, + schema("sum(@value)", "double"), schema("span(@timestamp,15s)", "timestamp"), schema("handler", "string"), schema("job", "string")); @@ -224,18 +241,21 @@ public void testMetricSumAggregationCommand() { } } - @Test @SneakyThrows public void testQueryRange() { long currentTimestamp = new Date().getTime(); JSONObject response = - executeQuery("source=my_prometheus.query_range('prometheus_http_requests_total'," - + ((currentTimestamp/1000)-3600) + "," + currentTimestamp/1000 + ", " + "'14'" + ")" ); - verifySchema(response, - schema(LABELS, "struct"), - schema(VALUE, "array"), - schema(TIMESTAMP, "array")); + executeQuery( + "source=my_prometheus.query_range('prometheus_http_requests_total'," + + ((currentTimestamp / 1000) - 3600) + + "," + + currentTimestamp / 1000 + + ", " + + "'14'" + + ")"); + verifySchema( + response, schema(LABELS, "struct"), schema(VALUE, "array"), schema(TIMESTAMP, "array")); Assertions.assertTrue(response.getInt("size") > 0); } @@ -244,24 +264,23 @@ public void explainQueryRange() throws Exception { String expected = 
loadFromFile("expectedOutput/ppl/explain_query_range.json"); assertJsonEquals( expected, - explainQueryToString("source = my_prometheus" - + ".query_range('prometheus_http_requests_total',1689281439,1689291439,14)") - ); + explainQueryToString( + "source = my_prometheus" + + ".query_range('prometheus_http_requests_total',1689281439,1689291439,14)")); } - @Test + @Test public void testExplainForQueryExemplars() throws Exception { String expected = loadFromFile("expectedOutput/ppl/explain_query_exemplars.json"); assertJsonEquals( expected, - explainQueryToString("source = my_prometheus." - + "query_exemplars('app_ads_ad_requests_total',1689228292,1689232299)") - ); + explainQueryToString( + "source = my_prometheus." + + "query_exemplars('app_ads_ad_requests_total',1689228292,1689232299)")); } String loadFromFile(String filename) throws Exception { URI uri = Resources.getResource(filename).toURI(); return new String(Files.readAllBytes(Paths.get(uri))); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/QueryAnalysisIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/QueryAnalysisIT.java index dd2fcb84c8..aaefbbe395 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/QueryAnalysisIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/QueryAnalysisIT.java @@ -3,13 +3,11 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; import java.io.IOException; -import org.junit.Ignore; import org.junit.Test; import org.opensearch.client.ResponseException; import org.opensearch.sql.common.antlr.SyntaxCheckException; @@ -80,10 +78,7 @@ public void queryShouldBeCaseInsensitiveInKeywords() { queryShouldPassSyntaxAndSemanticCheck(query); } - /** - * Commands that fail syntax analysis should throw - * {@link SyntaxCheckException}. - */ + /** Commands that fail syntax analysis should throw {@link SyntaxCheckException}. 
*/ @Test public void queryNotStartingWithSearchCommandShouldFailSyntaxCheck() { String query = "fields firstname"; @@ -108,14 +103,12 @@ public void unsupportedAggregationFunctionShouldFailSyntaxCheck() { queryShouldThrowSyntaxException(query, "Failed to parse query due to offending symbol"); } - /** - * Commands that fail semantic analysis should throw {@link SemanticCheckException}. - */ + /** Commands that fail semantic analysis should throw {@link SemanticCheckException}. */ @Test public void nonexistentFieldShouldFailSemanticCheck() { String query = String.format("search source=%s | fields name", TEST_INDEX_ACCOUNT); - queryShouldThrowSemanticException(query, "can't resolve Symbol(namespace=FIELD_NAME, " - + "name=name) in type env"); + queryShouldThrowSemanticException( + query, "can't resolve Symbol(namespace=FIELD_NAME, " + "name=name) in type env"); } private void queryShouldPassSyntaxAndSemanticCheck(String query) { @@ -135,7 +128,7 @@ private void queryShouldThrowSyntaxException(String query, String... 
messages) { } catch (ResponseException e) { String errorMsg = e.getMessage(); assertTrue(errorMsg.contains("SyntaxCheckException")); - for (String msg: messages) { + for (String msg : messages) { assertTrue(errorMsg.contains(msg)); } } catch (IOException e) { diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/QueryStringIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/QueryStringIT.java index d178910825..fc797983d2 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/QueryStringIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/QueryStringIT.java @@ -27,22 +27,29 @@ public void all_fields_test() throws IOException { @Test public void mandatory_params_test() throws IOException { - String query = "source=" + TEST_INDEX_BEER + " | where query_string([\\\"Tags\\\" ^ 1.5, Title, `Body` 4.2], 'taste')"; + String query = + "source=" + + TEST_INDEX_BEER + + " | where query_string([\\\"Tags\\\" ^ 1.5, Title, `Body` 4.2], 'taste')"; JSONObject result = executeQuery(query); assertEquals(16, result.getInt("total")); } @Test public void all_params_test() throws IOException { - String query = "source=" + TEST_INDEX_BEER + " | where query_string(['Body', Tags, Title], 'taste beer'," - + "allow_leading_wildcard=true, enable_position_increments=true, escape=false," - + "fuzziness= 1, fuzzy_rewrite='constant_score', max_determinized_states = 10000," - + "analyzer='english', analyze_wildcard = false, quote_field_suffix = '.exact'," - + "auto_generate_synonyms_phrase_query=true, boost = 0.77," - + "quote_analyzer='standard', phrase_slop=0, rewrite='constant_score', type='best_fields'," - + "tie_breaker=0.3, time_zone='Canada/Pacific', default_operator='or'," - + "fuzzy_transpositions = false, lenient = true, fuzzy_max_expansions = 25," - + "minimum_should_match = '2<-25% 9<-3', fuzzy_prefix_length = 7)"; + String query = + "source=" + + TEST_INDEX_BEER + + " | where query_string(['Body', Tags, Title], 'taste" + + " 
beer',allow_leading_wildcard=true, enable_position_increments=true," + + " escape=false,fuzziness= 1, fuzzy_rewrite='constant_score', max_determinized_states" + + " = 10000,analyzer='english', analyze_wildcard = false, quote_field_suffix =" + + " '.exact',auto_generate_synonyms_phrase_query=true, boost =" + + " 0.77,quote_analyzer='standard', phrase_slop=0, rewrite='constant_score'," + + " type='best_fields',tie_breaker=0.3, time_zone='Canada/Pacific'," + + " default_operator='or',fuzzy_transpositions = false, lenient = true," + + " fuzzy_max_expansions = 25,minimum_should_match = '2<-25% 9<-3', fuzzy_prefix_length" + + " = 7)"; JSONObject result = executeQuery(query); assertEquals(49, result.getInt("total")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/RareCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/RareCommandIT.java index f65941b8f7..e3ed1661cd 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/RareCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/RareCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -30,12 +29,8 @@ public void afterTest() throws IOException { @Test public void testRareWithoutGroup() throws IOException { - JSONObject result = - executeQuery(String.format("source=%s | rare gender", TEST_INDEX_ACCOUNT)); - verifyDataRows( - result, - rows("F"), - rows("M")); + JSONObject result = executeQuery(String.format("source=%s | rare gender", TEST_INDEX_ACCOUNT)); + verifyDataRows(result, rows("F"), rows("M")); } @Test @@ -65,6 +60,4 @@ public void testRareWithGroup() throws IOException { rows("M", "KY"), rows("M", "IN")); } - - } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/RelevanceFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/RelevanceFunctionIT.java index b72dc5230f..8e6614dfed 100644 --- 
a/integ-test/src/test/java/org/opensearch/sql/ppl/RelevanceFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/RelevanceFunctionIT.java @@ -6,15 +6,8 @@ package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BEER; -import static org.opensearch.sql.util.MatcherUtils.rows; -import static org.opensearch.sql.util.MatcherUtils.schema; -import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; -import static org.opensearch.sql.util.MatcherUtils.verifySchema; -import static org.opensearch.sql.util.MatcherUtils.verifySome; import java.io.IOException; - -import org.json.JSONObject; import org.junit.Test; public class RelevanceFunctionIT extends PPLIntegTestCase { @@ -25,11 +18,11 @@ public void init() throws IOException { @Test public void test_wildcard_simple_query_string() throws IOException { - String query1 = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE simple_query_string(['Tags'], 'taste') | fields Id"; + String query1 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE simple_query_string(['Tags'], 'taste') | fields Id"; var result1 = executeQuery(query1); - String query2 = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE simple_query_string(['T*'], 'taste') | fields Id"; + String query2 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE simple_query_string(['T*'], 'taste') | fields Id"; var result2 = executeQuery(query2); assertNotEquals(result2.getInt("total"), result1.getInt("total")); } @@ -40,11 +33,15 @@ public void test_wildcard_simple_query_string() throws IOException { */ @Test public void verify_flags_in_simple_query_string() throws IOException { - String query1 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE simple_query_string(['Body'], '-free', flags='NONE|PREFIX|ESCAPE')"; + String query1 = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE simple_query_string(['Body'], '-free', flags='NONE|PREFIX|ESCAPE')"; var result1 = executeQuery(query1); - String query2 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE 
simple_query_string([Body], '-free', flags='NOT|AND|OR')"; + String query2 = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE simple_query_string([Body], '-free', flags='NOT|AND|OR')"; var result2 = executeQuery(query2); assertNotEquals(result2.getInt("total"), result1.getInt("total")); @@ -60,11 +57,11 @@ public void verify_flags_in_simple_query_string() throws IOException { */ @Test public void verify_escape_in_query_string() throws IOException { - String query1 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE query_string([Title], '?', escape=true);"; + String query1 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE query_string([Title], '?', escape=true);"; var result1 = executeQuery(query1); - String query2 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE query_string([Title], '?', escape=false);"; + String query2 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE query_string([Title], '?', escape=false);"; var result2 = executeQuery(query2); assertEquals(0, result1.getInt("total")); assertEquals(8, result2.getInt("total")); @@ -77,11 +74,15 @@ public void verify_escape_in_query_string() throws IOException { */ @Test public void verify_default_operator_in_query_string() throws IOException { - String query1 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE query_string([Title], 'beer taste', default_operator='OR')"; + String query1 = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE query_string([Title], 'beer taste', default_operator='OR')"; var result1 = executeQuery(query1); - String query2 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE query_string([Title], 'beer taste', default_operator='AND')"; + String query2 = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE query_string([Title], 'beer taste', default_operator='AND')"; var result2 = executeQuery(query2); assertEquals(16, result1.getInt("total")); assertEquals(4, result2.getInt("total")); @@ -89,11 +90,15 @@ public void verify_default_operator_in_query_string() throws IOException { @Test public void 
verify_default_operator_in_simple_query_string() throws IOException { - String query1 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE simple_query_string([Title], 'beer taste', default_operator='OR')"; + String query1 = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE simple_query_string([Title], 'beer taste', default_operator='OR')"; var result1 = executeQuery(query1); - String query2 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE simple_query_string([Title], 'beer taste', default_operator='AND')"; + String query2 = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE simple_query_string([Title], 'beer taste', default_operator='AND')"; var result2 = executeQuery(query2); assertEquals(16, result1.getInt("total")); assertEquals(4, result2.getInt("total")); @@ -101,11 +106,11 @@ public void verify_default_operator_in_simple_query_string() throws IOException @Test public void verify_default_operator_in_multi_match() throws IOException { - String query1 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE multi_match([Title], 'beer taste', operator='OR')"; + String query1 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE multi_match([Title], 'beer taste', operator='OR')"; var result1 = executeQuery(query1); - String query2 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE multi_match([Title], 'beer taste', operator='AND')"; + String query2 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE multi_match([Title], 'beer taste', operator='AND')"; var result2 = executeQuery(query2); assertEquals(16, result1.getInt("total")); assertEquals(4, result2.getInt("total")); @@ -113,11 +118,11 @@ public void verify_default_operator_in_multi_match() throws IOException { @Test public void verify_operator_in_match() throws IOException { - String query1 = "SOURCE=" - + TEST_INDEX_BEER + " | WHERE match(Title, 'beer taste', operator='OR')"; + String query1 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE match(Title, 'beer taste', operator='OR')"; var result1 = executeQuery(query1); - String query2 = "SOURCE=" - + TEST_INDEX_BEER + " | 
WHERE match(Title, 'beer taste', operator='AND')"; + String query2 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE match(Title, 'beer taste', operator='AND')"; var result2 = executeQuery(query2); assertEquals(16, result1.getInt("total")); assertEquals(4, result2.getInt("total")); diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/RenameCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/RenameCommandIT.java index ad1add4e12..ae06e75a06 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/RenameCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/RenameCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -43,7 +42,9 @@ public void testRenameMultiField() throws IOException { verifyColumn(result, columnName("FIRSTNAME"), columnName("AGE")); } - @Ignore("Wildcard is unsupported yet. Enable once https://github.com/opensearch-project/sql/issues/787 is resolved.") + @Ignore( + "Wildcard is unsupported yet. 
Enable once" + + " https://github.com/opensearch-project/sql/issues/787 is resolved.") @Test public void testRenameWildcardFields() throws IOException { JSONObject result = executeQuery("source=" + TEST_INDEX_ACCOUNT + " | rename %name as %NAME"); diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/ResourceMonitorIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/ResourceMonitorIT.java index e608e94512..56b54ba748 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/ResourceMonitorIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/ResourceMonitorIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DOG; @@ -31,11 +30,11 @@ public void queryExceedResourceLimitShouldFail() throws IOException { new ClusterSetting("persistent", Settings.Key.QUERY_MEMORY_LIMIT.getKeyValue(), "1%")); String query = String.format("search source=%s age=20", TEST_INDEX_DOG); - ResponseException exception = - expectThrows(ResponseException.class, () -> executeQuery(query)); + ResponseException exception = expectThrows(ResponseException.class, () -> executeQuery(query)); assertEquals(503, exception.getResponse().getStatusLine().getStatusCode()); - assertThat(exception.getMessage(), Matchers.containsString("resource is not enough to run the" - + " query, quit.")); + assertThat( + exception.getMessage(), + Matchers.containsString("resource is not enough to run the" + " query, quit.")); // update plugins.ppl.query.memory_limit to default value 85% updateClusterSettings( diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/SearchCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/SearchCommandIT.java index 2e62b464bb..5d1b0203d7 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/SearchCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/SearchCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: 
Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/SettingsIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/SettingsIT.java index d012cce9e8..224afde4c5 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/SettingsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/SettingsIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -26,17 +25,13 @@ public void init() throws IOException { public void testQuerySizeLimit() throws IOException { // Default setting, fetch 200 rows from source JSONObject result = - executeQuery( - String.format( - "search source=%s age>35 | fields firstname", TEST_INDEX_BANK)); + executeQuery(String.format("search source=%s age>35 | fields firstname", TEST_INDEX_BANK)); verifyDataRows(result, rows("Hattie"), rows("Elinor"), rows("Virginia")); // Fetch 1 rows from source setQuerySizeLimit(1); result = - executeQuery( - String.format( - "search source=%s age>35 | fields firstname", TEST_INDEX_BANK)); + executeQuery(String.format("search source=%s age>35 | fields firstname", TEST_INDEX_BANK)); verifyDataRows(result, rows("Hattie")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/ShowDataSourcesCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/ShowDataSourcesCommandIT.java index 4190e4274b..c9c4854212 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/ShowDataSourcesCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/ShowDataSourcesCommandIT.java @@ -7,7 +7,6 @@ package org.opensearch.sql.ppl; -import static org.opensearch.sql.legacy.TestUtils.getResponseBody; import static org.opensearch.sql.util.MatcherUtils.columnName; import static org.opensearch.sql.util.MatcherUtils.rows; import static 
org.opensearch.sql.util.MatcherUtils.verifyColumn; @@ -18,7 +17,6 @@ import java.io.IOException; import org.json.JSONObject; import org.junit.After; -import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.jupiter.api.Test; @@ -30,10 +28,10 @@ public class ShowDataSourcesCommandIT extends PPLIntegTestCase { /** - * Integ tests are dependent on self generated metrics in prometheus instance. - * When running individual integ tests there - * is no time for generation of metrics in the test prometheus instance. - * This method gives prometheus time to generate metrics on itself. + * Integ tests are dependent on self generated metrics in prometheus instance. When running + * individual integ tests there is no time for generation of metrics in the test prometheus + * instance. This method gives prometheus time to generate metrics on itself. + * * @throws InterruptedException */ @BeforeClass @@ -44,8 +42,11 @@ protected static void metricGenerationWait() throws InterruptedException { @Override protected void init() throws InterruptedException, IOException { DataSourceMetadata createDSM = - new DataSourceMetadata("my_prometheus", DataSourceType.PROMETHEUS, - ImmutableList.of(), ImmutableMap.of("prometheus.uri", "http://localhost:9090")); + new DataSourceMetadata( + "my_prometheus", + DataSourceType.PROMETHEUS, + ImmutableList.of(), + ImmutableMap.of("prometheus.uri", "http://localhost:9090")); Request createRequest = getCreateDataSourceRequest(createDSM); Response response = client().performRequest(createRequest); Assert.assertEquals(201, response.getStatusLine().getStatusCode()); @@ -61,26 +62,14 @@ protected void deleteDataSourceMetadata() throws IOException { @Test public void testShowDataSourcesCommands() throws IOException { JSONObject result = executeQuery("show datasources"); - verifyDataRows(result, - rows("my_prometheus", "PROMETHEUS"), - rows("@opensearch", "OPENSEARCH")); - verifyColumn( - result, - 
columnName("DATASOURCE_NAME"), - columnName("CONNECTOR_TYPE") - ); + verifyDataRows(result, rows("my_prometheus", "PROMETHEUS"), rows("@opensearch", "OPENSEARCH")); + verifyColumn(result, columnName("DATASOURCE_NAME"), columnName("CONNECTOR_TYPE")); } @Test public void testShowDataSourcesCommandsWithWhereClause() throws IOException { JSONObject result = executeQuery("show datasources | where CONNECTOR_TYPE='PROMETHEUS'"); - verifyDataRows(result, - rows("my_prometheus", "PROMETHEUS")); - verifyColumn( - result, - columnName("DATASOURCE_NAME"), - columnName("CONNECTOR_TYPE") - ); + verifyDataRows(result, rows("my_prometheus", "PROMETHEUS")); + verifyColumn(result, columnName("DATASOURCE_NAME"), columnName("CONNECTOR_TYPE")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/SimpleQueryStringIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/SimpleQueryStringIT.java index f13aaadc08..1ad4834d80 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/SimpleQueryStringIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/SimpleQueryStringIT.java @@ -6,14 +6,8 @@ package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BEER; -import static org.opensearch.sql.util.MatcherUtils.rows; -import static org.opensearch.sql.util.MatcherUtils.schema; -import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; -import static org.opensearch.sql.util.MatcherUtils.verifySchema; -import static org.opensearch.sql.util.MatcherUtils.verifySome; import java.io.IOException; - import org.json.JSONObject; import org.junit.Test; @@ -25,36 +19,42 @@ public void init() throws IOException { @Test public void test_simple_query_string() throws IOException { - String query = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE simple_query_string([\\\"Tags\\\" ^ 1.5, Title, `Body` 4.2], 'taste') | fields Id"; + String query = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE simple_query_string([\\\"Tags\\\" ^ 1.5, Title, `Body` 
4.2], 'taste') |" + + " fields Id"; var result = executeQuery(query); assertEquals(16, result.getInt("total")); } @Test public void test_simple_query_string_all_params() throws IOException { - String query = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE simple_query_string(['Body', Tags, Title], 'taste beer', default_operator='or'," - + "analyzer=english, analyze_wildcard = false, quote_field_suffix = '.exact'," - + "auto_generate_synonyms_phrase_query=true, boost = 0.77, flags='PREFIX'," - + "fuzzy_transpositions = false, lenient = true, fuzzy_max_expansions = 25," - + "minimum_should_match = '2<-25% 9<-3', fuzzy_prefix_length = 7) | fields Id"; + String query = + "SOURCE=" + + TEST_INDEX_BEER + + " | WHERE simple_query_string(['Body', Tags, Title], 'taste beer'," + + " default_operator='or',analyzer=english, analyze_wildcard = false," + + " quote_field_suffix = '.exact',auto_generate_synonyms_phrase_query=true, boost =" + + " 0.77, flags='PREFIX',fuzzy_transpositions = false, lenient = true," + + " fuzzy_max_expansions = 25,minimum_should_match = '2<-25% 9<-3', fuzzy_prefix_length" + + " = 7) | fields Id"; var result = executeQuery(query); assertEquals(49, result.getInt("total")); } @Test public void test_wildcard_simple_query_string() throws IOException { - String query1 = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE simple_query_string(['Tags'], 'taste') | fields Id"; + String query1 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE simple_query_string(['Tags'], 'taste') | fields Id"; var result1 = executeQuery(query1); - String query2 = "SOURCE=" + TEST_INDEX_BEER - + " | WHERE simple_query_string(['T*'], 'taste') | fields Id"; + String query2 = + "SOURCE=" + TEST_INDEX_BEER + " | WHERE simple_query_string(['T*'], 'taste') | fields Id"; var result2 = executeQuery(query2); assertNotEquals(result2.getInt("total"), result1.getInt("total")); - String query3 = "source=" + TEST_INDEX_BEER - + " | where simple_query_string(['*Date'], '2014-01-22')"; + String query3 = + "source=" 
+ TEST_INDEX_BEER + " | where simple_query_string(['*Date'], '2014-01-22')"; JSONObject result3 = executeQuery(query3); assertEquals(10, result3.getInt("total")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/SortCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/SortCommandIT.java index 0fd4e9ec86..c90a506252 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/SortCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/SortCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -14,7 +13,6 @@ import java.io.IOException; import org.json.JSONObject; -import org.junit.Ignore; import org.junit.Test; public class SortCommandIT extends PPLIntegTestCase { diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/StandaloneIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/StandaloneIT.java index b1fcbf7d1b..f81e1b6615 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/StandaloneIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/StandaloneIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.datasource.model.DataSourceMetadata.defaultOpenSearchDataSourceMetadata; @@ -31,11 +30,11 @@ import org.opensearch.sql.analysis.ExpressionAnalyzer; import org.opensearch.sql.common.response.ResponseListener; import org.opensearch.sql.common.setting.Settings; -import org.opensearch.sql.datasources.service.DataSourceMetadataStorage; import org.opensearch.sql.datasource.DataSourceService; -import org.opensearch.sql.datasources.service.DataSourceServiceImpl; -import org.opensearch.sql.datasources.auth.DataSourceUserAuthorizationHelper; import org.opensearch.sql.datasource.model.DataSourceMetadata; +import org.opensearch.sql.datasources.auth.DataSourceUserAuthorizationHelper; +import 
org.opensearch.sql.datasources.service.DataSourceMetadataStorage; +import org.opensearch.sql.datasources.service.DataSourceServiceImpl; import org.opensearch.sql.executor.ExecutionEngine; import org.opensearch.sql.executor.ExecutionEngine.QueryResponse; import org.opensearch.sql.executor.QueryManager; @@ -45,25 +44,25 @@ import org.opensearch.sql.expression.function.BuiltinFunctionRepository; import org.opensearch.sql.monitor.AlwaysHealthyMonitor; import org.opensearch.sql.monitor.ResourceMonitor; +import org.opensearch.sql.opensearch.client.OpenSearchClient; +import org.opensearch.sql.opensearch.client.OpenSearchRestClient; import org.opensearch.sql.opensearch.executor.OpenSearchExecutionEngine; import org.opensearch.sql.opensearch.executor.protector.ExecutionProtector; import org.opensearch.sql.opensearch.executor.protector.OpenSearchExecutionProtector; +import org.opensearch.sql.opensearch.security.SecurityAccess; +import org.opensearch.sql.opensearch.storage.OpenSearchDataSourceFactory; import org.opensearch.sql.opensearch.storage.OpenSearchStorageEngine; import org.opensearch.sql.planner.Planner; import org.opensearch.sql.planner.optimizer.LogicalPlanOptimizer; import org.opensearch.sql.ppl.antlr.PPLSyntaxParser; -import org.opensearch.sql.sql.SQLService; -import org.opensearch.sql.sql.antlr.SQLSyntaxParser; -import org.opensearch.sql.storage.StorageEngine; -import org.opensearch.sql.util.ExecuteOnCallerThreadQueryManager; -import org.opensearch.sql.opensearch.client.OpenSearchClient; -import org.opensearch.sql.opensearch.client.OpenSearchRestClient; -import org.opensearch.sql.opensearch.security.SecurityAccess; -import org.opensearch.sql.opensearch.storage.OpenSearchDataSourceFactory; import org.opensearch.sql.ppl.domain.PPLQueryRequest; import org.opensearch.sql.protocol.response.QueryResult; import org.opensearch.sql.protocol.response.format.SimpleJsonResponseFormatter; +import org.opensearch.sql.sql.SQLService; +import 
org.opensearch.sql.sql.antlr.SQLSyntaxParser; import org.opensearch.sql.storage.DataSourceFactory; +import org.opensearch.sql.storage.StorageEngine; +import org.opensearch.sql.util.ExecuteOnCallerThreadQueryManager; /** * Run PPL with query engine outside OpenSearch cluster. This IT doesn't require our plugin @@ -78,17 +77,21 @@ public class StandaloneIT extends PPLIntegTestCase { public void init() { RestHighLevelClient restClient = new InternalRestHighLevelClient(client()); OpenSearchClient client = new OpenSearchRestClient(restClient); - DataSourceService dataSourceService = new DataSourceServiceImpl( - new ImmutableSet.Builder() - .add(new OpenSearchDataSourceFactory(client, defaultSettings())) - .build(), getDataSourceMetadataStorage(), getDataSourceUserRoleHelper()); + DataSourceService dataSourceService = + new DataSourceServiceImpl( + new ImmutableSet.Builder() + .add(new OpenSearchDataSourceFactory(client, defaultSettings())) + .build(), + getDataSourceMetadataStorage(), + getDataSourceUserRoleHelper()); dataSourceService.createDataSource(defaultOpenSearchDataSourceMetadata()); ModulesBuilder modules = new ModulesBuilder(); - modules.add(new StandaloneModule(new InternalRestHighLevelClient(client()), defaultSettings(), dataSourceService)); + modules.add( + new StandaloneModule( + new InternalRestHighLevelClient(client()), defaultSettings(), dataSourceService)); Injector injector = modules.createInjector(); - pplService = - SecurityAccess.doPrivileged(() -> injector.getInstance(PPLService.class)); + pplService = SecurityAccess.doPrivileged(() -> injector.getInstance(PPLService.class)); } @Test @@ -146,9 +149,8 @@ public void onFailure(Exception e) { private Settings defaultSettings() { return new Settings() { - private final Map defaultSettings = new ImmutableMap.Builder() - .put(Key.QUERY_SIZE_LIMIT, 200) - .build(); + private final Map defaultSettings = + new ImmutableMap.Builder().put(Key.QUERY_SIZE_LIMIT, 200).build(); @Override public T 
getSettingValue(Key key) { @@ -162,9 +164,7 @@ public List getSettings() { }; } - /** - * Internal RestHighLevelClient only for testing purpose. - */ + /** Internal RestHighLevelClient only for testing purpose. */ static class InternalRestHighLevelClient extends RestHighLevelClient { public InternalRestHighLevelClient(RestClient restClient) { super(restClient, RestClient::close, Collections.emptyList()); @@ -197,8 +197,8 @@ public StorageEngine storageEngine(OpenSearchClient client) { } @Provides - public ExecutionEngine executionEngine(OpenSearchClient client, ExecutionProtector protector, - PlanSerializer planSerializer) { + public ExecutionEngine executionEngine( + OpenSearchClient client, ExecutionProtector protector, PlanSerializer planSerializer) { return new OpenSearchExecutionEngine(client, protector, planSerializer); } @@ -257,28 +257,20 @@ public Optional getDataSourceMetadata(String datasourceName) } @Override - public void createDataSourceMetadata(DataSourceMetadata dataSourceMetadata) { - - } + public void createDataSourceMetadata(DataSourceMetadata dataSourceMetadata) {} @Override - public void updateDataSourceMetadata(DataSourceMetadata dataSourceMetadata) { - - } + public void updateDataSourceMetadata(DataSourceMetadata dataSourceMetadata) {} @Override - public void deleteDataSourceMetadata(String datasourceName) { - - } + public void deleteDataSourceMetadata(String datasourceName) {} }; } public static DataSourceUserAuthorizationHelper getDataSourceUserRoleHelper() { return new DataSourceUserAuthorizationHelper() { @Override - public void authorizeDataSource(DataSourceMetadata dataSourceMetadata) { - - } + public void authorizeDataSource(DataSourceMetadata dataSourceMetadata) {} }; } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/StatsCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/StatsCommandIT.java index cf560c129c..92b9e309b8 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/StatsCommandIT.java +++ 
b/integ-test/src/test/java/org/opensearch/sql/ppl/StatsCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -15,10 +14,6 @@ import static org.opensearch.sql.util.MatcherUtils.verifySchema; import java.io.IOException; -import java.time.LocalDate; -import java.time.ZoneId; -import java.time.ZonedDateTime; -import java.time.format.DateTimeFormatter; import org.json.JSONObject; import org.junit.jupiter.api.Test; @@ -70,26 +65,23 @@ public void testStatsDistinctCount() throws IOException { verifySchema(response, schema("distinct_count(gender)", null, "integer")); verifyDataRows(response, rows(2)); - response = - executeQuery(String.format("source=%s | stats dc(age)", TEST_INDEX_ACCOUNT)); + response = executeQuery(String.format("source=%s | stats dc(age)", TEST_INDEX_ACCOUNT)); verifySchema(response, schema("dc(age)", null, "integer")); verifyDataRows(response, rows(21)); } @Test public void testStatsMin() throws IOException { - JSONObject response = executeQuery(String.format( - "source=%s | stats min(age)", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery(String.format("source=%s | stats min(age)", TEST_INDEX_ACCOUNT)); verifySchema(response, schema("min(age)", null, "long")); verifyDataRows(response, rows(20)); } @Test public void testStatsMax() throws IOException { - JSONObject response = executeQuery(String.format( - "source=%s | stats max(age)", - TEST_INDEX_ACCOUNT)); + JSONObject response = + executeQuery(String.format("source=%s | stats max(age)", TEST_INDEX_ACCOUNT)); verifySchema(response, schema("max(age)", null, "long")); verifyDataRows(response, rows(40)); } @@ -97,8 +89,8 @@ public void testStatsMax() throws IOException { @Test public void testStatsNested() throws IOException { JSONObject response = - executeQuery(String.format("source=%s | stats avg(abs(age) * 2) as AGE", - TEST_INDEX_ACCOUNT)); + executeQuery( 
+ String.format("source=%s | stats avg(abs(age) * 2) as AGE", TEST_INDEX_ACCOUNT)); verifySchema(response, schema("AGE", null, "double")); verifyDataRows(response, rows(60.342)); } @@ -106,8 +98,7 @@ public void testStatsNested() throws IOException { @Test public void testStatsNestedDoubleValue() throws IOException { JSONObject response = - executeQuery(String.format("source=%s | stats avg(abs(age) * 2.0)", - TEST_INDEX_ACCOUNT)); + executeQuery(String.format("source=%s | stats avg(abs(age) * 2.0)", TEST_INDEX_ACCOUNT)); verifySchema(response, schema("avg(abs(age) * 2.0)", null, "double")); verifyDataRows(response, rows(60.342)); } @@ -115,88 +106,87 @@ public void testStatsNestedDoubleValue() throws IOException { @Test public void testStatsWhere() throws IOException { JSONObject response = - executeQuery(String.format( - "source=%s | stats sum(balance) as a by state | where a > 780000", - TEST_INDEX_ACCOUNT)); - verifySchema(response, schema("a", null, "long"), - schema("state", null, "string")); + executeQuery( + String.format( + "source=%s | stats sum(balance) as a by state | where a > 780000", + TEST_INDEX_ACCOUNT)); + verifySchema(response, schema("a", null, "long"), schema("state", null, "string")); verifyDataRows(response, rows(782199, "TX")); } @Test public void testGroupByNullValue() throws IOException { JSONObject response = - executeQuery(String.format( - "source=%s | stats avg(balance) as a by age", - TEST_INDEX_BANK_WITH_NULL_VALUES)); - verifySchema(response, schema("a", null, "double"), - schema("age", null, "integer")); - verifyDataRows(response, + executeQuery( + String.format( + "source=%s | stats avg(balance) as a by age", TEST_INDEX_BANK_WITH_NULL_VALUES)); + verifySchema(response, schema("a", null, "double"), schema("age", null, "integer")); + verifyDataRows( + response, rows(null, null), rows(32838D, 28), rows(39225D, 32), rows(4180D, 33), rows(48086D, 34), - rows(null, 36) - ); + rows(null, 36)); } - //Todo. 
The column of agg function is in random order. This is because we create the project + // Todo. The column of agg function is in random order. This is because we create the project // all operator from the symbol table which can't maintain the original column order. @Test public void testMultipleAggregationFunction() throws IOException { - JSONObject response = executeQuery(String.format( - "source=%s | stats min(age), max(age)", - TEST_INDEX_ACCOUNT)); - verifySchema(response, schema("min(age)", null, "long"), - schema("max(age)", null, "long")); + JSONObject response = + executeQuery(String.format("source=%s | stats min(age), max(age)", TEST_INDEX_ACCOUNT)); + verifySchema(response, schema("min(age)", null, "long"), schema("max(age)", null, "long")); verifyDataRows(response, rows(20, 40)); } @Test public void testStatsWithNull() throws IOException { JSONObject response = - executeQuery(String.format( - "source=%s | stats avg(age)", - TEST_INDEX_BANK_WITH_NULL_VALUES)); + executeQuery(String.format("source=%s | stats avg(age)", TEST_INDEX_BANK_WITH_NULL_VALUES)); verifySchema(response, schema("avg(age)", null, "double")); verifyDataRows(response, rows(33.166666666666664)); } @Test public void testStatsWithMissing() throws IOException { - JSONObject response = executeQuery(String.format( - "source=%s | stats avg(balance)", - TEST_INDEX_BANK_WITH_NULL_VALUES)); + JSONObject response = + executeQuery( + String.format("source=%s | stats avg(balance)", TEST_INDEX_BANK_WITH_NULL_VALUES)); verifySchema(response, schema("avg(balance)", null, "double")); verifyDataRows(response, rows(31082.25)); } @Test public void testStatsBySpan() throws IOException { - JSONObject response = executeQuery(String.format( - "source=%s | stats count() by span(age,10)", - TEST_INDEX_BANK)); - verifySchema(response, schema("count()", null, "integer"), schema("span(age,10)", null, "integer")); + JSONObject response = + executeQuery(String.format("source=%s | stats count() by span(age,10)", 
TEST_INDEX_BANK)); + verifySchema( + response, schema("count()", null, "integer"), schema("span(age,10)", null, "integer")); verifyDataRows(response, rows(1, 20), rows(6, 30)); } @Test public void testStatsTimeSpan() throws IOException { - JSONObject response = executeQuery(String.format( - "source=%s | stats count() by span(birthdate,1y)", - TEST_INDEX_BANK)); - verifySchema(response, schema("count()", null, "integer"), schema( - "span(birthdate,1y)", null, "timestamp")); + JSONObject response = + executeQuery( + String.format("source=%s | stats count() by span(birthdate,1y)", TEST_INDEX_BANK)); + verifySchema( + response, + schema("count()", null, "integer"), + schema("span(birthdate,1y)", null, "timestamp")); verifyDataRows(response, rows(2, "2017-01-01 00:00:00"), rows(5, "2018-01-01 00:00:00")); } @Test public void testStatsAliasedSpan() throws IOException { - JSONObject response = executeQuery(String.format( - "source=%s | stats count() by span(age,10) as age_bucket", - TEST_INDEX_BANK)); - verifySchema(response, schema("count()", null, "integer"), schema("age_bucket", null, "integer")); + JSONObject response = + executeQuery( + String.format( + "source=%s | stats count() by span(age,10) as age_bucket", TEST_INDEX_BANK)); + verifySchema( + response, schema("count()", null, "integer"), schema("age_bucket", null, "integer")); verifyDataRows(response, rows(1, 20), rows(6, 30)); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/SystemFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/SystemFunctionIT.java index de13aa5488..565c267f7f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/SystemFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/SystemFunctionIT.java @@ -26,49 +26,60 @@ public void init() throws IOException { @Test public void typeof_sql_types() throws IOException { - JSONObject response = executeQuery(String.format("source=%s | eval " - + "`str` = typeof('pewpew'), `double` = typeof(1.0)," - + 
"`int` = typeof(12345), `long` = typeof(1234567891011), `interval` = typeof(INTERVAL 2 DAY)" - + " | fields `str`, `double`, `int`, `long`, `interval`", - TEST_INDEX_DATATYPE_NUMERIC)); + JSONObject response = + executeQuery( + String.format( + "source=%s | eval `str` = typeof('pewpew'), `double` = typeof(1.0),`int` =" + + " typeof(12345), `long` = typeof(1234567891011), `interval` = typeof(INTERVAL" + + " 2 DAY) | fields `str`, `double`, `int`, `long`, `interval`", + TEST_INDEX_DATATYPE_NUMERIC)); // TODO: test null in PPL - verifyDataRows(response, - rows("KEYWORD", "DOUBLE", "INTEGER", "LONG", "INTERVAL")); + verifyDataRows(response, rows("KEYWORD", "DOUBLE", "INTEGER", "LONG", "INTERVAL")); - response = executeQuery(String.format("source=%s | eval " - + "`timestamp` = typeof(CAST('1961-04-12 09:07:00' AS TIMESTAMP))," - + "`time` = typeof(CAST('09:07:00' AS TIME))," - + "`date` = typeof(CAST('1961-04-12' AS DATE))," - + "`datetime` = typeof(DATETIME('1961-04-12 09:07:00'))" - + " | fields `timestamp`, `time`, `date`, `datetime`", - TEST_INDEX_DATATYPE_NUMERIC)); - verifyDataRows(response, - rows("TIMESTAMP", "TIME", "DATE", "DATETIME")); + response = + executeQuery( + String.format( + "source=%s | eval " + + "`timestamp` = typeof(CAST('1961-04-12 09:07:00' AS TIMESTAMP))," + + "`time` = typeof(CAST('09:07:00' AS TIME))," + + "`date` = typeof(CAST('1961-04-12' AS DATE))," + + "`datetime` = typeof(DATETIME('1961-04-12 09:07:00'))" + + " | fields `timestamp`, `time`, `date`, `datetime`", + TEST_INDEX_DATATYPE_NUMERIC)); + verifyDataRows(response, rows("TIMESTAMP", "TIME", "DATE", "DATETIME")); } @Test public void typeof_opensearch_types() throws IOException { - JSONObject response = executeQuery(String.format("source=%s | eval " - + "`double` = typeof(double_number), `long` = typeof(long_number)," - + "`integer` = typeof(integer_number), `byte` = typeof(byte_number)," - + "`short` = typeof(short_number), `float` = typeof(float_number)," - + "`half_float` = 
typeof(half_float_number), `scaled_float` = typeof(scaled_float_number)" - + " | fields `double`, `long`, `integer`, `byte`, `short`, `float`, `half_float`, `scaled_float`", - TEST_INDEX_DATATYPE_NUMERIC)); - verifyDataRows(response, - rows("DOUBLE", "LONG", "INTEGER", "BYTE", "SHORT", "FLOAT", "FLOAT", "DOUBLE")); + JSONObject response = + executeQuery( + String.format( + "source=%s | eval `double` = typeof(double_number), `long` =" + + " typeof(long_number),`integer` = typeof(integer_number), `byte` =" + + " typeof(byte_number),`short` = typeof(short_number), `float` =" + + " typeof(float_number),`half_float` = typeof(half_float_number)," + + " `scaled_float` = typeof(scaled_float_number) | fields `double`, `long`," + + " `integer`, `byte`, `short`, `float`, `half_float`, `scaled_float`", + TEST_INDEX_DATATYPE_NUMERIC)); + verifyDataRows( + response, rows("DOUBLE", "LONG", "INTEGER", "BYTE", "SHORT", "FLOAT", "FLOAT", "DOUBLE")); - response = executeQuery(String.format("source=%s | eval " - + "`text` = typeof(text_value), `date` = typeof(date_value)," - + "`boolean` = typeof(boolean_value), `object` = typeof(object_value)," - + "`keyword` = typeof(keyword_value), `ip` = typeof(ip_value)," - + "`binary` = typeof(binary_value), `geo_point` = typeof(geo_point_value)" - // TODO activate this test once `ARRAY` type supported, see ExpressionAnalyzer::isTypeNotSupported - //+ ", `nested` = typeof(nested_value)" - + " | fields `text`, `date`, `boolean`, `object`, `keyword`, `ip`, `binary`, `geo_point`", - TEST_INDEX_DATATYPE_NONNUMERIC)); - verifyDataRows(response, - rows("TEXT", "TIMESTAMP", "BOOLEAN", "OBJECT", "KEYWORD", - "IP", "BINARY", "GEO_POINT")); + response = + executeQuery( + String.format( + "source=%s | eval " + + "`text` = typeof(text_value), `date` = typeof(date_value)," + + "`boolean` = typeof(boolean_value), `object` = typeof(object_value)," + + "`keyword` = typeof(keyword_value), `ip` = typeof(ip_value)," + + "`binary` = typeof(binary_value), 
`geo_point` = typeof(geo_point_value)" + // TODO activate this test once `ARRAY` type supported, see + // ExpressionAnalyzer::isTypeNotSupported + // + ", `nested` = typeof(nested_value)" + + " | fields `text`, `date`, `boolean`, `object`, `keyword`, `ip`, `binary`," + + " `geo_point`", + TEST_INDEX_DATATYPE_NONNUMERIC)); + verifyDataRows( + response, + rows("TEXT", "TIMESTAMP", "BOOLEAN", "OBJECT", "KEYWORD", "IP", "BINARY", "GEO_POINT")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/TextFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/TextFunctionIT.java index 024f190bee..dc9f1d98d2 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/TextFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/TextFunctionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_STRINGS; @@ -23,27 +22,45 @@ public void init() throws IOException { loadIndex(Index.BANK_WITH_STRING_VALUES); } - void verifyQuery(String command, String initialArgs, String additionalArgs, - String outputRow1, String outputRow2, String outputRow3) throws IOException { - String query = String.format( - "source=%s | eval f=%s(%sname%s) | fields f", TEST_INDEX_STRINGS, command, initialArgs, additionalArgs); + void verifyQuery( + String command, + String initialArgs, + String additionalArgs, + String outputRow1, + String outputRow2, + String outputRow3) + throws IOException { + String query = + String.format( + "source=%s | eval f=%s(%sname%s) | fields f", + TEST_INDEX_STRINGS, command, initialArgs, additionalArgs); JSONObject result = executeQuery(query); verifySchema(result, schema("f", null, "string")); verifyDataRows(result, rows(outputRow1), rows(outputRow2), rows(outputRow3)); } - void verifyQuery(String command, String initialArgs, String additionalArgs, - Integer outputRow1, Integer outputRow2, Integer outputRow3) throws IOException { - 
String query = String.format( - "source=%s | eval f=%s(%sname%s) | fields f", TEST_INDEX_STRINGS, command, initialArgs, additionalArgs); + void verifyQuery( + String command, + String initialArgs, + String additionalArgs, + Integer outputRow1, + Integer outputRow2, + Integer outputRow3) + throws IOException { + String query = + String.format( + "source=%s | eval f=%s(%sname%s) | fields f", + TEST_INDEX_STRINGS, command, initialArgs, additionalArgs); JSONObject result = executeQuery(query); verifySchema(result, schema("f", null, "integer")); verifyDataRows(result, rows(outputRow1), rows(outputRow2), rows(outputRow3)); } - void verifyRegexQuery(String pattern, Integer outputRow1, Integer outputRow2, Integer outputRow3) throws IOException { - String query = String.format( - "source=%s | eval f=name regexp '%s' | fields f", TEST_INDEX_STRINGS, pattern); + void verifyRegexQuery(String pattern, Integer outputRow1, Integer outputRow2, Integer outputRow3) + throws IOException { + String query = + String.format( + "source=%s | eval f=name regexp '%s' | fields f", TEST_INDEX_STRINGS, pattern); JSONObject result = executeQuery(query); verifySchema(result, schema("f", null, "integer")); verifyDataRows(result, rows(outputRow1), rows(outputRow2), rows(outputRow3)); @@ -55,7 +72,7 @@ public void testRegexp() throws IOException { verifyRegexQuery(".*", 1, 1, 1); } - @Test + @Test public void testSubstr() throws IOException { verifyQuery("substr", "", ", 2", "ello", "orld", "elloworld"); verifyQuery("substr", "", ", 2, 2", "el", "or", "el"); @@ -99,14 +116,19 @@ public void testLtrim() throws IOException { @Test public void testConcat() throws IOException { - verifyQuery("concat", "", ", 'there', 'all', '!'", - "hellothereall!", "worldthereall!", "helloworldthereall!"); + verifyQuery( + "concat", + "", + ", 'there', 'all', '!'", + "hellothereall!", + "worldthereall!", + "helloworldthereall!"); } @Test public void testConcat_ws() throws IOException { - verifyQuery("concat_ws", "',', 
", ", 'there'", - "hello,there", "world,there", "helloworld,there"); + verifyQuery( + "concat_ws", "',', ", ", 'there'", "hello,there", "world,there", "helloworld,there"); } @Test @@ -137,7 +159,8 @@ public void testLocate() throws IOException { @Test public void testReplace() throws IOException { - verifyQuery("replace", "", ", 'world', ' opensearch'", "hello", " opensearch", "hello opensearch"); + verifyQuery( + "replace", "", ", 'world', ' opensearch'", "hello", " opensearch", "hello opensearch"); } @Test diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/TopCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/TopCommandIT.java index 054ff303a1..f9587e4b63 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/TopCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/TopCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; @@ -14,7 +13,7 @@ import org.json.JSONObject; import org.junit.jupiter.api.Test; -public class TopCommandIT extends PPLIntegTestCase{ +public class TopCommandIT extends PPLIntegTestCase { @Override public void init() throws IOException { @@ -24,30 +23,20 @@ public void init() throws IOException { @Test public void testTopWithoutGroup() throws IOException { - JSONObject result = - executeQuery(String.format("source=%s | top gender", TEST_INDEX_ACCOUNT)); - verifyDataRows( - result, - rows("M"), - rows("F")); + JSONObject result = executeQuery(String.format("source=%s | top gender", TEST_INDEX_ACCOUNT)); + verifyDataRows(result, rows("M"), rows("F")); } @Test - public void testTopNWithoutGroup() throws IOException{ - JSONObject result = - executeQuery(String.format("source=%s | top 1 gender", TEST_INDEX_ACCOUNT)); - verifyDataRows( - result, - rows("M")); + public void testTopNWithoutGroup() throws IOException { + JSONObject result = executeQuery(String.format("source=%s | top 1 
gender", TEST_INDEX_ACCOUNT)); + verifyDataRows(result, rows("M")); } @Test public void testTopNWithGroup() throws IOException { JSONObject result = executeQuery(String.format("source=%s | top 1 state by gender", TEST_INDEX_ACCOUNT)); - verifyDataRows( - result, - rows("F", "TX"), - rows("M", "MD")); + verifyDataRows(result, rows("F", "TX"), rows("M", "MD")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/VisualizationFormatIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/VisualizationFormatIT.java index d530b4140d..263ed502ed 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/VisualizationFormatIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/VisualizationFormatIT.java @@ -24,8 +24,9 @@ public void init() throws IOException { @Test void format() throws IOException { - String result = executeVizQuery( - String.format(Locale.ROOT, "source=%s | fields firstname, age", TEST_INDEX_BANK), true); + String result = + executeVizQuery( + String.format(Locale.ROOT, "source=%s | fields firstname, age", TEST_INDEX_BANK), true); assertEquals( "{\n" + " \"data\": {\n" @@ -67,8 +68,10 @@ void format() throws IOException { } private String executeVizQuery(String query, boolean pretty) throws IOException { - Request request = buildRequest(query, - QUERY_API_ENDPOINT + String.format(Locale.ROOT, "?format=csv&pretty=%b", pretty)); + Request request = + buildRequest( + query, + QUERY_API_ENDPOINT + String.format(Locale.ROOT, "?format=csv&pretty=%b", pretty)); Response response = client().performRequest(request); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); return getResponseBody(response, true); diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/WhereCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/WhereCommandIT.java index ba870732fd..d56f9ffe32 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/WhereCommandIT.java +++ 
b/integ-test/src/test/java/org/opensearch/sql/ppl/WhereCommandIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ACCOUNT; diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/AdminIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/AdminIT.java index ed7ec600a3..04a8dd9c08 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/AdminIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/AdminIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.hamcrest.Matchers.equalTo; @@ -63,10 +62,7 @@ public void explainShow() throws Exception { String expected = loadFromFile("expectedOutput/sql/explain_show.json"); final String actual = explainQuery("SHOW TABLES LIKE %"); - assertJsonEquals( - expected, - explainQuery("SHOW TABLES LIKE %") - ); + assertJsonEquals(expected, explainQuery("SHOW TABLES LIKE %")); } private void addAlias(String index, String alias) throws IOException { diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/AggregationIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/AggregationIT.java index 1075b14431..339cd56370 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/AggregationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/AggregationIT.java @@ -37,17 +37,19 @@ protected void init() throws Exception { @Test public void testFilteredAggregatePushDown() throws IOException { - JSONObject response = executeQuery( - "SELECT COUNT(*) FILTER(WHERE age > 35) FROM " + TEST_INDEX_BANK); + JSONObject response = + executeQuery("SELECT COUNT(*) FILTER(WHERE age > 35) FROM " + TEST_INDEX_BANK); verifySchema(response, schema("COUNT(*) FILTER(WHERE age > 35)", null, "integer")); verifyDataRows(response, rows(3)); } @Test public void testFilteredAggregateNotPushDown() throws IOException { - JSONObject response = executeQuery( - 
"SELECT COUNT(*) FILTER(WHERE age > 35) FROM (SELECT * FROM " + TEST_INDEX_BANK - + ") AS a"); + JSONObject response = + executeQuery( + "SELECT COUNT(*) FILTER(WHERE age > 35) FROM (SELECT * FROM " + + TEST_INDEX_BANK + + ") AS a"); verifySchema(response, schema("COUNT(*) FILTER(WHERE age > 35)", null, "integer")); verifyDataRows(response, rows(3)); } @@ -55,45 +57,65 @@ public void testFilteredAggregateNotPushDown() throws IOException { @Test public void testPushDownAggregationOnNullValues() throws IOException { // OpenSearch aggregation query (MetricAggregation) - var response = executeQuery(String.format( - "SELECT min(`int`), max(`int`), avg(`int`), min(`dbl`), max(`dbl`), avg(`dbl`) " + - "FROM %s WHERE `key` = 'null'", TEST_INDEX_NULL_MISSING)); - verifySchema(response, - schema("min(`int`)", null, "integer"), schema("max(`int`)", null, "integer"), - schema("avg(`int`)", null, "double"), schema("min(`dbl`)", null, "double"), - schema("max(`dbl`)", null, "double"), schema("avg(`dbl`)", null, "double")); + var response = + executeQuery( + String.format( + "SELECT min(`int`), max(`int`), avg(`int`), min(`dbl`), max(`dbl`), avg(`dbl`) " + + "FROM %s WHERE `key` = 'null'", + TEST_INDEX_NULL_MISSING)); + verifySchema( + response, + schema("min(`int`)", null, "integer"), + schema("max(`int`)", null, "integer"), + schema("avg(`int`)", null, "double"), + schema("min(`dbl`)", null, "double"), + schema("max(`dbl`)", null, "double"), + schema("avg(`dbl`)", null, "double")); verifyDataRows(response, rows(null, null, null, null, null, null)); } @Test public void testPushDownAggregationOnMissingValues() throws IOException { // OpenSearch aggregation query (MetricAggregation) - var response = executeQuery(String.format( - "SELECT min(`int`), max(`int`), avg(`int`), min(`dbl`), max(`dbl`), avg(`dbl`) " + - "FROM %s WHERE `key` = 'null'", TEST_INDEX_NULL_MISSING)); - verifySchema(response, - schema("min(`int`)", null, "integer"), schema("max(`int`)", null, "integer"), - 
schema("avg(`int`)", null, "double"), schema("min(`dbl`)", null, "double"), - schema("max(`dbl`)", null, "double"), schema("avg(`dbl`)", null, "double")); + var response = + executeQuery( + String.format( + "SELECT min(`int`), max(`int`), avg(`int`), min(`dbl`), max(`dbl`), avg(`dbl`) " + + "FROM %s WHERE `key` = 'null'", + TEST_INDEX_NULL_MISSING)); + verifySchema( + response, + schema("min(`int`)", null, "integer"), + schema("max(`int`)", null, "integer"), + schema("avg(`int`)", null, "double"), + schema("min(`dbl`)", null, "double"), + schema("max(`dbl`)", null, "double"), + schema("avg(`dbl`)", null, "double")); verifyDataRows(response, rows(null, null, null, null, null, null)); } @Test public void testInMemoryAggregationOnNullValues() throws IOException { // In-memory aggregation performed by the plugin - var response = executeQuery(String.format("SELECT" - + " min(`int`) over (PARTITION BY `key`), max(`int`) over (PARTITION BY `key`)," - + " avg(`int`) over (PARTITION BY `key`), min(`dbl`) over (PARTITION BY `key`)," - + " max(`dbl`) over (PARTITION BY `key`), avg(`dbl`) over (PARTITION BY `key`)" - + " FROM %s WHERE `key` = 'null'", TEST_INDEX_NULL_MISSING)); - verifySchema(response, + var response = + executeQuery( + String.format( + "SELECT" + + " min(`int`) over (PARTITION BY `key`), max(`int`) over (PARTITION BY `key`)," + + " avg(`int`) over (PARTITION BY `key`), min(`dbl`) over (PARTITION BY `key`)," + + " max(`dbl`) over (PARTITION BY `key`), avg(`dbl`) over (PARTITION BY `key`)" + + " FROM %s WHERE `key` = 'null'", + TEST_INDEX_NULL_MISSING)); + verifySchema( + response, schema("min(`int`) over (PARTITION BY `key`)", null, "integer"), schema("max(`int`) over (PARTITION BY `key`)", null, "integer"), schema("avg(`int`) over (PARTITION BY `key`)", null, "double"), schema("min(`dbl`) over (PARTITION BY `key`)", null, "double"), schema("max(`dbl`) over (PARTITION BY `key`)", null, "double"), schema("avg(`dbl`) over (PARTITION BY `key`)", null, "double")); 
- verifyDataRows(response, // 4 rows with null values + verifyDataRows( + response, // 4 rows with null values rows(null, null, null, null, null, null), rows(null, null, null, null, null, null), rows(null, null, null, null, null, null), @@ -103,19 +125,25 @@ public void testInMemoryAggregationOnNullValues() throws IOException { @Test public void testInMemoryAggregationOnMissingValues() throws IOException { // In-memory aggregation performed by the plugin - var response = executeQuery(String.format("SELECT" - + " min(`int`) over (PARTITION BY `key`), max(`int`) over (PARTITION BY `key`)," - + " avg(`int`) over (PARTITION BY `key`), min(`dbl`) over (PARTITION BY `key`)," - + " max(`dbl`) over (PARTITION BY `key`), avg(`dbl`) over (PARTITION BY `key`)" - + " FROM %s WHERE `key` = 'missing'", TEST_INDEX_NULL_MISSING)); - verifySchema(response, + var response = + executeQuery( + String.format( + "SELECT" + + " min(`int`) over (PARTITION BY `key`), max(`int`) over (PARTITION BY `key`)," + + " avg(`int`) over (PARTITION BY `key`), min(`dbl`) over (PARTITION BY `key`)," + + " max(`dbl`) over (PARTITION BY `key`), avg(`dbl`) over (PARTITION BY `key`)" + + " FROM %s WHERE `key` = 'missing'", + TEST_INDEX_NULL_MISSING)); + verifySchema( + response, schema("min(`int`) over (PARTITION BY `key`)", null, "integer"), schema("max(`int`) over (PARTITION BY `key`)", null, "integer"), schema("avg(`int`) over (PARTITION BY `key`)", null, "double"), schema("min(`dbl`) over (PARTITION BY `key`)", null, "double"), schema("max(`dbl`) over (PARTITION BY `key`)", null, "double"), schema("avg(`dbl`) over (PARTITION BY `key`)", null, "double")); - verifyDataRows(response, // 4 rows with null values + verifyDataRows( + response, // 4 rows with null values rows(null, null, null, null, null, null), rows(null, null, null, null, null, null), rows(null, null, null, null, null, null), @@ -124,12 +152,17 @@ public void testInMemoryAggregationOnMissingValues() throws IOException { @Test public void 
testInMemoryAggregationOnNullValuesReturnsNull() throws IOException { - var response = executeQuery(String.format("SELECT " - + " max(int0) over (PARTITION BY `datetime1`)," - + " min(int0) over (PARTITION BY `datetime1`)," - + " avg(int0) over (PARTITION BY `datetime1`)" - + "from %s where int0 IS NULL;", TEST_INDEX_CALCS)); - verifySchema(response, + var response = + executeQuery( + String.format( + "SELECT " + + " max(int0) over (PARTITION BY `datetime1`)," + + " min(int0) over (PARTITION BY `datetime1`)," + + " avg(int0) over (PARTITION BY `datetime1`)" + + "from %s where int0 IS NULL;", + TEST_INDEX_CALCS)); + verifySchema( + response, schema("max(int0) over (PARTITION BY `datetime1`)", null, "integer"), schema("min(int0) over (PARTITION BY `datetime1`)", null, "integer"), schema("avg(int0) over (PARTITION BY `datetime1`)", null, "double")); @@ -138,21 +171,31 @@ public void testInMemoryAggregationOnNullValuesReturnsNull() throws IOException @Test public void testInMemoryAggregationOnAllValuesAndOnNotNullReturnsSameResult() throws IOException { - var responseNotNulls = executeQuery(String.format("SELECT " - + " max(int0) over (PARTITION BY `datetime1`)," - + " min(int0) over (PARTITION BY `datetime1`)," - + " avg(int0) over (PARTITION BY `datetime1`)" - + "from %s where int0 IS NOT NULL;", TEST_INDEX_CALCS)); - var responseAllValues = executeQuery(String.format("SELECT " - + " max(int0) over (PARTITION BY `datetime1`)," - + " min(int0) over (PARTITION BY `datetime1`)," - + " avg(int0) over (PARTITION BY `datetime1`)" - + "from %s;", TEST_INDEX_CALCS)); - verifySchema(responseNotNulls, + var responseNotNulls = + executeQuery( + String.format( + "SELECT " + + " max(int0) over (PARTITION BY `datetime1`)," + + " min(int0) over (PARTITION BY `datetime1`)," + + " avg(int0) over (PARTITION BY `datetime1`)" + + "from %s where int0 IS NOT NULL;", + TEST_INDEX_CALCS)); + var responseAllValues = + executeQuery( + String.format( + "SELECT " + + " max(int0) over (PARTITION 
BY `datetime1`)," + + " min(int0) over (PARTITION BY `datetime1`)," + + " avg(int0) over (PARTITION BY `datetime1`)" + + "from %s;", + TEST_INDEX_CALCS)); + verifySchema( + responseNotNulls, schema("max(int0) over (PARTITION BY `datetime1`)", null, "integer"), schema("min(int0) over (PARTITION BY `datetime1`)", null, "integer"), schema("avg(int0) over (PARTITION BY `datetime1`)", null, "double")); - verifySchema(responseAllValues, + verifySchema( + responseAllValues, schema("max(int0) over (PARTITION BY `datetime1`)", null, "integer"), schema("min(int0) over (PARTITION BY `datetime1`)", null, "integer"), schema("avg(int0) over (PARTITION BY `datetime1`)", null, "double")); @@ -163,9 +206,13 @@ public void testInMemoryAggregationOnAllValuesAndOnNotNullReturnsSameResult() th @Test public void testPushDownAggregationOnNullNumericValuesReturnsNull() throws IOException { - var response = executeQuery(String.format("SELECT " - + "max(int0), min(int0), avg(int0) from %s where int0 IS NULL;", TEST_INDEX_CALCS)); - verifySchema(response, + var response = + executeQuery( + String.format( + "SELECT " + "max(int0), min(int0), avg(int0) from %s where int0 IS NULL;", + TEST_INDEX_CALCS)); + verifySchema( + response, schema("max(int0)", null, "integer"), schema("min(int0)", null, "integer"), schema("avg(int0)", null, "double")); @@ -174,9 +221,13 @@ public void testPushDownAggregationOnNullNumericValuesReturnsNull() throws IOExc @Test public void testPushDownAggregationOnNullDateTimeValuesFromTableReturnsNull() throws IOException { - var response = executeQuery(String.format("SELECT " - + "max(datetime1), min(datetime1), avg(datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, + var response = + executeQuery( + String.format( + "SELECT " + "max(datetime1), min(datetime1), avg(datetime1) from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, schema("max(datetime1)", null, "timestamp"), schema("min(datetime1)", null, "timestamp"), schema("avg(datetime1)", null, 
"timestamp")); @@ -185,9 +236,14 @@ public void testPushDownAggregationOnNullDateTimeValuesFromTableReturnsNull() th @Test public void testPushDownAggregationOnNullDateValuesReturnsNull() throws IOException { - var response = executeQuery(String.format("SELECT " - + "max(CAST(NULL AS date)), min(CAST(NULL AS date)), avg(CAST(NULL AS date)) from %s", TEST_INDEX_CALCS)); - verifySchema(response, + var response = + executeQuery( + String.format( + "SELECT max(CAST(NULL AS date)), min(CAST(NULL AS date)), avg(CAST(NULL AS date))" + + " from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, schema("max(CAST(NULL AS date))", null, "date"), schema("min(CAST(NULL AS date))", null, "date"), schema("avg(CAST(NULL AS date))", null, "date")); @@ -196,9 +252,14 @@ public void testPushDownAggregationOnNullDateValuesReturnsNull() throws IOExcept @Test public void testPushDownAggregationOnNullTimeValuesReturnsNull() throws IOException { - var response = executeQuery(String.format("SELECT " - + "max(CAST(NULL AS time)), min(CAST(NULL AS time)), avg(CAST(NULL AS time)) from %s", TEST_INDEX_CALCS)); - verifySchema(response, + var response = + executeQuery( + String.format( + "SELECT max(CAST(NULL AS time)), min(CAST(NULL AS time)), avg(CAST(NULL AS time))" + + " from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, schema("max(CAST(NULL AS time))", null, "time"), schema("min(CAST(NULL AS time))", null, "time"), schema("avg(CAST(NULL AS time))", null, "time")); @@ -207,9 +268,14 @@ public void testPushDownAggregationOnNullTimeValuesReturnsNull() throws IOExcept @Test public void testPushDownAggregationOnNullTimeStampValuesReturnsNull() throws IOException { - var response = executeQuery(String.format("SELECT " - + "max(CAST(NULL AS timestamp)), min(CAST(NULL AS timestamp)), avg(CAST(NULL AS timestamp)) from %s", TEST_INDEX_CALCS)); - verifySchema(response, + var response = + executeQuery( + String.format( + "SELECT max(CAST(NULL AS timestamp)), min(CAST(NULL AS timestamp)), 
avg(CAST(NULL" + + " AS timestamp)) from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, schema("max(CAST(NULL AS timestamp))", null, "timestamp"), schema("min(CAST(NULL AS timestamp))", null, "timestamp"), schema("avg(CAST(NULL AS timestamp))", null, "timestamp")); @@ -218,9 +284,13 @@ public void testPushDownAggregationOnNullTimeStampValuesReturnsNull() throws IOE @Test public void testPushDownAggregationOnNullDateTimeValuesReturnsNull() throws IOException { - var response = executeQuery(String.format("SELECT " - + "max(datetime(NULL)), min(datetime(NULL)), avg(datetime(NULL)) from %s", TEST_INDEX_CALCS)); - verifySchema(response, + var response = + executeQuery( + String.format( + "SELECT " + "max(datetime(NULL)), min(datetime(NULL)), avg(datetime(NULL)) from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, schema("max(datetime(NULL))", null, "datetime"), schema("min(datetime(NULL))", null, "datetime"), schema("avg(datetime(NULL))", null, "datetime")); @@ -229,15 +299,22 @@ public void testPushDownAggregationOnNullDateTimeValuesReturnsNull() throws IOEx @Test public void testPushDownAggregationOnAllValuesAndOnNotNullReturnsSameResult() throws IOException { - var responseNotNulls = executeQuery(String.format("SELECT " - + "max(int0), min(int0), avg(int0) from %s where int0 IS NOT NULL;", TEST_INDEX_CALCS)); - var responseAllValues = executeQuery(String.format("SELECT " - + "max(int0), min(int0), avg(int0) from %s;", TEST_INDEX_CALCS)); - verifySchema(responseNotNulls, + var responseNotNulls = + executeQuery( + String.format( + "SELECT " + "max(int0), min(int0), avg(int0) from %s where int0 IS NOT NULL;", + TEST_INDEX_CALCS)); + var responseAllValues = + executeQuery( + String.format( + "SELECT " + "max(int0), min(int0), avg(int0) from %s;", TEST_INDEX_CALCS)); + verifySchema( + responseNotNulls, schema("max(int0)", null, "integer"), schema("min(int0)", null, "integer"), schema("avg(int0)", null, "double")); - verifySchema(responseAllValues, + 
verifySchema( + responseAllValues, schema("max(int0)", null, "integer"), schema("min(int0)", null, "integer"), schema("avg(int0)", null, "double")); @@ -248,18 +325,21 @@ public void testPushDownAggregationOnAllValuesAndOnNotNullReturnsSameResult() th @Test public void testPushDownAndInMemoryAggregationReturnTheSameResult() throws IOException { - // Playing with 'over (PARTITION BY `datetime1`)' - `datetime1` column has the same value for all rows + // Playing with 'over (PARTITION BY `datetime1`)' - `datetime1` column has the same value for + // all rows // so partitioning by this column has no sense and doesn't (shouldn't) affect the results // Aggregations with `OVER` clause are executed in memory (in SQL plugin memory), // Aggregations without it are performed the OpenSearch node itself (pushed down to opensearch) - // Going to compare results of `min`, `max` and `avg` aggregation on all numeric columns in `calcs` + // Going to compare results of `min`, `max` and `avg` aggregation on all numeric columns in + // `calcs` var columns = List.of("int0", "int1", "int2", "int3", "num0", "num1", "num2", "num3", "num4"); var aggregations = List.of("min", "max", "avg"); var inMemoryAggregQuery = new StringBuilder("SELECT "); var pushDownAggregQuery = new StringBuilder("SELECT "); for (var col : columns) { for (var aggreg : aggregations) { - inMemoryAggregQuery.append(String.format(" %s(%s) over (PARTITION BY `datetime1`),", aggreg, col)); + inMemoryAggregQuery.append( + String.format(" %s(%s) over (PARTITION BY `datetime1`),", aggreg, col)); pushDownAggregQuery.append(String.format(" %s(%s),", aggreg, col)); } } @@ -267,313 +347,362 @@ public void testPushDownAndInMemoryAggregationReturnTheSameResult() throws IOExc inMemoryAggregQuery.deleteCharAt(inMemoryAggregQuery.length() - 1); pushDownAggregQuery.deleteCharAt(pushDownAggregQuery.length() - 1); - var responseInMemory = executeQuery( - inMemoryAggregQuery.append("from " + TEST_INDEX_CALCS).toString()); - var 
responsePushDown = executeQuery( - pushDownAggregQuery.append("from " + TEST_INDEX_CALCS).toString()); + var responseInMemory = + executeQuery(inMemoryAggregQuery.append("from " + TEST_INDEX_CALCS).toString()); + var responsePushDown = + executeQuery(pushDownAggregQuery.append("from " + TEST_INDEX_CALCS).toString()); for (int i = 0; i < columns.size() * aggregations.size(); i++) { assertEquals( - ((Number)responseInMemory.query("/datarows/0/" + i)).doubleValue(), - ((Number)responsePushDown.query("/datarows/0/" + i)).doubleValue(), + ((Number) responseInMemory.query("/datarows/0/" + i)).doubleValue(), + ((Number) responsePushDown.query("/datarows/0/" + i)).doubleValue(), 0.0000001); // a minor delta is affordable } } public void testMinIntegerPushedDown() throws IOException { - var response = executeQuery(String.format("SELECT min(int2)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT min(int2)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("min(int2)", null, "integer")); verifyDataRows(response, rows(-9)); } @Test public void testMaxIntegerPushedDown() throws IOException { - var response = executeQuery(String.format("SELECT max(int2)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT max(int2)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("max(int2)", null, "integer")); verifyDataRows(response, rows(9)); } @Test public void testAvgIntegerPushedDown() throws IOException { - var response = executeQuery(String.format("SELECT avg(int2)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT avg(int2)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("avg(int2)", null, "double")); verifyDataRows(response, rows(-0.8235294117647058D)); } @Test public void testMinDoublePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT min(num3)" - + " from %s", TEST_INDEX_CALCS)); + var 
response = executeQuery(String.format("SELECT min(num3)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("min(num3)", null, "double")); verifyDataRows(response, rows(-19.96D)); } @Test public void testMaxDoublePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT max(num3)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT max(num3)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("max(num3)", null, "double")); verifyDataRows(response, rows(12.93D)); } @Test public void testAvgDoublePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT avg(num3)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT avg(num3)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("avg(num3)", null, "double")); verifyDataRows(response, rows(-6.12D)); } @Test public void testMinIntegerInMemory() throws IOException { - var response = executeQuery(String.format("SELECT min(int2)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("min(int2) OVER(PARTITION BY datetime1)", null, "integer")); + var response = + executeQuery( + String.format( + "SELECT min(int2)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("min(int2) OVER(PARTITION BY datetime1)", null, "integer")); verifySome(response.getJSONArray("datarows"), rows(-9)); } @Test public void testMaxIntegerInMemory() throws IOException { - var response = executeQuery(String.format("SELECT max(int2)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("max(int2) OVER(PARTITION BY datetime1)", null, "integer")); + var response = + executeQuery( + String.format( + "SELECT max(int2)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("max(int2) OVER(PARTITION BY datetime1)", null, 
"integer")); verifySome(response.getJSONArray("datarows"), rows(9)); } @Test public void testAvgIntegerInMemory() throws IOException { - var response = executeQuery(String.format("SELECT avg(int2)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("avg(int2) OVER(PARTITION BY datetime1)", null, "double")); + var response = + executeQuery( + String.format( + "SELECT avg(int2)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("avg(int2) OVER(PARTITION BY datetime1)", null, "double")); verifySome(response.getJSONArray("datarows"), rows(-0.8235294117647058D)); } @Test public void testMinDoubleInMemory() throws IOException { - var response = executeQuery(String.format("SELECT min(num3)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("min(num3) OVER(PARTITION BY datetime1)", null, "double")); + var response = + executeQuery( + String.format( + "SELECT min(num3)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("min(num3) OVER(PARTITION BY datetime1)", null, "double")); verifySome(response.getJSONArray("datarows"), rows(-19.96D)); } @Test public void testMaxDoubleInMemory() throws IOException { - var response = executeQuery(String.format("SELECT max(num3)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("max(num3) OVER(PARTITION BY datetime1)", null, "double")); + var response = + executeQuery( + String.format( + "SELECT max(num3)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("max(num3) OVER(PARTITION BY datetime1)", null, "double")); verifySome(response.getJSONArray("datarows"), rows(12.93D)); } @Test public void testAvgDoubleInMemory() throws IOException { - var response = executeQuery(String.format("SELECT avg(num3)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); 
- verifySchema(response, - schema("avg(num3) OVER(PARTITION BY datetime1)", null, "double")); + var response = + executeQuery( + String.format( + "SELECT avg(num3)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("avg(num3) OVER(PARTITION BY datetime1)", null, "double")); verifySome(response.getJSONArray("datarows"), rows(-6.12D)); } @Test public void testMaxDatePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT max(date0)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT max(date0)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("max(date0)", null, "date")); verifyDataRows(response, rows("2004-06-19")); } @Test public void testAvgDatePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT avg(date0)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT avg(date0)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("avg(date0)", null, "date")); verifyDataRows(response, rows("1992-04-23")); } @Test public void testMinDateTimePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT min(datetime(CAST(time0 AS STRING)))" - + " from %s", TEST_INDEX_CALCS)); + var response = + executeQuery( + String.format( + "SELECT min(datetime(CAST(time0 AS STRING)))" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("min(datetime(CAST(time0 AS STRING)))", null, "datetime")); verifyDataRows(response, rows("1899-12-30 21:07:32")); } @Test public void testMaxDateTimePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT max(datetime(CAST(time0 AS STRING)))" - + " from %s", TEST_INDEX_CALCS)); + var response = + executeQuery( + String.format( + "SELECT max(datetime(CAST(time0 AS STRING)))" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("max(datetime(CAST(time0 AS STRING)))", null, 
"datetime")); verifyDataRows(response, rows("1900-01-01 20:36:00")); } @Test public void testAvgDateTimePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT avg(datetime(CAST(time0 AS STRING)))" - + " from %s", TEST_INDEX_CALCS)); + var response = + executeQuery( + String.format( + "SELECT avg(datetime(CAST(time0 AS STRING)))" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("avg(datetime(CAST(time0 AS STRING)))", null, "datetime")); verifyDataRows(response, rows("1900-01-01 03:35:00.236")); } @Test public void testMinTimePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT min(time1)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT min(time1)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("min(time1)", null, "time")); verifyDataRows(response, rows("00:05:57")); } @Test public void testMaxTimePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT max(time1)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT max(time1)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("max(time1)", null, "time")); verifyDataRows(response, rows("22:50:16")); } @Test public void testAvgTimePushedDown() throws IOException { - var response = executeQuery(String.format("SELECT avg(time1)" - + " from %s", TEST_INDEX_CALCS)); + var response = executeQuery(String.format("SELECT avg(time1)" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("avg(time1)", null, "time")); verifyDataRows(response, rows("13:06:36.25")); } @Test public void testMinTimeStampPushedDown() throws IOException { - var response = executeQuery(String.format("SELECT min(CAST(datetime0 AS timestamp))" - + " from %s", TEST_INDEX_CALCS)); + var response = + executeQuery( + String.format( + "SELECT min(CAST(datetime0 AS timestamp))" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, 
schema("min(CAST(datetime0 AS timestamp))", null, "timestamp")); verifyDataRows(response, rows("2004-07-04 22:49:28")); } @Test public void testMaxTimeStampPushedDown() throws IOException { - var response = executeQuery(String.format("SELECT max(CAST(datetime0 AS timestamp))" - + " from %s", TEST_INDEX_CALCS)); + var response = + executeQuery( + String.format( + "SELECT max(CAST(datetime0 AS timestamp))" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("max(CAST(datetime0 AS timestamp))", null, "timestamp")); verifyDataRows(response, rows("2004-08-02 07:59:23")); } @Test public void testAvgTimeStampPushedDown() throws IOException { - var response = executeQuery(String.format("SELECT avg(CAST(datetime0 AS timestamp))" - + " from %s", TEST_INDEX_CALCS)); + var response = + executeQuery( + String.format( + "SELECT avg(CAST(datetime0 AS timestamp))" + " from %s", TEST_INDEX_CALCS)); verifySchema(response, schema("avg(CAST(datetime0 AS timestamp))", null, "timestamp")); verifyDataRows(response, rows("2004-07-20 10:38:09.705")); } @Test public void testMinDateInMemory() throws IOException { - var response = executeQuery(String.format("SELECT min(date0)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("min(date0) OVER(PARTITION BY datetime1)", null, "date")); + var response = + executeQuery( + String.format( + "SELECT min(date0)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("min(date0) OVER(PARTITION BY datetime1)", null, "date")); verifySome(response.getJSONArray("datarows"), rows("1972-07-04")); } @Test public void testMaxDateInMemory() throws IOException { - var response = executeQuery(String.format("SELECT max(date0)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("max(date0) OVER(PARTITION BY datetime1)", null, "date")); + var response = + executeQuery( + String.format( + "SELECT max(date0)" + " 
OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("max(date0) OVER(PARTITION BY datetime1)", null, "date")); verifySome(response.getJSONArray("datarows"), rows("2004-06-19")); } @Test public void testAvgDateInMemory() throws IOException { - var response = executeQuery(String.format("SELECT avg(date0)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("avg(date0) OVER(PARTITION BY datetime1)", null, "date")); + var response = + executeQuery( + String.format( + "SELECT avg(date0)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("avg(date0) OVER(PARTITION BY datetime1)", null, "date")); verifySome(response.getJSONArray("datarows"), rows("1992-04-23")); } @Test public void testMinDateTimeInMemory() throws IOException { - var response = executeQuery(String.format("SELECT min(datetime(CAST(time0 AS STRING)))" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("min(datetime(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", null, "datetime")); + var response = + executeQuery( + String.format( + "SELECT min(datetime(CAST(time0 AS STRING)))" + + " OVER(PARTITION BY datetime1) from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, + schema( + "min(datetime(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", null, "datetime")); verifySome(response.getJSONArray("datarows"), rows("1899-12-30 21:07:32")); } @Test public void testMaxDateTimeInMemory() throws IOException { - var response = executeQuery(String.format("SELECT max(datetime(CAST(time0 AS STRING)))" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("max(datetime(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", null, "datetime")); + var response = + executeQuery( + String.format( + "SELECT max(datetime(CAST(time0 AS STRING)))" + + " OVER(PARTITION BY datetime1) from %s", + 
TEST_INDEX_CALCS)); + verifySchema( + response, + schema( + "max(datetime(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", null, "datetime")); verifySome(response.getJSONArray("datarows"), rows("1900-01-01 20:36:00")); } @Test public void testAvgDateTimeInMemory() throws IOException { - var response = executeQuery(String.format("SELECT avg(datetime(CAST(time0 AS STRING)))" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("avg(datetime(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", null, "datetime")); + var response = + executeQuery( + String.format( + "SELECT avg(datetime(CAST(time0 AS STRING)))" + + " OVER(PARTITION BY datetime1) from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, + schema( + "avg(datetime(CAST(time0 AS STRING))) OVER(PARTITION BY datetime1)", null, "datetime")); verifySome(response.getJSONArray("datarows"), rows("1900-01-01 03:35:00.236")); } @Test public void testMinTimeInMemory() throws IOException { - var response = executeQuery(String.format("SELECT min(time1)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("min(time1) OVER(PARTITION BY datetime1)", null, "time")); + var response = + executeQuery( + String.format( + "SELECT min(time1)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("min(time1) OVER(PARTITION BY datetime1)", null, "time")); verifySome(response.getJSONArray("datarows"), rows("00:05:57")); } @Test public void testMaxTimeInMemory() throws IOException { - var response = executeQuery(String.format("SELECT max(time1)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("max(time1) OVER(PARTITION BY datetime1)", null, "time")); + var response = + executeQuery( + String.format( + "SELECT max(time1)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("max(time1) OVER(PARTITION 
BY datetime1)", null, "time")); verifySome(response.getJSONArray("datarows"), rows("22:50:16")); } @Test public void testAvgTimeInMemory() throws IOException { - var response = executeQuery(String.format("SELECT avg(time1)" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("avg(time1) OVER(PARTITION BY datetime1)", null, "time")); + var response = + executeQuery( + String.format( + "SELECT avg(time1)" + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); + verifySchema(response, schema("avg(time1) OVER(PARTITION BY datetime1)", null, "time")); verifySome(response.getJSONArray("datarows"), rows("13:06:36.25")); } @Test public void testMinTimeStampInMemory() throws IOException { - var response = executeQuery(String.format("SELECT min(CAST(datetime0 AS timestamp))" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("min(CAST(datetime0 AS timestamp)) OVER(PARTITION BY datetime1)", null, "timestamp")); + var response = + executeQuery( + String.format( + "SELECT min(CAST(datetime0 AS timestamp))" + + " OVER(PARTITION BY datetime1) from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, + schema( + "min(CAST(datetime0 AS timestamp)) OVER(PARTITION BY datetime1)", null, "timestamp")); verifySome(response.getJSONArray("datarows"), rows("2004-07-04 22:49:28")); } @Test public void testMaxTimeStampInMemory() throws IOException { - var response = executeQuery(String.format("SELECT max(CAST(datetime0 AS timestamp))" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("max(CAST(datetime0 AS timestamp)) OVER(PARTITION BY datetime1)", null, "timestamp")); + var response = + executeQuery( + String.format( + "SELECT max(CAST(datetime0 AS timestamp))" + + " OVER(PARTITION BY datetime1) from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, + schema( + "max(CAST(datetime0 AS timestamp)) OVER(PARTITION BY datetime1)", null, 
"timestamp")); verifySome(response.getJSONArray("datarows"), rows("2004-08-02 07:59:23")); } @Test public void testAvgTimeStampInMemory() throws IOException { - var response = executeQuery(String.format("SELECT avg(CAST(datetime0 AS timestamp))" - + " OVER(PARTITION BY datetime1) from %s", TEST_INDEX_CALCS)); - verifySchema(response, - schema("avg(CAST(datetime0 AS timestamp)) OVER(PARTITION BY datetime1)", null, "timestamp")); + var response = + executeQuery( + String.format( + "SELECT avg(CAST(datetime0 AS timestamp))" + + " OVER(PARTITION BY datetime1) from %s", + TEST_INDEX_CALCS)); + verifySchema( + response, + schema( + "avg(CAST(datetime0 AS timestamp)) OVER(PARTITION BY datetime1)", null, "timestamp")); verifySome(response.getJSONArray("datarows"), rows("2004-07-20 10:38:09.705")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/ArithmeticFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/ArithmeticFunctionIT.java index 5b6c742e28..7c91c42197 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/ArithmeticFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/ArithmeticFunctionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.plugin.RestSqlAction.QUERY_API_ENDPOINT; @@ -15,7 +14,6 @@ import java.io.IOException; import java.util.Locale; - import org.json.JSONObject; import org.junit.jupiter.api.Test; import org.opensearch.client.Request; @@ -25,222 +23,223 @@ public class ArithmeticFunctionIT extends SQLIntegTestCase { - @Override - public void init() throws Exception { - super.init(); - loadIndex(Index.BANK); - } - - public void testAdd() throws IOException { - JSONObject result = executeQuery("select 3 + 2"); - verifySchema(result, schema("3 + 2", null, "integer")); - verifyDataRows(result, rows(3 + 2)); - - result = executeQuery("select 2.5 + 2"); - verifySchema(result, schema("2.5 + 2", null, "double")); - 
verifyDataRows(result, rows(2.5D + 2)); - - result = executeQuery("select 3000000000 + 2"); - verifySchema(result, schema("3000000000 + 2", null, "long")); - verifyDataRows(result, rows(3000000000L + 2)); - - result = executeQuery("select CAST(6.666666 AS FLOAT) + 2"); - verifySchema(result, schema("CAST(6.666666 AS FLOAT) + 2", null, "float")); - verifyDataRows(result, rows(6.666666 + 2)); - } - - @Test - public void testAddFunction() throws IOException { - JSONObject result = executeQuery("select add(3, 2)"); - verifySchema(result, schema("add(3, 2)", null, "integer")); - verifyDataRows(result, rows(3 + 2)); - - result = executeQuery("select add(2.5, 2)"); - verifySchema(result, schema("add(2.5, 2)", null, "double")); - verifyDataRows(result, rows(2.5D + 2)); - - result = executeQuery("select add(3000000000, 2)"); - verifySchema(result, schema("add(3000000000, 2)", null, "long")); - verifyDataRows(result, rows(3000000000L + 2)); - - result = executeQuery("select add(CAST(6.666666 AS FLOAT), 2)"); - verifySchema(result, schema("add(CAST(6.666666 AS FLOAT), 2)", null, "float")); - verifyDataRows(result, rows(6.666666 + 2)); - } - - public void testDivide() throws IOException { - JSONObject result = executeQuery("select 3 / 2"); - verifySchema(result, schema("3 / 2", null, "integer")); - verifyDataRows(result, rows(3 / 2)); - - result = executeQuery("select 2.5 / 2"); - verifySchema(result, schema("2.5 / 2", null, "double")); - verifyDataRows(result, rows(2.5D / 2)); - - result = executeQuery("select 6000000000 / 2"); - verifySchema(result, schema("6000000000 / 2", null, "long")); - verifyDataRows(result, rows(6000000000L / 2)); - - result = executeQuery("select cast(1.6 AS float) / 2"); - verifySchema(result, schema("cast(1.6 AS float) / 2", null, "float")); - verifyDataRows(result, rows(1.6 / 2)); - } - - public void testDivideFunction() throws IOException { - JSONObject result = executeQuery("select divide(3, 2)"); - verifySchema(result, schema("divide(3, 2)", 
null, "integer")); - verifyDataRows(result, rows(3 / 2)); - - result = executeQuery("select divide(2.5, 2)"); - verifySchema(result, schema("divide(2.5, 2)", null, "double")); - verifyDataRows(result, rows(2.5D / 2)); - - result = executeQuery("select divide(6000000000, 2)"); - verifySchema(result, schema("divide(6000000000, 2)", null, "long")); - verifyDataRows(result, rows(6000000000L / 2)); - - result = executeQuery("select divide(cast(1.6 AS float), 2)"); - verifySchema(result, schema("divide(cast(1.6 AS float), 2)", null, "float")); - verifyDataRows(result, rows(1.6 / 2)); - } - - public void testMod() throws IOException { - JSONObject result = executeQuery("select mod(3, 2)"); - verifySchema(result, schema("mod(3, 2)", null, "integer")); - verifyDataRows(result, rows(3 % 2)); - - result = executeQuery("select mod(2.5, 2)"); - verifySchema(result, schema("mod(2.5, 2)", null, "double")); - verifyDataRows(result, rows(2.5D % 2)); - - result = executeQuery("select mod(cast(300001 as long), 2)"); - verifySchema(result, schema("mod(cast(300001 as long), 2)", null, "long")); - verifyDataRows(result, rows(3000001 % 2)); - - result = executeQuery("select mod(cast(1.6 AS float), 2)"); - verifySchema(result, schema("mod(cast(1.6 AS float), 2)", null, "float")); - verifyDataRows(result, rows(1.6 % 2)); - } - - public void testModulus() throws IOException { - JSONObject result = executeQuery("select 3 % 2"); - verifySchema(result, schema("3 % 2", null, "integer")); - verifyDataRows(result, rows(3 % 2)); - - result = executeQuery("select 2.5 % 2"); - verifySchema(result, schema("2.5 % 2", null, "double")); - verifyDataRows(result, rows(2.5D % 2)); - - result = executeQuery("select cast(300001 as long) % 2"); - verifySchema(result, schema("cast(300001 as long) % 2", null, "long")); - verifyDataRows(result, rows(300001 % 2)); - - result = executeQuery("select cast(1.6 AS float) % 2"); - verifySchema(result, schema("cast(1.6 AS float) % 2", null, "float")); - 
verifyDataRows(result, rows(1.6 % 2)); - } - - public void testModulusFunction() throws IOException { - JSONObject result = executeQuery("select modulus(3, 2)"); - verifySchema(result, schema("modulus(3, 2)", null, "integer")); - verifyDataRows(result, rows(3 % 2)); - - result = executeQuery("select modulus(2.5, 2)"); - verifySchema(result, schema("modulus(2.5, 2)", null, "double")); - verifyDataRows(result, rows(2.5D % 2)); - - result = executeQuery("select modulus(cast(300001 as long), 2)"); - verifySchema(result, schema("modulus(cast(300001 as long), 2)", null, "long")); - verifyDataRows(result, rows(300001 % 2)); - - result = executeQuery("select modulus(cast(1.6 AS float), 2)"); - verifySchema(result, schema("modulus(cast(1.6 AS float), 2)", null, "float")); - verifyDataRows(result, rows(1.6 % 2)); - } - - public void testMultiply() throws IOException { - JSONObject result = executeQuery("select 3 * 2"); - verifySchema(result, schema("3 * 2", null, "integer")); - verifyDataRows(result, rows(3 * 2)); - - result = executeQuery("select 2.5 * 2"); - verifySchema(result, schema("2.5 * 2", null, "double")); - verifyDataRows(result, rows(2.5D * 2)); - - result = executeQuery("select 3000000000 * 2"); - verifySchema(result, schema("3000000000 * 2", null, "long")); - verifyDataRows(result, rows(3000000000L * 2)); - - result = executeQuery("select CAST(1.6 AS FLOAT) * 2"); - verifySchema(result, schema("CAST(1.6 AS FLOAT) * 2", null, "float")); - verifyDataRows(result, rows(1.6 * 2)); - } - - @Test - public void testMultiplyFunction() throws IOException { - JSONObject result = executeQuery("select multiply(3, 2)"); - verifySchema(result, schema("multiply(3, 2)", null, "integer")); - verifyDataRows(result, rows(3 * 2)); - - result = executeQuery("select multiply(2.5, 2)"); - verifySchema(result, schema("multiply(2.5, 2)", null, "double")); - verifyDataRows(result, rows(2.5D * 2)); - - result = executeQuery("select multiply(3000000000, 2)"); - verifySchema(result, 
schema("multiply(3000000000, 2)", null, "long")); - verifyDataRows(result, rows(3000000000L * 2)); - - result = executeQuery("select multiply(CAST(1.6 AS FLOAT), 2)"); - verifySchema(result, schema("multiply(CAST(1.6 AS FLOAT), 2)", null, "float")); - verifyDataRows(result, rows(1.6 * 2)); - } - - public void testSubtract() throws IOException { - JSONObject result = executeQuery("select 3 - 2"); - verifySchema(result, schema("3 - 2", null, "integer")); - verifyDataRows(result, rows(3 - 2)); - - result = executeQuery("select 2.5 - 2"); - verifySchema(result, schema("2.5 - 2", null, "double")); - verifyDataRows(result, rows(2.5D - 2)); - - result = executeQuery("select 3000000000 - 2"); - verifySchema(result, schema("3000000000 - 2", null, "long")); - verifyDataRows(result, rows(3000000000L - 2)); - - result = executeQuery("select CAST(6.666666 AS FLOAT) - 2"); - verifySchema(result, schema("CAST(6.666666 AS FLOAT) - 2", null, "float")); - verifyDataRows(result, rows(6.666666 - 2)); - } - - @Test - public void testSubtractFunction() throws IOException { - JSONObject result = executeQuery("select subtract(3, 2)"); - verifySchema(result, schema("subtract(3, 2)", null, "integer")); - verifyDataRows(result, rows(3 - 2)); - - result = executeQuery("select subtract(2.5, 2)"); - verifySchema(result, schema("subtract(2.5, 2)", null, "double")); - verifyDataRows(result, rows(2.5D - 2)); - - result = executeQuery("select subtract(3000000000, 2)"); - verifySchema(result, schema("subtract(3000000000, 2)", null, "long")); - verifyDataRows(result, rows(3000000000L - 2)); - - result = executeQuery("select cast(subtract(cast(6.666666 as float), 2) as float)"); - verifySchema(result, schema("cast(subtract(cast(6.666666 as float), 2) as float)", null, "float")); - verifyDataRows(result, rows(6.666666 - 2)); - } - - protected JSONObject executeQuery(String query) throws IOException { - Request request = new Request("POST", QUERY_API_ENDPOINT); - 
request.setJsonEntity(String.format(Locale.ROOT, "{\n" + " \"query\": \"%s\"\n" + "}", query)); - - RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); - restOptionsBuilder.addHeader("Content-Type", "application/json"); - request.setOptions(restOptionsBuilder); - - Response response = client().performRequest(request); - return new JSONObject(getResponseBody(response)); - } + @Override + public void init() throws Exception { + super.init(); + loadIndex(Index.BANK); + } + + public void testAdd() throws IOException { + JSONObject result = executeQuery("select 3 + 2"); + verifySchema(result, schema("3 + 2", null, "integer")); + verifyDataRows(result, rows(3 + 2)); + + result = executeQuery("select 2.5 + 2"); + verifySchema(result, schema("2.5 + 2", null, "double")); + verifyDataRows(result, rows(2.5D + 2)); + + result = executeQuery("select 3000000000 + 2"); + verifySchema(result, schema("3000000000 + 2", null, "long")); + verifyDataRows(result, rows(3000000000L + 2)); + + result = executeQuery("select CAST(6.666666 AS FLOAT) + 2"); + verifySchema(result, schema("CAST(6.666666 AS FLOAT) + 2", null, "float")); + verifyDataRows(result, rows(6.666666 + 2)); + } + + @Test + public void testAddFunction() throws IOException { + JSONObject result = executeQuery("select add(3, 2)"); + verifySchema(result, schema("add(3, 2)", null, "integer")); + verifyDataRows(result, rows(3 + 2)); + + result = executeQuery("select add(2.5, 2)"); + verifySchema(result, schema("add(2.5, 2)", null, "double")); + verifyDataRows(result, rows(2.5D + 2)); + + result = executeQuery("select add(3000000000, 2)"); + verifySchema(result, schema("add(3000000000, 2)", null, "long")); + verifyDataRows(result, rows(3000000000L + 2)); + + result = executeQuery("select add(CAST(6.666666 AS FLOAT), 2)"); + verifySchema(result, schema("add(CAST(6.666666 AS FLOAT), 2)", null, "float")); + verifyDataRows(result, rows(6.666666 + 2)); + } + + public void testDivide() throws IOException { 
+ JSONObject result = executeQuery("select 3 / 2"); + verifySchema(result, schema("3 / 2", null, "integer")); + verifyDataRows(result, rows(3 / 2)); + + result = executeQuery("select 2.5 / 2"); + verifySchema(result, schema("2.5 / 2", null, "double")); + verifyDataRows(result, rows(2.5D / 2)); + + result = executeQuery("select 6000000000 / 2"); + verifySchema(result, schema("6000000000 / 2", null, "long")); + verifyDataRows(result, rows(6000000000L / 2)); + + result = executeQuery("select cast(1.6 AS float) / 2"); + verifySchema(result, schema("cast(1.6 AS float) / 2", null, "float")); + verifyDataRows(result, rows(1.6 / 2)); + } + + public void testDivideFunction() throws IOException { + JSONObject result = executeQuery("select divide(3, 2)"); + verifySchema(result, schema("divide(3, 2)", null, "integer")); + verifyDataRows(result, rows(3 / 2)); + + result = executeQuery("select divide(2.5, 2)"); + verifySchema(result, schema("divide(2.5, 2)", null, "double")); + verifyDataRows(result, rows(2.5D / 2)); + + result = executeQuery("select divide(6000000000, 2)"); + verifySchema(result, schema("divide(6000000000, 2)", null, "long")); + verifyDataRows(result, rows(6000000000L / 2)); + + result = executeQuery("select divide(cast(1.6 AS float), 2)"); + verifySchema(result, schema("divide(cast(1.6 AS float), 2)", null, "float")); + verifyDataRows(result, rows(1.6 / 2)); + } + + public void testMod() throws IOException { + JSONObject result = executeQuery("select mod(3, 2)"); + verifySchema(result, schema("mod(3, 2)", null, "integer")); + verifyDataRows(result, rows(3 % 2)); + + result = executeQuery("select mod(2.5, 2)"); + verifySchema(result, schema("mod(2.5, 2)", null, "double")); + verifyDataRows(result, rows(2.5D % 2)); + + result = executeQuery("select mod(cast(300001 as long), 2)"); + verifySchema(result, schema("mod(cast(300001 as long), 2)", null, "long")); + verifyDataRows(result, rows(3000001 % 2)); + + result = executeQuery("select mod(cast(1.6 AS float), 
2)"); + verifySchema(result, schema("mod(cast(1.6 AS float), 2)", null, "float")); + verifyDataRows(result, rows(1.6 % 2)); + } + + public void testModulus() throws IOException { + JSONObject result = executeQuery("select 3 % 2"); + verifySchema(result, schema("3 % 2", null, "integer")); + verifyDataRows(result, rows(3 % 2)); + + result = executeQuery("select 2.5 % 2"); + verifySchema(result, schema("2.5 % 2", null, "double")); + verifyDataRows(result, rows(2.5D % 2)); + + result = executeQuery("select cast(300001 as long) % 2"); + verifySchema(result, schema("cast(300001 as long) % 2", null, "long")); + verifyDataRows(result, rows(300001 % 2)); + + result = executeQuery("select cast(1.6 AS float) % 2"); + verifySchema(result, schema("cast(1.6 AS float) % 2", null, "float")); + verifyDataRows(result, rows(1.6 % 2)); + } + + public void testModulusFunction() throws IOException { + JSONObject result = executeQuery("select modulus(3, 2)"); + verifySchema(result, schema("modulus(3, 2)", null, "integer")); + verifyDataRows(result, rows(3 % 2)); + + result = executeQuery("select modulus(2.5, 2)"); + verifySchema(result, schema("modulus(2.5, 2)", null, "double")); + verifyDataRows(result, rows(2.5D % 2)); + + result = executeQuery("select modulus(cast(300001 as long), 2)"); + verifySchema(result, schema("modulus(cast(300001 as long), 2)", null, "long")); + verifyDataRows(result, rows(300001 % 2)); + + result = executeQuery("select modulus(cast(1.6 AS float), 2)"); + verifySchema(result, schema("modulus(cast(1.6 AS float), 2)", null, "float")); + verifyDataRows(result, rows(1.6 % 2)); + } + + public void testMultiply() throws IOException { + JSONObject result = executeQuery("select 3 * 2"); + verifySchema(result, schema("3 * 2", null, "integer")); + verifyDataRows(result, rows(3 * 2)); + + result = executeQuery("select 2.5 * 2"); + verifySchema(result, schema("2.5 * 2", null, "double")); + verifyDataRows(result, rows(2.5D * 2)); + + result = executeQuery("select 3000000000 
* 2"); + verifySchema(result, schema("3000000000 * 2", null, "long")); + verifyDataRows(result, rows(3000000000L * 2)); + + result = executeQuery("select CAST(1.6 AS FLOAT) * 2"); + verifySchema(result, schema("CAST(1.6 AS FLOAT) * 2", null, "float")); + verifyDataRows(result, rows(1.6 * 2)); + } + + @Test + public void testMultiplyFunction() throws IOException { + JSONObject result = executeQuery("select multiply(3, 2)"); + verifySchema(result, schema("multiply(3, 2)", null, "integer")); + verifyDataRows(result, rows(3 * 2)); + + result = executeQuery("select multiply(2.5, 2)"); + verifySchema(result, schema("multiply(2.5, 2)", null, "double")); + verifyDataRows(result, rows(2.5D * 2)); + + result = executeQuery("select multiply(3000000000, 2)"); + verifySchema(result, schema("multiply(3000000000, 2)", null, "long")); + verifyDataRows(result, rows(3000000000L * 2)); + + result = executeQuery("select multiply(CAST(1.6 AS FLOAT), 2)"); + verifySchema(result, schema("multiply(CAST(1.6 AS FLOAT), 2)", null, "float")); + verifyDataRows(result, rows(1.6 * 2)); + } + + public void testSubtract() throws IOException { + JSONObject result = executeQuery("select 3 - 2"); + verifySchema(result, schema("3 - 2", null, "integer")); + verifyDataRows(result, rows(3 - 2)); + + result = executeQuery("select 2.5 - 2"); + verifySchema(result, schema("2.5 - 2", null, "double")); + verifyDataRows(result, rows(2.5D - 2)); + + result = executeQuery("select 3000000000 - 2"); + verifySchema(result, schema("3000000000 - 2", null, "long")); + verifyDataRows(result, rows(3000000000L - 2)); + + result = executeQuery("select CAST(6.666666 AS FLOAT) - 2"); + verifySchema(result, schema("CAST(6.666666 AS FLOAT) - 2", null, "float")); + verifyDataRows(result, rows(6.666666 - 2)); + } + + @Test + public void testSubtractFunction() throws IOException { + JSONObject result = executeQuery("select subtract(3, 2)"); + verifySchema(result, schema("subtract(3, 2)", null, "integer")); + 
verifyDataRows(result, rows(3 - 2)); + + result = executeQuery("select subtract(2.5, 2)"); + verifySchema(result, schema("subtract(2.5, 2)", null, "double")); + verifyDataRows(result, rows(2.5D - 2)); + + result = executeQuery("select subtract(3000000000, 2)"); + verifySchema(result, schema("subtract(3000000000, 2)", null, "long")); + verifyDataRows(result, rows(3000000000L - 2)); + + result = executeQuery("select cast(subtract(cast(6.666666 as float), 2) as float)"); + verifySchema( + result, schema("cast(subtract(cast(6.666666 as float), 2) as float)", null, "float")); + verifyDataRows(result, rows(6.666666 - 2)); + } + + protected JSONObject executeQuery(String query) throws IOException { + Request request = new Request("POST", QUERY_API_ENDPOINT); + request.setJsonEntity(String.format(Locale.ROOT, "{\n" + " \"query\": \"%s\"\n" + "}", query)); + + RequestOptions.Builder restOptionsBuilder = RequestOptions.DEFAULT.toBuilder(); + restOptionsBuilder.addHeader("Content-Type", "application/json"); + request.setOptions(restOptionsBuilder); + + Response response = client().performRequest(request); + return new JSONObject(getResponseBody(response)); + } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/ConditionalIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/ConditionalIT.java index ab0900784d..deb41653e2 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/ConditionalIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/ConditionalIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.hamcrest.Matchers.equalTo; @@ -42,9 +41,11 @@ public void init() throws Exception { @Test public void ifnullShouldPassJDBC() throws IOException { - JSONObject response = executeJdbcRequest( - "SELECT IFNULL(lastname, 'unknown') AS name FROM " + TEST_INDEX_ACCOUNT - + " GROUP BY name"); + JSONObject response = + executeJdbcRequest( + "SELECT IFNULL(lastname, 'unknown') AS name FROM " + + 
TEST_INDEX_ACCOUNT + + " GROUP BY name"); assertEquals("IFNULL(lastname, 'unknown')", response.query("/schema/0/name")); assertEquals("name", response.query("/schema/0/alias")); assertEquals("keyword", response.query("/schema/0/type")); @@ -52,87 +53,95 @@ public void ifnullShouldPassJDBC() throws IOException { @Test public void ifnullWithNullInputTest() { - JSONObject response = new JSONObject(executeQuery( - "SELECT IFNULL(null, firstname) as IFNULL1 ," + JSONObject response = + new JSONObject( + executeQuery( + "SELECT IFNULL(null, firstname) as IFNULL1 ," + " IFNULL(firstname, null) as IFNULL2 ," + " IFNULL(null, null) as IFNULL3 " - + " FROM " + TEST_INDEX_BANK_WITH_NULL_VALUES - + " WHERE balance is null limit 2", "jdbc")); - - verifySchema(response, - schema("IFNULL(null, firstname)", "IFNULL1", "keyword"), - schema("IFNULL(firstname, null)", "IFNULL2", "keyword"), - schema("IFNULL(null, null)", "IFNULL3", "byte")); - verifyDataRows(response, - rows("Hattie", "Hattie", LITERAL_NULL.value()), - rows( "Elinor", "Elinor", LITERAL_NULL.value()) - ); + + " FROM " + + TEST_INDEX_BANK_WITH_NULL_VALUES + + " WHERE balance is null limit 2", + "jdbc")); + + verifySchema( + response, + schema("IFNULL(null, firstname)", "IFNULL1", "keyword"), + schema("IFNULL(firstname, null)", "IFNULL2", "keyword"), + schema("IFNULL(null, null)", "IFNULL3", "byte")); + verifyDataRows( + response, + rows("Hattie", "Hattie", LITERAL_NULL.value()), + rows("Elinor", "Elinor", LITERAL_NULL.value())); } @Test public void ifnullWithMissingInputTest() { - JSONObject response = new JSONObject(executeQuery( - "SELECT IFNULL(balance, 100) as IFNULL1, " + JSONObject response = + new JSONObject( + executeQuery( + "SELECT IFNULL(balance, 100) as IFNULL1, " + " IFNULL(200, balance) as IFNULL2, " + " IFNULL(balance, balance) as IFNULL3 " - + " FROM " + TEST_INDEX_BANK_WITH_NULL_VALUES - + " WHERE balance is null limit 3", "jdbc")); - verifySchema(response, - schema("IFNULL(balance, 100)", "IFNULL1", 
"long"), - schema("IFNULL(200, balance)", "IFNULL2", "long"), - schema("IFNULL(balance, balance)", "IFNULL3", "long")); - verifyDataRows(response, - rows(100, 200, null), - rows(100, 200, null), - rows(100, 200, null) - ); + + " FROM " + + TEST_INDEX_BANK_WITH_NULL_VALUES + + " WHERE balance is null limit 3", + "jdbc")); + verifySchema( + response, + schema("IFNULL(balance, 100)", "IFNULL1", "long"), + schema("IFNULL(200, balance)", "IFNULL2", "long"), + schema("IFNULL(balance, balance)", "IFNULL3", "long")); + verifyDataRows(response, rows(100, 200, null), rows(100, 200, null), rows(100, 200, null)); } @Test public void nullifShouldPassJDBC() throws IOException { - JSONObject response = executeJdbcRequest( - "SELECT NULLIF(lastname, 'unknown') AS name FROM " + TEST_INDEX_ACCOUNT); + JSONObject response = + executeJdbcRequest("SELECT NULLIF(lastname, 'unknown') AS name FROM " + TEST_INDEX_ACCOUNT); assertEquals("NULLIF(lastname, 'unknown')", response.query("/schema/0/name")); assertEquals("name", response.query("/schema/0/alias")); assertEquals("keyword", response.query("/schema/0/type")); } @Test - public void nullifWithNotNullInputTestOne(){ - JSONObject response = new JSONObject(executeQuery( - "SELECT NULLIF(firstname, 'Amber JOHnny') as testnullif " - + "FROM " + TEST_INDEX_BANK_WITH_NULL_VALUES - + " limit 2 ", "jdbc")); - verifySchema(response, - schema("NULLIF(firstname, 'Amber JOHnny')", "testnullif", "keyword")); - verifyDataRows(response, - rows(LITERAL_NULL.value()), - rows("Hattie") - ); + public void nullifWithNotNullInputTestOne() { + JSONObject response = + new JSONObject( + executeQuery( + "SELECT NULLIF(firstname, 'Amber JOHnny') as testnullif " + + "FROM " + + TEST_INDEX_BANK_WITH_NULL_VALUES + + " limit 2 ", + "jdbc")); + verifySchema(response, schema("NULLIF(firstname, 'Amber JOHnny')", "testnullif", "keyword")); + verifyDataRows(response, rows(LITERAL_NULL.value()), rows("Hattie")); } @Test public void nullifWithNullInputTest() { - JSONObject 
response = new JSONObject(executeQuery( - "SELECT NULLIF(1/0, 123) as nullif1 ," + JSONObject response = + new JSONObject( + executeQuery( + "SELECT NULLIF(1/0, 123) as nullif1 ," + " NULLIF(123, 1/0) as nullif2 ," + " NULLIF(1/0, 1/0) as nullif3 " - + " FROM " + TEST_INDEX_BANK_WITH_NULL_VALUES - + " WHERE balance is null limit 1", "jdbc")); - verifySchema(response, - schema("NULLIF(1/0, 123)", "nullif1", "integer"), - schema("NULLIF(123, 1/0)", "nullif2", "integer"), - schema("NULLIF(1/0, 1/0)", "nullif3", "integer")); - verifyDataRows(response, - rows(LITERAL_NULL.value(), 123, LITERAL_NULL.value() - ) - ); + + " FROM " + + TEST_INDEX_BANK_WITH_NULL_VALUES + + " WHERE balance is null limit 1", + "jdbc")); + verifySchema( + response, + schema("NULLIF(1/0, 123)", "nullif1", "integer"), + schema("NULLIF(123, 1/0)", "nullif2", "integer"), + schema("NULLIF(1/0, 1/0)", "nullif3", "integer")); + verifyDataRows(response, rows(LITERAL_NULL.value(), 123, LITERAL_NULL.value())); } @Test public void isnullShouldPassJDBC() throws IOException { - JSONObject response = executeJdbcRequest( - "SELECT ISNULL(lastname) AS name FROM " + TEST_INDEX_ACCOUNT); + JSONObject response = + executeJdbcRequest("SELECT ISNULL(lastname) AS name FROM " + TEST_INDEX_ACCOUNT); assertEquals("ISNULL(lastname)", response.query("/schema/0/name")); assertEquals("name", response.query("/schema/0/alias")); assertEquals("boolean", response.query("/schema/0/type")); @@ -141,47 +150,48 @@ public void isnullShouldPassJDBC() throws IOException { @Test public void isnullWithNotNullInputTest() throws IOException { assertThat( - executeQuery("SELECT ISNULL('elastic') AS isnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/isnull/0", equalTo(0))) - ); + executeQuery("SELECT ISNULL('elastic') AS isnull FROM " + TEST_INDEX_ACCOUNT), + hitAny(kvInt("/fields/isnull/0", equalTo(0)))); assertThat( - executeQuery("SELECT ISNULL('') AS isnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/isnull/0", 
equalTo(0))) - ); + executeQuery("SELECT ISNULL('') AS isnull FROM " + TEST_INDEX_ACCOUNT), + hitAny(kvInt("/fields/isnull/0", equalTo(0)))); } @Test public void isnullWithNullInputTest() { - JSONObject response = new JSONObject(executeQuery( - "SELECT ISNULL(1/0) as ISNULL1 ," + JSONObject response = + new JSONObject( + executeQuery( + "SELECT ISNULL(1/0) as ISNULL1 ," + " ISNULL(firstname) as ISNULL2 " - + " FROM " + TEST_INDEX_BANK_WITH_NULL_VALUES - + " WHERE balance is null limit 2", "jdbc")); - verifySchema(response, - schema("ISNULL(1/0)", "ISNULL1", "boolean"), - schema("ISNULL(firstname)", "ISNULL2", "boolean")); - verifyDataRows(response, - rows(LITERAL_TRUE.value(), LITERAL_FALSE.value()), - rows(LITERAL_TRUE.value(), LITERAL_FALSE.value()) - ); + + " FROM " + + TEST_INDEX_BANK_WITH_NULL_VALUES + + " WHERE balance is null limit 2", + "jdbc")); + verifySchema( + response, + schema("ISNULL(1/0)", "ISNULL1", "boolean"), + schema("ISNULL(firstname)", "ISNULL2", "boolean")); + verifyDataRows( + response, + rows(LITERAL_TRUE.value(), LITERAL_FALSE.value()), + rows(LITERAL_TRUE.value(), LITERAL_FALSE.value())); } @Test - public void isnullWithMathExpr() throws IOException{ + public void isnullWithMathExpr() throws IOException { assertThat( - executeQuery("SELECT ISNULL(1+1) AS isnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/isnull/0", equalTo(0))) - ); + executeQuery("SELECT ISNULL(1+1) AS isnull FROM " + TEST_INDEX_ACCOUNT), + hitAny(kvInt("/fields/isnull/0", equalTo(0)))); assertThat( - executeQuery("SELECT ISNULL(1+1*1/0) AS isnull FROM " + TEST_INDEX_ACCOUNT), - hitAny(kvInt("/fields/isnull/0", equalTo(1))) - ); + executeQuery("SELECT ISNULL(1+1*1/0) AS isnull FROM " + TEST_INDEX_ACCOUNT), + hitAny(kvInt("/fields/isnull/0", equalTo(1)))); } @Test public void ifShouldPassJDBC() throws IOException { - JSONObject response = executeJdbcRequest( - "SELECT IF(2 > 0, 'hello', 'world') AS name FROM " + TEST_INDEX_ACCOUNT); + JSONObject response = + 
executeJdbcRequest("SELECT IF(2 > 0, 'hello', 'world') AS name FROM " + TEST_INDEX_ACCOUNT); assertEquals("IF(2 > 0, 'hello', 'world')", response.query("/schema/0/name")); assertEquals("name", response.query("/schema/0/alias")); assertEquals("keyword", response.query("/schema/0/type")); @@ -189,33 +199,37 @@ public void ifShouldPassJDBC() throws IOException { @Test public void ifWithTrueAndFalseCondition() throws IOException { - JSONObject response = new JSONObject(executeQuery( - "SELECT IF(2 < 0, firstname, lastname) as IF0, " - + " IF(2 > 0, firstname, lastname) as IF1, " - + " firstname as IF2, " - + " lastname as IF3 " - + " FROM " + TEST_INDEX_BANK_WITH_NULL_VALUES - + " limit 2 ", "jdbc" )); - verifySchema(response, - schema("IF(2 < 0, firstname, lastname)", "IF0", "keyword"), - schema("IF(2 > 0, firstname, lastname)", "IF1", "keyword"), - schema("firstname", "IF2", "text"), - schema("lastname", "IF3", "keyword") - ); - verifyDataRows(response, - rows("Duke Willmington", "Amber JOHnny", "Amber JOHnny", "Duke Willmington"), - rows("Bond", "Hattie", "Hattie", "Bond") - ); - + JSONObject response = + new JSONObject( + executeQuery( + "SELECT IF(2 < 0, firstname, lastname) as IF0, " + + " IF(2 > 0, firstname, lastname) as IF1, " + + " firstname as IF2, " + + " lastname as IF3 " + + " FROM " + + TEST_INDEX_BANK_WITH_NULL_VALUES + + " limit 2 ", + "jdbc")); + verifySchema( + response, + schema("IF(2 < 0, firstname, lastname)", "IF0", "keyword"), + schema("IF(2 > 0, firstname, lastname)", "IF1", "keyword"), + schema("firstname", "IF2", "text"), + schema("lastname", "IF3", "keyword")); + verifyDataRows( + response, + rows("Duke Willmington", "Amber JOHnny", "Amber JOHnny", "Duke Willmington"), + rows("Bond", "Hattie", "Hattie", "Bond")); } private SearchHits query(String query) throws IOException { final String rsp = executeQueryWithStringOutput(query); - final XContentParser parser = new JsonXContentParser( - NamedXContentRegistry.EMPTY, - 
LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(rsp)); + final XContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(rsp)); return SearchResponse.fromXContent(parser).getHits(); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/ConvertTZFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/ConvertTZFunctionIT.java index 308fe7cdcd..76600b6561 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/ConvertTZFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/ConvertTZFunctionIT.java @@ -1,20 +1,20 @@ - /* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ package org.opensearch.sql.sql; -import org.junit.Test; -import org.opensearch.sql.legacy.SQLIntegTestCase; -import java.io.IOException; import static org.opensearch.sql.util.MatcherUtils.rows; import static org.opensearch.sql.util.MatcherUtils.schema; import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; import static org.opensearch.sql.util.MatcherUtils.verifySchema; -public class ConvertTZFunctionIT extends SQLIntegTestCase { +import java.io.IOException; +import org.junit.Test; +import org.opensearch.sql.legacy.SQLIntegTestCase; +public class ConvertTZFunctionIT extends SQLIntegTestCase { @Override public void init() throws Exception { @@ -22,103 +22,91 @@ public void init() throws Exception { loadIndex(Index.BANK); } - @Test public void inRangeZeroToPositive() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2008-05-15 12:00:00','+00:00','+10:00')"); - verifySchema(result, - schema("convert_tz('2008-05-15 12:00:00','+00:00','+10:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT convert_tz('2008-05-15 12:00:00','+00:00','+10:00')"); + verifySchema( + result, 
schema("convert_tz('2008-05-15 12:00:00','+00:00','+10:00')", null, "datetime")); verifyDataRows(result, rows("2008-05-15 22:00:00")); } @Test public void inRangeNegativeZeroToPositiveZero() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 00:00:00','-00:00','+00:00')"); - verifySchema(result, - schema("convert_tz('2021-05-12 00:00:00','-00:00','+00:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 00:00:00','-00:00','+00:00')"); + verifySchema( + result, schema("convert_tz('2021-05-12 00:00:00','-00:00','+00:00')", null, "datetime")); verifyDataRows(result, rows("2021-05-12 00:00:00")); } @Test public void inRangePositiveToPositive() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 00:00:00','+10:00','+11:00')"); - verifySchema(result, - schema("convert_tz('2021-05-12 00:00:00','+10:00','+11:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 00:00:00','+10:00','+11:00')"); + verifySchema( + result, schema("convert_tz('2021-05-12 00:00:00','+10:00','+11:00')", null, "datetime")); verifyDataRows(result, rows("2021-05-12 01:00:00")); } @Test public void inRangeNegativeToPositive() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 11:34:50','-08:00','+09:00')"); - verifySchema(result, - schema("convert_tz('2021-05-12 11:34:50','-08:00','+09:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','-08:00','+09:00')"); + verifySchema( + result, schema("convert_tz('2021-05-12 11:34:50','-08:00','+09:00')", null, "datetime")); verifyDataRows(result, rows("2021-05-13 04:34:50")); } @Test public void inRangeSameTimeZone() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 11:34:50','+09:00','+09:00')"); - verifySchema(result, - schema("convert_tz('2021-05-12 11:34:50','+09:00','+09:00')", 
null, "datetime")); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','+09:00','+09:00')"); + verifySchema( + result, schema("convert_tz('2021-05-12 11:34:50','+09:00','+09:00')", null, "datetime")); verifyDataRows(result, rows("2021-05-12 11:34:50")); } @Test public void inRangeTwentyFourHourTimeOffset() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 11:34:50','-12:00','+12:00')"); - verifySchema(result, - schema("convert_tz('2021-05-12 11:34:50','-12:00','+12:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','-12:00','+12:00')"); + verifySchema( + result, schema("convert_tz('2021-05-12 11:34:50','-12:00','+12:00')", null, "datetime")); verifyDataRows(result, rows("2021-05-13 11:34:50")); } @Test public void inRangeFifteenMinuteTimeZones() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 13:00:00','+09:30','+05:45')"); - verifySchema(result, - schema("convert_tz('2021-05-12 13:00:00','+09:30','+05:45')", null, "datetime")); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 13:00:00','+09:30','+05:45')"); + verifySchema( + result, schema("convert_tz('2021-05-12 13:00:00','+09:30','+05:45')", null, "datetime")); verifyDataRows(result, rows("2021-05-12 09:15:00")); } @Test public void inRangeRandomTimes() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 13:00:00','+09:31','+05:11')"); - verifySchema(result, - schema("convert_tz('2021-05-12 13:00:00','+09:31','+05:11')", null, "datetime")); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 13:00:00','+09:31','+05:11')"); + verifySchema( + result, schema("convert_tz('2021-05-12 13:00:00','+09:31','+05:11')", null, "datetime")); verifyDataRows(result, rows("2021-05-12 08:40:00")); } @Test public void nullField2Under() throws IOException { - var result = executeJdbcRequest( - "SELECT 
convert_tz('2021-05-30 11:34:50','-14:00','+08:00')"); - verifySchema(result, - schema("convert_tz('2021-05-30 11:34:50','-14:00','+08:00')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-30 11:34:50','-14:00','+08:00')"); + verifySchema( + result, schema("convert_tz('2021-05-30 11:34:50','-14:00','+08:00')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullField3Over() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 11:34:50','-12:00','+14:01')"); - verifySchema(result, - schema("convert_tz('2021-05-12 11:34:50','-12:00','+14:01')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','-12:00','+14:01')"); + verifySchema( + result, schema("convert_tz('2021-05-12 11:34:50','-12:00','+14:01')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void inRangeMinOnPoint() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 15:00:00','-13:59','-13:59')"); - verifySchema(result, - schema("convert_tz('2021-05-12 15:00:00','-13:59','-13:59')", null, "datetime")); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 15:00:00','-13:59','-13:59')"); + verifySchema( + result, schema("convert_tz('2021-05-12 15:00:00','-13:59','-13:59')", null, "datetime")); verifyDataRows(result, rows("2021-05-12 15:00:00")); } @@ -128,57 +116,50 @@ public void inRangeMinOnPoint() throws IOException { // Invalid input returns null. 
@Test public void nullField3InvalidInput() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 11:34:50','+10:0','+14:01')"); - verifySchema(result, - schema("convert_tz('2021-05-12 11:34:50','+10:0','+14:01')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','+10:0','+14:01')"); + verifySchema( + result, schema("convert_tz('2021-05-12 11:34:50','+10:0','+14:01')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullField2InvalidInput() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-05-12 11:34:50','+14:01','****')"); - verifySchema(result, - schema("convert_tz('2021-05-12 11:34:50','+14:01','****')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT convert_tz('2021-05-12 11:34:50','+14:01','****')"); + verifySchema( + result, schema("convert_tz('2021-05-12 11:34:50','+14:01','****')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } // Invalid input in the datetime field of CONVERT_TZ results in a null field. 
It is any input // which is not of the format `yyyy-MM-dd HH:mm:ss` @Test public void nullDateTimeInvalidInput() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021----','+00:00','+00:00')"); - verifySchema(result, - schema("convert_tz('2021----','+00:00','+00:00')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT convert_tz('2021----','+00:00','+00:00')"); + verifySchema(result, schema("convert_tz('2021----','+00:00','+00:00')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueFebruary() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-02-30 10:00:00','+00:00','+00:00')"); - verifySchema(result, - schema("convert_tz('2021-02-30 10:00:00','+00:00','+00:00')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT convert_tz('2021-02-30 10:00:00','+00:00','+00:00')"); + verifySchema( + result, schema("convert_tz('2021-02-30 10:00:00','+00:00','+00:00')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueApril() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-04-31 10:00:00','+00:00','+00:00')"); - verifySchema(result, - schema("convert_tz('2021-04-31 10:00:00','+00:00','+00:00')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT convert_tz('2021-04-31 10:00:00','+00:00','+00:00')"); + verifySchema( + result, schema("convert_tz('2021-04-31 10:00:00','+00:00','+00:00')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueMonth() throws IOException { - var result = executeJdbcRequest( - "SELECT convert_tz('2021-13-03 10:00:00','+00:00','+00:00')"); - verifySchema(result, - 
schema("convert_tz('2021-13-03 10:00:00','+00:00','+00:00')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT convert_tz('2021-13-03 10:00:00','+00:00','+00:00')"); + verifySchema( + result, schema("convert_tz('2021-13-03 10:00:00','+00:00','+00:00')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/CorrectnessTestBase.java b/integ-test/src/test/java/org/opensearch/sql/sql/CorrectnessTestBase.java index cd5765e0ce..33c9c0687f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/CorrectnessTestBase.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/CorrectnessTestBase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static java.util.Collections.emptyMap; @@ -25,15 +24,13 @@ import org.opensearch.sql.legacy.utils.StringUtils; /** - * SQL integration test base class. This is very similar to CorrectnessIT though - * enforce the success of all tests rather than report failures only. + * SQL integration test base class. This is very similar to CorrectnessIT though enforce the success + * of all tests rather than report failures only. */ @ThreadLeakScope(ThreadLeakScope.Scope.NONE) public abstract class CorrectnessTestBase extends RestIntegTestCase { - /** - * Comparison test runner shared by all methods in this IT class. - */ + /** Comparison test runner shared by all methods in this IT class. 
*/ private static ComparisonTest runner; @Override @@ -43,8 +40,7 @@ protected void init() throws Exception { } TestConfig config = new TestConfig(emptyMap()); - runner = new ComparisonTest(getOpenSearchConnection(), - getOtherDBConnections(config)); + runner = new ComparisonTest(getOpenSearchConnection(), getOtherDBConnections(config)); runner.connect(); for (TestDataSet dataSet : config.getTestDataSets()) { @@ -52,9 +48,7 @@ protected void init() throws Exception { } } - /** - * Clean up test data and close other database connection. - */ + /** Clean up test data and close other database connection. */ @AfterClass public static void cleanUp() { if (runner == null) { @@ -74,33 +68,29 @@ public static void cleanUp() { } /** - * Execute the given queries and compare result with other database. - * The queries will be considered as one test batch. + * Execute the given queries and compare result with other database. The queries will be + * considered as one test batch. */ protected void verify(String... queries) { TestReport result = runner.verify(new TestQuerySet(queries)); TestSummary summary = result.getSummary(); - Assert.assertEquals(StringUtils.format( - "Comparison test failed on queries: %s", new JSONObject(result).toString(2)), - 0, summary.getFailure()); + Assert.assertEquals( + StringUtils.format( + "Comparison test failed on queries: %s", new JSONObject(result).toString(2)), + 0, + summary.getFailure()); } - /** - * Use OpenSearch cluster initialized by OpenSearch Gradle task. - */ + /** Use OpenSearch cluster initialized by OpenSearch Gradle task. */ private DBConnection getOpenSearchConnection() { String openSearchHost = client().getNodes().get(0).getHost().toString(); return new OpenSearchConnection("jdbc:opensearch://" + openSearchHost, client()); } - /** - * Create database connection with database name and connect URL. - */ + /** Create database connection with database name and connect URL. 
*/ private DBConnection[] getOtherDBConnections(TestConfig config) { - return config.getOtherDbConnectionNameAndUrls() - .entrySet().stream() - .map(e -> new JDBCConnection(e.getKey(), e.getValue())) - .toArray(DBConnection[]::new); + return config.getOtherDbConnectionNameAndUrls().entrySet().stream() + .map(e -> new JDBCConnection(e.getKey(), e.getValue())) + .toArray(DBConnection[]::new); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/CsvFormatIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/CsvFormatIT.java index a551ebabc1..55b70ed589 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/CsvFormatIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/CsvFormatIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK_CSV_SANITIZE; @@ -11,7 +10,6 @@ import java.io.IOException; import java.util.Locale; - import org.junit.Test; import org.opensearch.client.Request; import org.opensearch.client.Response; @@ -27,36 +25,45 @@ public void init() throws IOException { @Test public void sanitizeTest() { - String result = executeQuery( - String.format(Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_CSV_SANITIZE), "csv"); - assertEquals(StringUtils.format( - "firstname,lastname%n" - + "'+Amber JOHnny,Duke Willmington+%n" - + "'-Hattie,Bond-%n" - + "'=Nanette,Bates=%n" - + "'@Dale,Adams@%n" - + "\",Elinor\",\"Ratliff,,,\"%n"), + String result = + executeQuery( + String.format( + Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_CSV_SANITIZE), + "csv"); + assertEquals( + StringUtils.format( + "firstname,lastname%n" + + "'+Amber JOHnny,Duke Willmington+%n" + + "'-Hattie,Bond-%n" + + "'=Nanette,Bates=%n" + + "'@Dale,Adams@%n" + + "\",Elinor\",\"Ratliff,,,\"%n"), result); } @Test public void escapeSanitizeTest() { - String result = executeQuery( - String.format(Locale.ROOT, "SELECT 
firstname, lastname FROM %s", TEST_INDEX_BANK_CSV_SANITIZE), - "csv&sanitize=false"); - assertEquals(StringUtils.format( - "firstname,lastname%n" - + "+Amber JOHnny,Duke Willmington+%n" - + "-Hattie,Bond-%n" - + "=Nanette,Bates=%n" - + "@Dale,Adams@%n" - + "\",Elinor\",\"Ratliff,,,\"%n"), + String result = + executeQuery( + String.format( + Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_CSV_SANITIZE), + "csv&sanitize=false"); + assertEquals( + StringUtils.format( + "firstname,lastname%n" + + "+Amber JOHnny,Duke Willmington+%n" + + "-Hattie,Bond-%n" + + "=Nanette,Bates=%n" + + "@Dale,Adams@%n" + + "\",Elinor\",\"Ratliff,,,\"%n"), result); } @Test public void contentHeaderTest() throws IOException { - String query = String.format(Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_CSV_SANITIZE); + String query = + String.format( + Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_CSV_SANITIZE); String requestBody = makeRequest(query); Request sqlRequest = new Request("POST", "/_plugins/_sql?format=csv"); diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeComparisonIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeComparisonIT.java index 108687da27..432daef82f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeComparisonIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeComparisonIT.java @@ -14,14 +14,13 @@ import static org.opensearch.sql.util.MatcherUtils.verifySchema; import static org.opensearch.sql.util.TestUtils.getResponseBody; +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import java.io.IOException; import java.time.LocalDate; import java.util.Arrays; import java.util.Locale; import java.util.TimeZone; - -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import 
org.json.JSONObject; import org.junit.After; import org.junit.Before; @@ -55,9 +54,10 @@ public void resetTimeZone() { private String name; private Boolean expectedResult; - public DateTimeComparisonIT(@Name("functionCall") String functionCall, - @Name("name") String name, - @Name("expectedResult") Boolean expectedResult) { + public DateTimeComparisonIT( + @Name("functionCall") String functionCall, + @Name("name") String name, + @Name("expectedResult") Boolean expectedResult) { this.functionCall = functionCall; this.name = name; this.expectedResult = expectedResult; @@ -65,542 +65,698 @@ public DateTimeComparisonIT(@Name("functionCall") String functionCall, @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareTwoDates() { - return Arrays.asList($$( - $("DATE('2020-09-16') = DATE('2020-09-16')", "eq1", true), - $("DATE('2020-09-16') = DATE('1961-04-12')", "eq2", false), - $("DATE('2020-09-16') != DATE('1984-12-15')", "neq1", true), - $("DATE('1961-04-12') != DATE('1984-12-15')", "neq2", true), - $("DATE('1961-04-12') != DATE('1961-04-12')", "neq3", false), - $("DATE('1984-12-15') > DATE('1961-04-12')", "gt1", true), - $("DATE('1984-12-15') > DATE('2020-09-16')", "gt2", false), - $("DATE('1961-04-12') < DATE('1984-12-15')", "lt1", true), - $("DATE('1984-12-15') < DATE('1961-04-12')", "lt2", false), - $("DATE('1984-12-15') >= DATE('1961-04-12')", "gte1", true), - $("DATE('1984-12-15') >= DATE('1984-12-15')", "gte2", true), - $("DATE('1984-12-15') >= DATE('2020-09-16')", "gte3", false), - $("DATE('1961-04-12') <= DATE('1984-12-15')", "lte1", true), - $("DATE('1961-04-12') <= DATE('1961-04-12')", "lte2", true), - $("DATE('2020-09-16') <= DATE('1961-04-12')", "lte3", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') = DATE('2020-09-16')", "eq1", true), + $("DATE('2020-09-16') = DATE('1961-04-12')", "eq2", false), + $("DATE('2020-09-16') != DATE('1984-12-15')", "neq1", true), + $("DATE('1961-04-12') != DATE('1984-12-15')", 
"neq2", true), + $("DATE('1961-04-12') != DATE('1961-04-12')", "neq3", false), + $("DATE('1984-12-15') > DATE('1961-04-12')", "gt1", true), + $("DATE('1984-12-15') > DATE('2020-09-16')", "gt2", false), + $("DATE('1961-04-12') < DATE('1984-12-15')", "lt1", true), + $("DATE('1984-12-15') < DATE('1961-04-12')", "lt2", false), + $("DATE('1984-12-15') >= DATE('1961-04-12')", "gte1", true), + $("DATE('1984-12-15') >= DATE('1984-12-15')", "gte2", true), + $("DATE('1984-12-15') >= DATE('2020-09-16')", "gte3", false), + $("DATE('1961-04-12') <= DATE('1984-12-15')", "lte1", true), + $("DATE('1961-04-12') <= DATE('1961-04-12')", "lte2", true), + $("DATE('2020-09-16') <= DATE('1961-04-12')", "lte3", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareTwoTimes() { - return Arrays.asList($$( - $("TIME('09:16:37') = TIME('09:16:37')", "eq1", true), - $("TIME('09:16:37') = TIME('04:12:42')", "eq2", false), - $("TIME('09:16:37') != TIME('12:15:22')", "neq1", true), - $("TIME('04:12:42') != TIME('12:15:22')", "neq2", true), - $("TIME('04:12:42') != TIME('04:12:42')", "neq3", false), - $("TIME('12:15:22') > TIME('04:12:42')", "gt1", true), - $("TIME('12:15:22') > TIME('19:16:03')", "gt2", false), - $("TIME('04:12:42') < TIME('12:15:22')", "lt1", true), - $("TIME('14:12:38') < TIME('12:15:22')", "lt2", false), - $("TIME('12:15:22') >= TIME('04:12:42')", "gte1", true), - $("TIME('12:15:22') >= TIME('12:15:22')", "gte2", true), - $("TIME('12:15:22') >= TIME('19:16:03')", "gte3", false), - $("TIME('04:12:42') <= TIME('12:15:22')", "lte1", true), - $("TIME('04:12:42') <= TIME('04:12:42')", "lte2", true), - $("TIME('19:16:03') <= TIME('04:12:42')", "lte3", false) - )); + return Arrays.asList( + $$( + $("TIME('09:16:37') = TIME('09:16:37')", "eq1", true), + $("TIME('09:16:37') = TIME('04:12:42')", "eq2", false), + $("TIME('09:16:37') != TIME('12:15:22')", "neq1", true), + $("TIME('04:12:42') != TIME('12:15:22')", "neq2", true), + 
$("TIME('04:12:42') != TIME('04:12:42')", "neq3", false), + $("TIME('12:15:22') > TIME('04:12:42')", "gt1", true), + $("TIME('12:15:22') > TIME('19:16:03')", "gt2", false), + $("TIME('04:12:42') < TIME('12:15:22')", "lt1", true), + $("TIME('14:12:38') < TIME('12:15:22')", "lt2", false), + $("TIME('12:15:22') >= TIME('04:12:42')", "gte1", true), + $("TIME('12:15:22') >= TIME('12:15:22')", "gte2", true), + $("TIME('12:15:22') >= TIME('19:16:03')", "gte3", false), + $("TIME('04:12:42') <= TIME('12:15:22')", "lte1", true), + $("TIME('04:12:42') <= TIME('04:12:42')", "lte2", true), + $("TIME('19:16:03') <= TIME('04:12:42')", "lte3", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareTwoDateTimes() { - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", "eq1", true), - $("DATETIME('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", "eq2", false), - $("DATETIME('2020-09-16 10:20:30') != DATETIME('1984-12-15 22:15:07')", "neq1", true), - $("DATETIME('1984-12-15 22:15:08') != DATETIME('1984-12-15 22:15:07')", "neq2", true), - $("DATETIME('1961-04-12 09:07:00') != DATETIME('1961-04-12 09:07:00')", "neq3", false), - $("DATETIME('1984-12-15 22:15:07') > DATETIME('1961-04-12 22:15:07')", "gt1", true), - $("DATETIME('1984-12-15 22:15:07') > DATETIME('1984-12-15 22:15:06')", "gt2", true), - $("DATETIME('1984-12-15 22:15:07') > DATETIME('2020-09-16 10:20:30')", "gt3", false), - $("DATETIME('1961-04-12 09:07:00') < DATETIME('1984-12-15 09:07:00')", "lt1", true), - $("DATETIME('1984-12-15 22:15:07') < DATETIME('1984-12-15 22:15:08')", "lt2", true), - $("DATETIME('1984-12-15 22:15:07') < DATETIME('1961-04-12 09:07:00')", "lt3", false), - $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1961-04-12 09:07:00')", "gte1", true), - $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1984-12-15 22:15:07')", "gte2", true), - $("DATETIME('1984-12-15 22:15:07') >= DATETIME('2020-09-16 10:20:30')", 
"gte3", false), - $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", "lte1", true), - $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1961-04-12 09:07:00')", "lte2", true), - $("DATETIME('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", "lte3", false) - )); + return Arrays.asList( + $$( + $("DATETIME('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", "eq1", true), + $("DATETIME('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", "eq2", false), + $("DATETIME('2020-09-16 10:20:30') != DATETIME('1984-12-15 22:15:07')", "neq1", true), + $("DATETIME('1984-12-15 22:15:08') != DATETIME('1984-12-15 22:15:07')", "neq2", true), + $("DATETIME('1961-04-12 09:07:00') != DATETIME('1961-04-12 09:07:00')", "neq3", false), + $("DATETIME('1984-12-15 22:15:07') > DATETIME('1961-04-12 22:15:07')", "gt1", true), + $("DATETIME('1984-12-15 22:15:07') > DATETIME('1984-12-15 22:15:06')", "gt2", true), + $("DATETIME('1984-12-15 22:15:07') > DATETIME('2020-09-16 10:20:30')", "gt3", false), + $("DATETIME('1961-04-12 09:07:00') < DATETIME('1984-12-15 09:07:00')", "lt1", true), + $("DATETIME('1984-12-15 22:15:07') < DATETIME('1984-12-15 22:15:08')", "lt2", true), + $("DATETIME('1984-12-15 22:15:07') < DATETIME('1961-04-12 09:07:00')", "lt3", false), + $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1961-04-12 09:07:00')", "gte1", true), + $("DATETIME('1984-12-15 22:15:07') >= DATETIME('1984-12-15 22:15:07')", "gte2", true), + $("DATETIME('1984-12-15 22:15:07') >= DATETIME('2020-09-16 10:20:30')", "gte3", false), + $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", "lte1", true), + $("DATETIME('1961-04-12 09:07:00') <= DATETIME('1961-04-12 09:07:00')", "lte2", true), + $( + "DATETIME('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", + "lte3", + false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareTwoTimestamps() { - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 
10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", "eq1", true), - $("TIMESTAMP('2020-09-16 10:20:30') = TIMESTAMP('1961-04-12 09:07:00')", "eq2", false), - $("TIMESTAMP('2020-09-16 10:20:30') != TIMESTAMP('1984-12-15 22:15:07')", "neq1", true), - $("TIMESTAMP('1984-12-15 22:15:08') != TIMESTAMP('1984-12-15 22:15:07')", "neq2", true), - $("TIMESTAMP('1961-04-12 09:07:00') != TIMESTAMP('1961-04-12 09:07:00')", "neq3", false), - $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('1961-04-12 22:15:07')", "gt1", true), - $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('1984-12-15 22:15:06')", "gt2", true), - $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('2020-09-16 10:20:30')", "gt3", false), - $("TIMESTAMP('1961-04-12 09:07:00') < TIMESTAMP('1984-12-15 09:07:00')", "lt1", true), - $("TIMESTAMP('1984-12-15 22:15:07') < TIMESTAMP('1984-12-15 22:15:08')", "lt2", true), - $("TIMESTAMP('1984-12-15 22:15:07') < TIMESTAMP('1961-04-12 09:07:00')", "lt3", false), - $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('1961-04-12 09:07:00')", "gte1", true), - $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('1984-12-15 22:15:07')", "gte2", true), - $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('2020-09-16 10:20:30')", "gte3", false), - $("TIMESTAMP('1961-04-12 09:07:00') <= TIMESTAMP('1984-12-15 22:15:07')", "lte1", true), - $("TIMESTAMP('1961-04-12 09:07:00') <= TIMESTAMP('1961-04-12 09:07:00')", "lte2", true), - $("TIMESTAMP('2020-09-16 10:20:30') <= TIMESTAMP('1961-04-12 09:07:00')", "lte3", false) - )); + return Arrays.asList( + $$( + $("TIMESTAMP('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", "eq1", true), + $("TIMESTAMP('2020-09-16 10:20:30') = TIMESTAMP('1961-04-12 09:07:00')", "eq2", false), + $("TIMESTAMP('2020-09-16 10:20:30') != TIMESTAMP('1984-12-15 22:15:07')", "neq1", true), + $("TIMESTAMP('1984-12-15 22:15:08') != TIMESTAMP('1984-12-15 22:15:07')", "neq2", true), + $( + "TIMESTAMP('1961-04-12 09:07:00') != TIMESTAMP('1961-04-12 09:07:00')", + "neq3", + 
false), + $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('1961-04-12 22:15:07')", "gt1", true), + $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('1984-12-15 22:15:06')", "gt2", true), + $("TIMESTAMP('1984-12-15 22:15:07') > TIMESTAMP('2020-09-16 10:20:30')", "gt3", false), + $("TIMESTAMP('1961-04-12 09:07:00') < TIMESTAMP('1984-12-15 09:07:00')", "lt1", true), + $("TIMESTAMP('1984-12-15 22:15:07') < TIMESTAMP('1984-12-15 22:15:08')", "lt2", true), + $("TIMESTAMP('1984-12-15 22:15:07') < TIMESTAMP('1961-04-12 09:07:00')", "lt3", false), + $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('1961-04-12 09:07:00')", "gte1", true), + $("TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('1984-12-15 22:15:07')", "gte2", true), + $( + "TIMESTAMP('1984-12-15 22:15:07') >= TIMESTAMP('2020-09-16 10:20:30')", + "gte3", + false), + $("TIMESTAMP('1961-04-12 09:07:00') <= TIMESTAMP('1984-12-15 22:15:07')", "lte1", true), + $("TIMESTAMP('1961-04-12 09:07:00') <= TIMESTAMP('1961-04-12 09:07:00')", "lte2", true), + $( + "TIMESTAMP('2020-09-16 10:20:30') <= TIMESTAMP('1961-04-12 09:07:00')", + "lte3", + false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareEqTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('1961-04-12 09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), - $("DATE('2020-09-16') = TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') = DATE('1961-04-12')", "ts_d_f", false), - $("DATE('1961-04-12') = TIMESTAMP('1984-12-15 22:15:07')", "d_ts_f", false), - $("TIMESTAMP('" + 
today + " 10:20:30') = TIME('10:20:30')", "ts_t_t", true), - $("TIME('10:20:30') = TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') = TIME('09:07:00')", "ts_t_f", false), - $("TIME('09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('1961-04-12 09:07:00')", + "ts_dt_f", + false), + $( + "DATETIME('1961-04-12 09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), + $("DATE('2020-09-16') = TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') = DATE('1961-04-12')", "ts_d_f", false), + $("DATE('1961-04-12') = TIMESTAMP('1984-12-15 22:15:07')", "d_ts_f", false), + $("TIMESTAMP('" + today + " 10:20:30') = TIME('10:20:30')", "ts_t_t", true), + $("TIME('10:20:30') = TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') = TIME('09:07:00')", "ts_t_f", false), + $("TIME('09:07:00') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareEqDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('1961-04-12 09:07:00') = DATETIME('1984-12-15 22:15:07')", "ts_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), - 
$("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), - $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') = TIME('09:07:00')", "dt_t_f", false), - $("TIME('09:07:00') = DATETIME('1984-12-15 22:15:07')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') = TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') = DATETIME('2020-09-16 10:20:30')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') = TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_f", + false), + $( + "TIMESTAMP('1961-04-12 09:07:00') = DATETIME('1984-12-15 22:15:07')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), + $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), + $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), + $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('2020-09-16 10:20:30') = TIME('09:07:00')", "dt_t_f", false), + $("TIME('09:07:00') = DATETIME('1984-12-15 22:15:07')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareEqDateWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATE('2020-09-16') = TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), - $("DATE('2020-09-16') = TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", 
false), - $("TIMESTAMP('1984-12-15 09:07:00') = DATE('1984-12-15')", "ts_d_f", false), - $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), - $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), - $("DATE('" + today + "') = TIME('00:00:00')", "d_t_t", true), - $("TIME('00:00:00') = DATE('" + today + "')", "t_d_t", true), - $("DATE('2020-09-16') = TIME('09:07:00')", "d_t_f", false), - $("TIME('09:07:00') = DATE('" + today + "')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') = TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 00:00:00') = DATE('2020-09-16')", "ts_d_t", true), + $("DATE('2020-09-16') = TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), + $("TIMESTAMP('1984-12-15 09:07:00') = DATE('1984-12-15')", "ts_d_f", false), + $("DATE('2020-09-16') = DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') = DATE('2020-09-16')", "dt_d_t", true), + $("DATE('1961-04-12') = DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('1961-04-12 10:20:30') = DATE('1961-04-12')", "dt_d_f", false), + $("DATE('" + today + "') = TIME('00:00:00')", "d_t_t", true), + $("TIME('00:00:00') = DATE('" + today + "')", "t_d_t", true), + $("DATE('2020-09-16') = TIME('09:07:00')", "d_t_f", false), + $("TIME('09:07:00') = DATE('" + today + "')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareEqTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') = DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), - $("DATETIME('" + 
today + " 09:07:00') = TIME('10:20:30')", "dt_t_f", false), - $("TIME('10:20:30') = TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 10:20:30') = TIME('10:20:30')", "ts_t_t", true), - $("TIME('22:15:07') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), - $("TIMESTAMP('1984-12-15 10:20:30') = TIME('10:20:30')", "ts_t_f", false), - $("TIME('00:00:00') = DATE('" + today + "')", "t_d_t", true), - $("DATE('" + today + "') = TIME('00:00:00')", "d_t_t", true), - $("TIME('09:07:00') = DATE('" + today + "')", "t_d_f", false), - $("DATE('2020-09-16') = TIME('09:07:00')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('10:20:30') = DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('" + today + " 10:20:30') = TIME('10:20:30')", "dt_t_t", true), + $("TIME('09:07:00') = DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), + $("DATETIME('" + today + " 09:07:00') = TIME('10:20:30')", "dt_t_f", false), + $("TIME('10:20:30') = TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 10:20:30') = TIME('10:20:30')", "ts_t_t", true), + $("TIME('22:15:07') = TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), + $("TIMESTAMP('1984-12-15 10:20:30') = TIME('10:20:30')", "ts_t_f", false), + $("TIME('00:00:00') = DATE('" + today + "')", "t_d_t", true), + $("DATE('" + today + "') = TIME('00:00:00')", "d_t_t", true), + $("TIME('09:07:00') = DATE('" + today + "')", "t_d_f", false), + $("DATE('2020-09-16') = TIME('09:07:00')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareNeqTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') != DATETIME('1961-04-12 09:07:00')", "ts_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') != TIMESTAMP('1984-12-15 22:15:07')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", "ts_dt_f", 
false), - $("DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') != DATE('1961-04-12')", "ts_d_t", true), - $("DATE('1961-04-12') != TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), - $("DATE('2020-09-16') != TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') != TIME('09:07:00')", "ts_t_t", true), - $("TIME('09:07:00') != TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 10:20:30') != TIME('10:20:30')", "ts_t_f", false), - $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('1961-04-12 09:07:00')", + "ts_dt_t", + true), + $( + "DATETIME('1961-04-12 09:07:00') != TIMESTAMP('1984-12-15 22:15:07')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", + "ts_dt_f", + false), + $( + "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') != DATE('1961-04-12')", "ts_d_t", true), + $("DATE('1961-04-12') != TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), + $("DATE('2020-09-16') != TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('2020-09-16 10:20:30') != TIME('09:07:00')", "ts_t_t", true), + $("TIME('09:07:00') != TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 10:20:30') != TIME('10:20:30')", "ts_t_f", false), + $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareNeqDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - 
$("DATETIME('2020-09-16 10:20:30') != TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') != DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), - $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), - $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') != TIME('09:07:00')", "dt_t_t", true), - $("TIME('09:07:00') != DATETIME('1984-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), - $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_t", + true), + $( + "TIMESTAMP('1961-04-12 09:07:00') != DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') != TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_f", + false), + $( + "TIMESTAMP('2020-09-16 10:20:30') != DATETIME('2020-09-16 10:20:30')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), + $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), + $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), + $("DATETIME('2020-09-16 10:20:30') != TIME('09:07:00')", "dt_t_t", true), + $("TIME('09:07:00') != DATETIME('1984-12-15 22:15:07')", "t_dt_t", true), + $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), + 
$("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareNeqDateWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATE('2020-09-16') != TIMESTAMP('1961-04-12 09:07:00')", "d_ts_t", true), - $("TIMESTAMP('1984-12-15 09:07:00') != DATE('1984-12-15')", "ts_d_t", true), - $("DATE('2020-09-16') != TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), - $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), - $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), - $("DATE('2020-09-16') != TIME('09:07:00')", "d_t_t", true), - $("TIME('09:07:00') != DATE('" + today + "')", "t_d_t", true), - $("DATE('" + today + "') != TIME('00:00:00')", "d_t_f", false), - $("TIME('00:00:00') != DATE('" + today + "')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') != TIMESTAMP('1961-04-12 09:07:00')", "d_ts_t", true), + $("TIMESTAMP('1984-12-15 09:07:00') != DATE('1984-12-15')", "ts_d_t", true), + $("DATE('2020-09-16') != TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('2020-09-16 00:00:00') != DATE('2020-09-16')", "ts_d_f", false), + $("DATE('1961-04-12') != DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('1961-04-12 10:20:30') != DATE('1961-04-12')", "dt_d_t", true), + $("DATE('2020-09-16') != DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), + $("DATETIME('2020-09-16 00:00:00') != DATE('2020-09-16')", "dt_d_f", false), + $("DATE('2020-09-16') != TIME('09:07:00')", "d_t_t", true), + $("TIME('09:07:00') != DATE('" + today + "')", "t_d_t", true), + $("DATE('" + today + "') != TIME('00:00:00')", 
"d_t_f", false), + $("TIME('00:00:00') != DATE('" + today + "')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareNeqTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('09:07:00') != DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') != TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), - $("TIME('22:15:07') != TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('1984-12-15 10:20:30') != TIME('10:20:30')", "ts_t_t", true), - $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), - $("TIMESTAMP('" + today + " 10:20:30') != TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') != DATE('" + today + "')", "t_d_t", true), - $("DATE('2020-09-16') != TIME('09:07:00')", "d_t_t", true), - $("TIME('00:00:00') != DATE('" + today + "')", "t_d_f", false), - $("DATE('" + today + "') != TIME('00:00:00')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('09:07:00') != DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), + $("DATETIME('" + today + " 09:07:00') != TIME('10:20:30')", "dt_t_t", true), + $("TIME('10:20:30') != DATETIME('" + today + " 10:20:30')", "t_dt_f", false), + $("DATETIME('" + today + " 10:20:30') != TIME('10:20:30')", "dt_t_f", false), + $("TIME('22:15:07') != TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('1984-12-15 10:20:30') != TIME('10:20:30')", "ts_t_t", true), + $("TIME('10:20:30') != TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), + $("TIMESTAMP('" + today + " 10:20:30') != TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') != DATE('" + today + "')", "t_d_t", true), + $("DATE('2020-09-16') != TIME('09:07:00')", "d_t_t", true), + $("TIME('00:00:00') != DATE('" + today + "')", 
"t_d_f", false), + $("DATE('" + today + "') != TIME('00:00:00')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLtTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2061-04-12 09:07:00')", "ts_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') < TIMESTAMP('1984-12-15 22:15:07')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2020-09-16 10:20:30')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') < TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') < DATE('2077-04-12')", "ts_d_t", true), - $("DATE('1961-04-12') < TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') < DATE('1961-04-12')", "ts_d_f", false), - $("DATE('2020-09-16') < TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') < TIME('09:07:00')", "ts_t_t", true), - $("TIME('09:07:00') < TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 10:20:30') < TIME('10:20:30')", "ts_t_f", false), - $("TIME('20:50:40') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2061-04-12 09:07:00')", + "ts_dt_t", + true), + $( + "DATETIME('1961-04-12 09:07:00') < TIMESTAMP('1984-12-15 22:15:07')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('2020-09-16 10:20:30')", + "ts_dt_f", + false), + $( + "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') < DATE('2077-04-12')", "ts_d_t", true), + $("DATE('1961-04-12') < TIMESTAMP('1984-12-15 22:15:07')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') < DATE('1961-04-12')", "ts_d_f", false), + $("DATE('2020-09-16') < TIMESTAMP('2020-09-16 00:00:00')", 
"d_ts_f", false), + $("TIMESTAMP('2020-09-16 10:20:30') < TIME('09:07:00')", "ts_t_t", true), + $("TIME('09:07:00') < TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 10:20:30') < TIME('10:20:30')", "ts_t_f", false), + $("TIME('20:50:40') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLtDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2077-04-12 09:07:00')", "dt_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') < DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') < DATETIME('1984-12-15 22:15:07')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') < DATE('3077-04-12')", "dt_d_t", true), - $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') < DATE('2020-09-16')", "dt_d_f", false), - $("DATE('2020-09-16') < DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') < TIME('09:07:00')", "dt_t_t", true), - $("TIME('09:07:00') < DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') < TIME('10:20:30')", "dt_t_f", false), - $("TIME('20:40:50') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2077-04-12 09:07:00')", + "dt_ts_t", + true), + $( + "TIMESTAMP('1961-04-12 09:07:00') < DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') < TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_f", + false), + $( + "TIMESTAMP('2020-09-16 10:20:30') < DATETIME('1984-12-15 22:15:07')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 10:20:30') < DATE('3077-04-12')", 
"dt_d_t", true), + $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') < DATE('2020-09-16')", "dt_d_f", false), + $("DATE('2020-09-16') < DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), + $("DATETIME('2020-09-16 10:20:30') < TIME('09:07:00')", "dt_t_t", true), + $("TIME('09:07:00') < DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), + $("DATETIME('" + today + " 10:20:30') < TIME('10:20:30')", "dt_t_f", false), + $("TIME('20:40:50') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLtDateWithOtherTypes() { - return Arrays.asList($$( - $("DATE('2020-09-16') < TIMESTAMP('3077-04-12 09:07:00')", "d_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') < DATE('1984-12-15')", "ts_d_t", true), - $("DATE('2020-09-16') < TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('2077-04-12 09:07:00') < DATE('2020-09-16')", "ts_d_f", false), - $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') < DATE('1984-11-15')", "dt_d_t", true), - $("DATE('2020-09-16') < DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') < DATE('1984-03-22')", "dt_d_f", false), - $("DATE('2020-09-16') < TIME('09:07:00')", "d_t_t", true), - $("TIME('09:07:00') < DATE('3077-04-12')", "t_d_t", true), - $("DATE('3077-04-12') < TIME('00:00:00')", "d_t_f", false), - $("TIME('00:00:00') < DATE('2020-09-16')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') < TIMESTAMP('3077-04-12 09:07:00')", "d_ts_t", true), + $("TIMESTAMP('1961-04-12 09:07:00') < DATE('1984-12-15')", "ts_d_t", true), + $("DATE('2020-09-16') < TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('2077-04-12 09:07:00') < DATE('2020-09-16')", "ts_d_f", false), + $("DATE('1961-04-12') < DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + 
$("DATETIME('1961-04-12 10:20:30') < DATE('1984-11-15')", "dt_d_t", true), + $("DATE('2020-09-16') < DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), + $("DATETIME('2020-09-16 00:00:00') < DATE('1984-03-22')", "dt_d_f", false), + $("DATE('2020-09-16') < TIME('09:07:00')", "d_t_t", true), + $("TIME('09:07:00') < DATE('3077-04-12')", "t_d_t", true), + $("DATE('3077-04-12') < TIME('00:00:00')", "d_t_f", false), + $("TIME('00:00:00') < DATE('2020-09-16')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLtTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('09:07:00') < DATETIME('3077-04-12 09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') < TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 20:40:50') < TIME('10:20:30')", "dt_t_f", false), - $("TIME('22:15:07') < TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('1984-12-15 10:20:30') < TIME('10:20:30')", "ts_t_t", true), - $("TIME('10:20:30') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), - $("TIMESTAMP('" + today + " 20:50:42') < TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') < DATE('3077-04-12')", "t_d_t", true), - $("DATE('2020-09-16') < TIME('09:07:00')", "d_t_t", true), - $("TIME('00:00:00') < DATE('1961-04-12')", "t_d_f", false), - $("DATE('3077-04-12') < TIME('10:20:30')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('09:07:00') < DATETIME('3077-04-12 09:07:00')", "t_dt_t", true), + $("DATETIME('" + today + " 09:07:00') < TIME('10:20:30')", "dt_t_t", true), + $("TIME('10:20:30') < DATETIME('" + today + " 10:20:30')", "t_dt_f", false), + $("DATETIME('" + today + " 20:40:50') < TIME('10:20:30')", "dt_t_f", false), + $("TIME('22:15:07') < TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('1984-12-15 10:20:30') < 
TIME('10:20:30')", "ts_t_t", true), + $("TIME('10:20:30') < TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), + $("TIMESTAMP('" + today + " 20:50:42') < TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') < DATE('3077-04-12')", "t_d_t", true), + $("DATE('2020-09-16') < TIME('09:07:00')", "d_t_t", true), + $("TIME('00:00:00') < DATE('1961-04-12')", "t_d_f", false), + $("DATE('3077-04-12') < TIME('10:20:30')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGtTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2020-09-16 10:20:25')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') > TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2061-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('1961-04-12 09:07:00') > TIMESTAMP('1984-12-15 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') > DATE('1961-04-12')", "ts_d_t", true), - $("DATE('2020-09-16') > TIMESTAMP('2020-09-15 22:15:07')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') > DATE('2077-04-12')", "ts_d_f", false), - $("DATE('1961-04-12') > TIMESTAMP('1961-04-12 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('3077-07-08 20:20:30') > TIME('10:20:30')", "ts_t_t", true), - $("TIME('20:50:40') > TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 10:20:30') > TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') > TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2020-09-16 10:20:25')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') > TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') > DATETIME('2061-04-12 09:07:00')", + "ts_dt_f", + false), + $( + "DATETIME('1961-04-12 09:07:00') > 
TIMESTAMP('1984-12-15 09:07:00')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') > DATE('1961-04-12')", "ts_d_t", true), + $("DATE('2020-09-16') > TIMESTAMP('2020-09-15 22:15:07')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') > DATE('2077-04-12')", "ts_d_f", false), + $("DATE('1961-04-12') > TIMESTAMP('1961-04-12 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('3077-07-08 20:20:30') > TIME('10:20:30')", "ts_t_t", true), + $("TIME('20:50:40') > TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 10:20:30') > TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') > TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGtDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:31') > TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') > DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') > TIMESTAMP('2077-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('1961-04-12 09:07:00') > DATETIME('1961-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('3077-04-12 10:20:30') > DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') > DATE('2020-09-16')", "dt_d_f", false), - $("DATE('1961-04-12') > DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('3077-04-12 10:20:30') > TIME('09:07:00')", "dt_t_t", true), - $("TIME('20:40:50') > DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 10:20:30') > TIME('10:20:30')", "dt_t_f", false), - $("TIME('09:07:00') > DATETIME('3077-12-15 22:15:07')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:31') > TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + 
"TIMESTAMP('2020-09-16 10:20:30') > DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') > TIMESTAMP('2077-04-12 09:07:00')", + "dt_ts_f", + false), + $( + "TIMESTAMP('1961-04-12 09:07:00') > DATETIME('1961-04-12 09:07:00')", + "ts_dt_f", + false), + $("DATETIME('3077-04-12 10:20:30') > DATE('2020-09-16')", "dt_d_t", true), + $("DATE('2020-09-16') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') > DATE('2020-09-16')", "dt_d_f", false), + $("DATE('1961-04-12') > DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('3077-04-12 10:20:30') > TIME('09:07:00')", "dt_t_t", true), + $("TIME('20:40:50') > DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('" + today + " 10:20:30') > TIME('10:20:30')", "dt_t_f", false), + $("TIME('09:07:00') > DATETIME('3077-12-15 22:15:07')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGtDateWithOtherTypes() { - return Arrays.asList($$( - $("DATE('2020-09-16') > TIMESTAMP('1961-04-12 09:07:00')", "d_ts_t", true), - $("TIMESTAMP('2077-04-12 09:07:00') > DATE('2020-09-16')", "ts_d_t", true), - $("DATE('2020-09-16') > TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('1961-04-12 09:07:00') > DATE('1984-12-15')", "ts_d_f", false), - $("DATE('1984-12-15') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') > DATE('1984-03-22')", "dt_d_t", true), - $("DATE('2020-09-16') > DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') > DATE('1984-11-15')", "dt_d_f", false), - $("DATE('3077-04-12') > TIME('00:00:00')", "d_t_t", true), - $("TIME('00:00:00') > DATE('2020-09-16')", "t_d_t", true), - $("DATE('2020-09-16') > TIME('09:07:00')", "d_t_f", false), - $("TIME('09:07:00') > DATE('3077-04-12')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') > 
TIMESTAMP('1961-04-12 09:07:00')", "d_ts_t", true), + $("TIMESTAMP('2077-04-12 09:07:00') > DATE('2020-09-16')", "ts_d_t", true), + $("DATE('2020-09-16') > TIMESTAMP('2020-09-16 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('1961-04-12 09:07:00') > DATE('1984-12-15')", "ts_d_f", false), + $("DATE('1984-12-15') > DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') > DATE('1984-03-22')", "dt_d_t", true), + $("DATE('2020-09-16') > DATETIME('2020-09-16 00:00:00')", "d_dt_f", false), + $("DATETIME('1961-04-12 10:20:30') > DATE('1984-11-15')", "dt_d_f", false), + $("DATE('3077-04-12') > TIME('00:00:00')", "d_t_t", true), + $("TIME('00:00:00') > DATE('2020-09-16')", "t_d_t", true), + $("DATE('2020-09-16') > TIME('09:07:00')", "d_t_f", false), + $("TIME('09:07:00') > DATE('3077-04-12')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGtTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('09:07:00') > DATETIME('1961-04-12 09:07:00')", "t_dt_t", true), - $("DATETIME('" + today + " 20:40:50') > TIME('10:20:30')", "dt_t_t", true), - $("TIME('10:20:30') > DATETIME('" + today + " 10:20:30')", "t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') > TIME('10:20:30')", "dt_t_f", false), - $("TIME('22:15:07') > TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 20:50:42') > TIME('10:20:30')", "ts_t_t", true), - $("TIME('10:20:30') > TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), - $("TIMESTAMP('1984-12-15 10:20:30') > TIME('10:20:30')", "ts_t_f", false), - $("TIME('00:00:00') > DATE('1961-04-12')", "t_d_t", true), - $("DATE('3077-04-12') > TIME('10:20:30')", "d_t_t", true), - $("TIME('09:07:00') > DATE('3077-04-12')", "t_d_f", false), - $("DATE('2020-09-16') > TIME('09:07:00')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('09:07:00') > DATETIME('1961-04-12 09:07:00')", "t_dt_t", 
true), + $("DATETIME('" + today + " 20:40:50') > TIME('10:20:30')", "dt_t_t", true), + $("TIME('10:20:30') > DATETIME('" + today + " 10:20:30')", "t_dt_f", false), + $("DATETIME('" + today + " 09:07:00') > TIME('10:20:30')", "dt_t_f", false), + $("TIME('22:15:07') > TIMESTAMP('1984-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 20:50:42') > TIME('10:20:30')", "ts_t_t", true), + $("TIME('10:20:30') > TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false), + $("TIMESTAMP('1984-12-15 10:20:30') > TIME('10:20:30')", "ts_t_f", false), + $("TIME('00:00:00') > DATE('1961-04-12')", "t_d_t", true), + $("DATE('3077-04-12') > TIME('10:20:30')", "d_t_t", true), + $("TIME('09:07:00') > DATE('3077-04-12')", "t_d_f", false), + $("DATE('2020-09-16') > TIME('09:07:00')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLteTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('2020-09-16 10:20:30')", "ts_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') <= TIMESTAMP('1984-12-15 22:15:07')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('2077-04-12')", "ts_d_t", true), - $("DATE('2020-09-16') <= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('1961-04-12')", "ts_d_f", false), - $("DATE('2077-04-12') <= TIMESTAMP('1984-12-15 22:15:07')", "d_ts_f", false), - $("TIMESTAMP('" + today + " 10:20:30') <= TIME('10:20:30')", "ts_t_t", true), - $("TIME('09:07:00') <= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), - $("TIMESTAMP('3077-09-16 10:20:30') <= TIME('09:07:00')", "ts_t_f", false), - $("TIME('20:50:40') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false) - 
)); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('2020-09-16 10:20:30')", + "ts_dt_t", + true), + $( + "DATETIME('1961-04-12 09:07:00') <= TIMESTAMP('1984-12-15 22:15:07')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1961-04-12 09:07:00')", + "ts_dt_f", + false), + $( + "DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('2077-04-12')", "ts_d_t", true), + $("DATE('2020-09-16') <= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') <= DATE('1961-04-12')", "ts_d_f", false), + $("DATE('2077-04-12') <= TIMESTAMP('1984-12-15 22:15:07')", "d_ts_f", false), + $("TIMESTAMP('" + today + " 10:20:30') <= TIME('10:20:30')", "ts_t_t", true), + $("TIME('09:07:00') <= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_t", true), + $("TIMESTAMP('3077-09-16 10:20:30') <= TIME('09:07:00')", "ts_t_f", false), + $("TIME('20:50:40') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLteDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('3077-09-16 10:20:30') <= TIMESTAMP('2077-04-12 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1984-12-15 22:15:07')", "ts_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') <= DATE('2020-09-16')", "dt_d_t", true), - $("DATE('1961-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') <= DATE('1984-04-12')", "dt_d_f", false), - $("DATE('2020-09-16') <= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') <= 
TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') <= DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), - $("DATETIME('3077-09-16 10:20:30') <= TIME('19:07:00')", "dt_t_f", false), - $("TIME('20:40:50') <= DATETIME('" + today + " 10:20:30')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') <= TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('1961-04-12 09:07:00') <= DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('3077-09-16 10:20:30') <= TIMESTAMP('2077-04-12 09:07:00')", + "dt_ts_f", + false), + $( + "TIMESTAMP('2020-09-16 10:20:30') <= DATETIME('1984-12-15 22:15:07')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 00:00:00') <= DATE('2020-09-16')", "dt_d_t", true), + $("DATE('1961-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_t", true), + $("DATETIME('2020-09-16 10:20:30') <= DATE('1984-04-12')", "dt_d_f", false), + $("DATE('2020-09-16') <= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), + $("DATETIME('" + today + " 10:20:30') <= TIME('10:20:30')", "dt_t_t", true), + $("TIME('09:07:00') <= DATETIME('3077-12-15 22:15:07')", "t_dt_t", true), + $("DATETIME('3077-09-16 10:20:30') <= TIME('19:07:00')", "dt_t_f", false), + $("TIME('20:40:50') <= DATETIME('" + today + " 10:20:30')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLteDateWithOtherTypes() { - return Arrays.asList($$( - $("DATE('2020-09-16') <= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - $("TIMESTAMP('1961-04-12 09:07:00') <= DATE('1984-12-15')", "ts_d_t", true), - $("DATE('2020-09-16') <= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), - $("TIMESTAMP('2077-04-12 09:07:00') <= DATE('2020-09-16')", "ts_d_f", false), - $("DATE('2020-09-16') <= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('1961-04-12 10:20:30') <= DATE('1984-11-15')", "dt_d_t", true), - $("DATE('2077-04-12') <= DATETIME('1984-12-15 
22:15:07')", "d_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') <= DATE('1984-03-22')", "dt_d_f", false), - $("DATE('2020-09-16') <= TIME('09:07:00')", "d_t_t", true), - $("TIME('09:07:00') <= DATE('3077-04-12')", "t_d_t", true), - $("DATE('3077-04-12') <= TIME('00:00:00')", "d_t_f", false), - $("TIME('00:00:00') <= DATE('2020-09-16')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') <= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('1961-04-12 09:07:00') <= DATE('1984-12-15')", "ts_d_t", true), + $("DATE('2020-09-16') <= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), + $("TIMESTAMP('2077-04-12 09:07:00') <= DATE('2020-09-16')", "ts_d_f", false), + $("DATE('2020-09-16') <= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), + $("DATETIME('1961-04-12 10:20:30') <= DATE('1984-11-15')", "dt_d_t", true), + $("DATE('2077-04-12') <= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('2020-09-16 00:00:00') <= DATE('1984-03-22')", "dt_d_f", false), + $("DATE('2020-09-16') <= TIME('09:07:00')", "d_t_t", true), + $("TIME('09:07:00') <= DATE('3077-04-12')", "t_d_t", true), + $("DATE('3077-04-12') <= TIME('00:00:00')", "d_t_f", false), + $("TIME('00:00:00') <= DATE('2020-09-16')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareLteTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('10:20:30') <= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('" + today + " 09:07:00') <= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') <= DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), - $("DATETIME('" + today + " 20:40:50') <= TIME('10:20:30')", "dt_t_f", false), - $("TIME('10:20:30') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('1984-12-15 10:20:30') <= TIME('10:20:30')", "ts_t_t", true), - $("TIME('22:15:07') <= TIMESTAMP('1984-12-15 22:15:07')", 
"t_ts_f", false), - $("TIMESTAMP('" + today + " 20:50:42') <= TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') <= DATE('3077-04-12')", "t_d_t", true), - $("DATE('2020-09-16') <= TIME('09:07:00')", "d_t_t", true), - $("TIME('00:00:00') <= DATE('1961-04-12')", "t_d_f", false), - $("DATE('3077-04-12') <= TIME('10:20:30')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('10:20:30') <= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('" + today + " 09:07:00') <= TIME('10:20:30')", "dt_t_t", true), + $("TIME('09:07:00') <= DATETIME('1961-04-12 09:07:00')", "t_dt_f", false), + $("DATETIME('" + today + " 20:40:50') <= TIME('10:20:30')", "dt_t_f", false), + $("TIME('10:20:30') <= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('1984-12-15 10:20:30') <= TIME('10:20:30')", "ts_t_t", true), + $("TIME('22:15:07') <= TIMESTAMP('1984-12-15 22:15:07')", "t_ts_f", false), + $("TIMESTAMP('" + today + " 20:50:42') <= TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') <= DATE('3077-04-12')", "t_d_t", true), + $("DATE('2020-09-16') <= TIME('09:07:00')", "d_t_t", true), + $("TIME('00:00:00') <= DATE('1961-04-12')", "t_d_f", false), + $("DATE('3077-04-12') <= TIME('10:20:30')", "d_t_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGteTimestampWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2020-09-16 10:20:30')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('1961-04-12 09:07:00')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2061-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('1961-04-12 09:07:00') >= TIMESTAMP('1984-12-15 09:07:00')", "dt_ts_f", false), - $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('1961-04-12')", "ts_d_t", true), - $("DATE('2020-09-16') >= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), - 
$("TIMESTAMP('2020-09-16 10:20:30') >= DATE('2077-04-12')", "ts_d_f", false), - $("DATE('1961-04-11') >= TIMESTAMP('1961-04-12 00:00:00')", "d_ts_f", false), - $("TIMESTAMP('" + today + " 10:20:30') >= TIME('10:20:30')", "ts_t_t", true), - $("TIME('20:50:40') >= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('1977-07-08 10:20:30') >= TIME('10:20:30')", "ts_t_f", false), - $("TIME('09:07:00') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false) - )); + return Arrays.asList( + $$( + $( + "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2020-09-16 10:20:30')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('1961-04-12 09:07:00')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('2061-04-12 09:07:00')", + "ts_dt_f", + false), + $( + "DATETIME('1961-04-12 09:07:00') >= TIMESTAMP('1984-12-15 09:07:00')", + "dt_ts_f", + false), + $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('1961-04-12')", "ts_d_t", true), + $("DATE('2020-09-16') >= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2020-09-16 10:20:30') >= DATE('2077-04-12')", "ts_d_f", false), + $("DATE('1961-04-11') >= TIMESTAMP('1961-04-12 00:00:00')", "d_ts_f", false), + $("TIMESTAMP('" + today + " 10:20:30') >= TIME('10:20:30')", "ts_t_t", true), + $("TIME('20:50:40') >= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('1977-07-08 10:20:30') >= TIME('10:20:30')", "ts_t_f", false), + $("TIME('09:07:00') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGteDateTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2020-09-16 10:20:30')", "dt_ts_t", true), - $("TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('1984-12-15 22:15:07')", "ts_dt_t", true), - $("DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2077-04-12 09:07:00')", 
"dt_ts_f", false), - $("TIMESTAMP('1961-04-12 00:00:00') >= DATETIME('1961-04-12 09:07:00')", "ts_dt_f", false), - $("DATETIME('2020-09-16 00:00:00') >= DATE('2020-09-16')", "dt_d_t", true), - $("DATE('2020-09-16') >= DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') >= DATE('2020-09-16')", "dt_d_f", false), - $("DATE('1961-04-12') >= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), - $("DATETIME('" + today + " 10:20:30') >= TIME('10:20:30')", "dt_t_t", true), - $("TIME('20:40:50') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), - $("DATETIME('1961-04-12 09:07:00') >= TIME('09:07:00')", "dt_t_f", false), - $("TIME('09:07:00') >= DATETIME('3077-12-15 22:15:07')", "t_dt_f", false) - )); + return Arrays.asList( + $$( + $( + "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2020-09-16 10:20:30')", + "dt_ts_t", + true), + $( + "TIMESTAMP('2020-09-16 10:20:30') >= DATETIME('1984-12-15 22:15:07')", + "ts_dt_t", + true), + $( + "DATETIME('2020-09-16 10:20:30') >= TIMESTAMP('2077-04-12 09:07:00')", + "dt_ts_f", + false), + $( + "TIMESTAMP('1961-04-12 00:00:00') >= DATETIME('1961-04-12 09:07:00')", + "ts_dt_f", + false), + $("DATETIME('2020-09-16 00:00:00') >= DATE('2020-09-16')", "dt_d_t", true), + $("DATE('2020-09-16') >= DATETIME('1961-04-12 09:07:00')", "d_dt_t", true), + $("DATETIME('1961-04-12 09:07:00') >= DATE('2020-09-16')", "dt_d_f", false), + $("DATE('1961-04-12') >= DATETIME('1984-12-15 22:15:07')", "d_dt_f", false), + $("DATETIME('" + today + " 10:20:30') >= TIME('10:20:30')", "dt_t_t", true), + $("TIME('20:40:50') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('1961-04-12 09:07:00') >= TIME('09:07:00')", "dt_t_f", false), + $("TIME('09:07:00') >= DATETIME('3077-12-15 22:15:07')", "t_dt_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGteDateWithOtherTypes() { - return Arrays.asList($$( - $("DATE('2020-09-16') >= TIMESTAMP('2020-09-16 
00:00:00')", "d_ts_t", true), - $("TIMESTAMP('2077-04-12 09:07:00') >= DATE('2020-09-16')", "ts_d_t", true), - $("DATE('1961-04-12') >= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), - $("TIMESTAMP('1961-04-12 09:07:00') >= DATE('1984-12-15')", "ts_d_f", false), - $("DATE('2020-09-16') >= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), - $("DATETIME('2020-09-16 00:00:00') >= DATE('1984-03-22')", "dt_d_t", true), - $("DATE('1960-12-15') >= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), - $("DATETIME('1961-04-12 10:20:30') >= DATE('1984-11-15')", "dt_d_f", false), - $("DATE('3077-04-12') >= TIME('00:00:00')", "d_t_t", true), - $("TIME('00:00:00') >= DATE('2020-09-16')", "t_d_t", true), - $("DATE('2020-09-16') >= TIME('09:07:00')", "d_t_f", false), - $("TIME('09:07:00') >= DATE('3077-04-12')", "t_d_f", false) - )); + return Arrays.asList( + $$( + $("DATE('2020-09-16') >= TIMESTAMP('2020-09-16 00:00:00')", "d_ts_t", true), + $("TIMESTAMP('2077-04-12 09:07:00') >= DATE('2020-09-16')", "ts_d_t", true), + $("DATE('1961-04-12') >= TIMESTAMP('1961-04-12 09:07:00')", "d_ts_f", false), + $("TIMESTAMP('1961-04-12 09:07:00') >= DATE('1984-12-15')", "ts_d_f", false), + $("DATE('2020-09-16') >= DATETIME('2020-09-16 00:00:00')", "d_dt_t", true), + $("DATETIME('2020-09-16 00:00:00') >= DATE('1984-03-22')", "dt_d_t", true), + $("DATE('1960-12-15') >= DATETIME('1961-04-12 09:07:00')", "d_dt_f", false), + $("DATETIME('1961-04-12 10:20:30') >= DATE('1984-11-15')", "dt_d_f", false), + $("DATE('3077-04-12') >= TIME('00:00:00')", "d_t_t", true), + $("TIME('00:00:00') >= DATE('2020-09-16')", "t_d_t", true), + $("DATE('2020-09-16') >= TIME('09:07:00')", "d_t_f", false), + $("TIME('09:07:00') >= DATE('3077-04-12')", "t_d_f", false))); } @ParametersFactory(argumentFormatting = "%1$s => %3$s") public static Iterable compareGteTimeWithOtherTypes() { var today = LocalDate.now().toString(); - return Arrays.asList($$( - $("TIME('10:20:30') >= DATETIME('" + today + " 10:20:30')", 
"t_dt_t", true), - $("DATETIME('" + today + " 20:40:50') >= TIME('10:20:30')", "dt_t_t", true), - $("TIME('09:07:00') >= DATETIME('3077-04-12 09:07:00')", "t_dt_f", false), - $("DATETIME('" + today + " 09:07:00') >= TIME('10:20:30')", "dt_t_f", false), - $("TIME('10:20:30') >= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), - $("TIMESTAMP('" + today + " 20:50:42') >= TIME('10:20:30')", "ts_t_t", true), - $("TIME('22:15:07') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false), - $("TIMESTAMP('1984-12-15 10:20:30') >= TIME('10:20:30')", "ts_t_f", false), - $("TIME('00:00:00') >= DATE('1961-04-12')", "t_d_t", true), - $("DATE('3077-04-12') >= TIME('10:20:30')", "d_t_t", true), - $("TIME('09:07:00') >= DATE('3077-04-12')", "t_d_f", false), - $("DATE('2020-09-16') >= TIME('09:07:00')", "d_t_f", false) - )); + return Arrays.asList( + $$( + $("TIME('10:20:30') >= DATETIME('" + today + " 10:20:30')", "t_dt_t", true), + $("DATETIME('" + today + " 20:40:50') >= TIME('10:20:30')", "dt_t_t", true), + $("TIME('09:07:00') >= DATETIME('3077-04-12 09:07:00')", "t_dt_f", false), + $("DATETIME('" + today + " 09:07:00') >= TIME('10:20:30')", "dt_t_f", false), + $("TIME('10:20:30') >= TIMESTAMP('" + today + " 10:20:30')", "t_ts_t", true), + $("TIMESTAMP('" + today + " 20:50:42') >= TIME('10:20:30')", "ts_t_t", true), + $("TIME('22:15:07') >= TIMESTAMP('3077-12-15 22:15:07')", "t_ts_f", false), + $("TIMESTAMP('1984-12-15 10:20:30') >= TIME('10:20:30')", "ts_t_f", false), + $("TIME('00:00:00') >= DATE('1961-04-12')", "t_d_t", true), + $("DATE('3077-04-12') >= TIME('10:20:30')", "d_t_t", true), + $("TIME('09:07:00') >= DATE('3077-04-12')", "t_d_f", false), + $("DATE('2020-09-16') >= TIME('09:07:00')", "d_t_f", false))); } @Test diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFormatsIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFormatsIT.java index fc05e502c5..d6f2d2c7f4 100644 --- 
a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFormatsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFormatsIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DATE_FORMATS; @@ -34,57 +33,72 @@ public void init() throws Exception { @Test public void testReadingDateFormats() throws IOException { - String query = String.format("SELECT weekyear_week_day, hour_minute_second_millis," + - " strict_ordinal_date_time FROM %s LIMIT 1", TEST_INDEX_DATE_FORMATS); + String query = + String.format( + "SELECT weekyear_week_day, hour_minute_second_millis," + + " strict_ordinal_date_time FROM %s LIMIT 1", + TEST_INDEX_DATE_FORMATS); JSONObject result = executeQuery(query); - verifySchema(result, + verifySchema( + result, schema("weekyear_week_day", null, "date"), schema("hour_minute_second_millis", null, "time"), schema("strict_ordinal_date_time", null, "timestamp")); - verifyDataRows(result, - rows("1984-04-12", - "09:07:42", - "1984-04-12 09:07:42.000123456" - )); + verifyDataRows(result, rows("1984-04-12", "09:07:42", "1984-04-12 09:07:42.000123456")); } @Test public void testDateFormatsWithOr() throws IOException { - String query = String.format("SELECT yyyy-MM-dd_OR_epoch_millis FROM %s", TEST_INDEX_DATE_FORMATS); + String query = + String.format("SELECT yyyy-MM-dd_OR_epoch_millis FROM %s", TEST_INDEX_DATE_FORMATS); JSONObject result = executeQuery(query); - verifyDataRows(result, - rows("1984-04-12 00:00:00"), - rows("1984-04-12 09:07:42.000123456")); + verifyDataRows(result, rows("1984-04-12 00:00:00"), rows("1984-04-12 09:07:42.000123456")); } @Test @SneakyThrows public void testCustomFormats() { - String query = String.format("SELECT custom_time, custom_timestamp, custom_date_or_date," - + "custom_date_or_custom_time, custom_time_parser_check FROM %s", TEST_INDEX_DATE_FORMATS); + String query = + String.format( + "SELECT 
custom_time, custom_timestamp, custom_date_or_date," + + "custom_date_or_custom_time, custom_time_parser_check FROM %s", + TEST_INDEX_DATE_FORMATS); JSONObject result = executeQuery(query); - verifySchema(result, + verifySchema( + result, schema("custom_time", null, "time"), schema("custom_timestamp", null, "timestamp"), schema("custom_date_or_date", null, "date"), schema("custom_date_or_custom_time", null, "timestamp"), schema("custom_time_parser_check", null, "time")); - verifyDataRows(result, - rows("09:07:42", "1984-04-12 09:07:42", "1984-04-12", "1961-04-12 00:00:00", "23:44:36.321"), - rows("21:07:42", "1984-04-12 22:07:42", "1984-04-12", "1970-01-01 09:07:00", "09:01:16.542")); + verifyDataRows( + result, + rows( + "09:07:42", "1984-04-12 09:07:42", "1984-04-12", "1961-04-12 00:00:00", "23:44:36.321"), + rows( + "21:07:42", + "1984-04-12 22:07:42", + "1984-04-12", + "1970-01-01 09:07:00", + "09:01:16.542")); } @Test @SneakyThrows public void testCustomFormats2() { - String query = String.format("SELECT custom_no_delimiter_date, custom_no_delimiter_time," - + "custom_no_delimiter_ts FROM %s", TEST_INDEX_DATE_FORMATS); + String query = + String.format( + "SELECT custom_no_delimiter_date, custom_no_delimiter_time," + + "custom_no_delimiter_ts FROM %s", + TEST_INDEX_DATE_FORMATS); JSONObject result = executeQuery(query); - verifySchema(result, + verifySchema( + result, schema("custom_no_delimiter_date", null, "date"), schema("custom_no_delimiter_time", null, "time"), schema("custom_no_delimiter_ts", null, "timestamp")); - verifyDataRows(result, + verifyDataRows( + result, rows("1984-10-20", "10:20:30", "1984-10-20 15:35:48"), rows("1961-04-12", "09:07:00", "1961-04-12 09:07:00")); } @@ -92,16 +106,21 @@ public void testCustomFormats2() { @Test @SneakyThrows public void testIncompleteFormats() { - String query = String.format("SELECT incomplete_1, incomplete_2, incorrect," - + "incomplete_custom_time, incomplete_custom_date FROM %s", TEST_INDEX_DATE_FORMATS); + 
String query = + String.format( + "SELECT incomplete_1, incomplete_2, incorrect," + + "incomplete_custom_time, incomplete_custom_date FROM %s", + TEST_INDEX_DATE_FORMATS); JSONObject result = executeQuery(query); - verifySchema(result, + verifySchema( + result, schema("incomplete_1", null, "timestamp"), schema("incomplete_2", null, "date"), schema("incorrect", null, "timestamp"), schema("incomplete_custom_time", null, "time"), schema("incomplete_custom_date", null, "date")); - verifyDataRows(result, + verifyDataRows( + result, rows("1984-01-01 00:00:00", null, null, "10:00:00", "1999-01-01"), rows("2012-01-01 00:00:00", null, null, "20:00:00", "3021-01-01")); } @@ -109,13 +128,13 @@ public void testIncompleteFormats() { @Test @SneakyThrows public void testNumericFormats() { - String query = String.format("SELECT epoch_sec, epoch_milli" - + " FROM %s", TEST_INDEX_DATE_FORMATS); + String query = + String.format("SELECT epoch_sec, epoch_milli" + " FROM %s", TEST_INDEX_DATE_FORMATS); JSONObject result = executeQuery(query); - verifySchema(result, - schema("epoch_sec", null, "timestamp"), - schema("epoch_milli", null, "timestamp")); - verifyDataRows(result, + verifySchema( + result, schema("epoch_sec", null, "timestamp"), schema("epoch_milli", null, "timestamp")); + verifyDataRows( + result, rows("1970-01-01 00:00:42", "1970-01-01 00:00:00.042"), rows("1970-01-02 03:55:00", "1970-01-01 00:01:40.5")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFunctionIT.java index 2696a9a0d6..33eb8b693f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeFunctionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -60,12 +59,14 @@ public void resetTimeZone() { } @Test - 
public void testDateInGroupBy() throws IOException{ + public void testDateInGroupBy() throws IOException { JSONObject result = - executeQuery(String.format("SELECT DATE(birthdate) FROM %s GROUP BY DATE(birthdate)",TEST_INDEX_BANK) ); - verifySchema(result, - schema("DATE(birthdate)", null, "date")); - verifyDataRows(result, + executeQuery( + String.format( + "SELECT DATE(birthdate) FROM %s GROUP BY DATE(birthdate)", TEST_INDEX_BANK)); + verifySchema(result, schema("DATE(birthdate)", null, "date")); + verifyDataRows( + result, rows("2017-10-23"), rows("2017-11-20"), rows("2018-06-23"), @@ -78,9 +79,11 @@ public void testDateInGroupBy() throws IOException{ @Test public void testDateWithHavingClauseOnly() throws IOException { JSONObject result = - executeQuery(String.format("SELECT (TO_DAYS(DATE('2050-01-01')) - 693961) FROM %s HAVING (COUNT(1) > 0)",TEST_INDEX_BANK) ); - verifySchema(result, - schema("(TO_DAYS(DATE('2050-01-01')) - 693961)", null, "long")); + executeQuery( + String.format( + "SELECT (TO_DAYS(DATE('2050-01-01')) - 693961) FROM %s HAVING (COUNT(1) > 0)", + TEST_INDEX_BANK)); + verifySchema(result, schema("(TO_DAYS(DATE('2050-01-01')) - 693961)", null, "long")); verifyDataRows(result, rows(54787)); } @@ -107,83 +110,98 @@ public void testAddDateWithDays() throws IOException { public void testAddDateWithInterval() throws IOException { JSONObject result = executeQuery("select adddate(timestamp('2020-09-16 17:30:00'), interval 1 day)"); - verifySchema(result, + verifySchema( + result, schema("adddate(timestamp('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-17 17:30:00")); result = executeQuery("select adddate(DATETIME('2020-09-16 17:30:00'), interval 1 day)"); - verifySchema(result, + verifySchema( + result, schema("adddate(DATETIME('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-17 17:30:00")); result = executeQuery("select adddate(date('2020-09-16'), 
interval 1 day)"); - verifySchema(result, - schema("adddate(date('2020-09-16'), interval 1 day)", null, "datetime")); + verifySchema(result, schema("adddate(date('2020-09-16'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-17 00:00:00")); result = executeQuery("select adddate(date('2020-09-16'), interval 1 hour)"); - verifySchema(result, - schema("adddate(date('2020-09-16'), interval 1 hour)", null, "datetime")); + verifySchema(result, schema("adddate(date('2020-09-16'), interval 1 hour)", null, "datetime")); verifyDataRows(result, rows("2020-09-16 01:00:00")); result = executeQuery("select adddate(TIME('07:40:00'), interval 1 day)"); - verifySchema(result, - schema("adddate(TIME('07:40:00'), interval 1 day)", null, "datetime")); - verifyDataRows(result, - rows(LocalDate.now().plusDays(1).atTime(LocalTime.of(7, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySchema(result, schema("adddate(TIME('07:40:00'), interval 1 day)", null, "datetime")); + verifyDataRows( + result, + rows( + LocalDate.now() + .plusDays(1) + .atTime(LocalTime.of(7, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); result = executeQuery("select adddate(TIME('07:40:00'), interval 1 hour)"); - verifySchema(result, - schema("adddate(TIME('07:40:00'), interval 1 hour)", null, "datetime")); - verifyDataRows(result, - rows(LocalDate.now().atTime(LocalTime.of(8, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySchema(result, schema("adddate(TIME('07:40:00'), interval 1 hour)", null, "datetime")); + verifyDataRows( + result, + rows( + LocalDate.now() + .atTime(LocalTime.of(8, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); } @Test public void testDateAdd() throws IOException { JSONObject result = executeQuery("select date_add(timestamp('2020-09-16 17:30:00'), 
interval 1 day)"); - verifySchema(result, + verifySchema( + result, schema("date_add(timestamp('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-17 17:30:00")); result = executeQuery("select date_add(DATETIME('2020-09-16 17:30:00'), interval 1 day)"); - verifySchema(result, + verifySchema( + result, schema("date_add(DATETIME('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-17 17:30:00")); result = executeQuery("select date_add(date('2020-09-16'), interval 1 day)"); - verifySchema(result, - schema("date_add(date('2020-09-16'), interval 1 day)", null, "datetime")); + verifySchema(result, schema("date_add(date('2020-09-16'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-17 00:00:00")); result = executeQuery("select date_add(date('2020-09-16'), interval 1 hour)"); - verifySchema(result, - schema("date_add(date('2020-09-16'), interval 1 hour)", null, "datetime")); + verifySchema(result, schema("date_add(date('2020-09-16'), interval 1 hour)", null, "datetime")); verifyDataRows(result, rows("2020-09-16 01:00:00")); result = executeQuery("select date_add(TIME('07:40:00'), interval 1 day)"); - verifySchema(result, - schema("date_add(TIME('07:40:00'), interval 1 day)", null, "datetime")); - verifyDataRows(result, - rows(LocalDate.now().plusDays(1).atTime(LocalTime.of(7, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySchema(result, schema("date_add(TIME('07:40:00'), interval 1 day)", null, "datetime")); + verifyDataRows( + result, + rows( + LocalDate.now() + .plusDays(1) + .atTime(LocalTime.of(7, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); result = executeQuery("select date_add(TIME('07:40:00'), interval 1 hour)"); - verifySchema(result, - schema("date_add(TIME('07:40:00'), interval 1 hour)", null, "datetime")); - verifyDataRows(result, 
- rows(LocalDate.now().atTime(LocalTime.of(8, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); - - result = executeQuery(String.format("SELECT DATE_ADD(birthdate, INTERVAL 1 YEAR) FROM %s", - TEST_INDEX_BANK)); - - verifySchema(result, - schema("DATE_ADD(birthdate, INTERVAL 1 YEAR)", null, "datetime")); - verifyDataRows(result, + verifySchema(result, schema("date_add(TIME('07:40:00'), interval 1 hour)", null, "datetime")); + verifyDataRows( + result, + rows( + LocalDate.now() + .atTime(LocalTime.of(8, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + + result = + executeQuery( + String.format("SELECT DATE_ADD(birthdate, INTERVAL 1 YEAR) FROM %s", TEST_INDEX_BANK)); + + verifySchema(result, schema("DATE_ADD(birthdate, INTERVAL 1 YEAR)", null, "datetime")); + verifyDataRows( + result, rows("2018-10-23 00:00:00"), rows("2018-11-20 00:00:00"), rows("2019-06-23 00:00:00"), @@ -197,38 +215,45 @@ public void testDateAdd() throws IOException { public void testDateSub() throws IOException { JSONObject result = executeQuery("select date_sub(timestamp('2020-09-16 17:30:00'), interval 1 day)"); - verifySchema(result, + verifySchema( + result, schema("date_sub(timestamp('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-15 17:30:00")); result = executeQuery("select date_sub(DATETIME('2020-09-16 17:30:00'), interval 1 day)"); - verifySchema(result, + verifySchema( + result, schema("date_sub(DATETIME('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-15 17:30:00")); result = executeQuery("select date_sub(date('2020-09-16'), interval 1 day)"); - verifySchema(result, - schema("date_sub(date('2020-09-16'), interval 1 day)", null, "datetime")); + verifySchema(result, schema("date_sub(date('2020-09-16'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-15 
00:00:00")); result = executeQuery("select date_sub(date('2020-09-16'), interval 1 hour)"); - verifySchema(result, - schema("date_sub(date('2020-09-16'), interval 1 hour)", null, "datetime")); + verifySchema(result, schema("date_sub(date('2020-09-16'), interval 1 hour)", null, "datetime")); verifyDataRows(result, rows("2020-09-15 23:00:00")); result = executeQuery("select date_sub(TIME('07:40:00'), interval 1 day)"); - verifySchema(result, - schema("date_sub(TIME('07:40:00'), interval 1 day)", null, "datetime")); - verifyDataRows(result, - rows(LocalDate.now().plusDays(-1).atTime(LocalTime.of(7, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySchema(result, schema("date_sub(TIME('07:40:00'), interval 1 day)", null, "datetime")); + verifyDataRows( + result, + rows( + LocalDate.now() + .plusDays(-1) + .atTime(LocalTime.of(7, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); result = executeQuery("select date_sub(TIME('07:40:00'), interval 1 hour)"); - verifySchema(result, - schema("date_sub(TIME('07:40:00'), interval 1 hour)", null, "datetime")); - verifyDataRows(result, - rows(LocalDate.now().atTime(LocalTime.of(6, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySchema(result, schema("date_sub(TIME('07:40:00'), interval 1 hour)", null, "datetime")); + verifyDataRows( + result, + rows( + LocalDate.now() + .atTime(LocalTime.of(6, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); } @Test @@ -282,30 +307,34 @@ public void testDayOfMonthAliasesReturnTheSameResults() throws IOException { verifyDataRows(result1, rows(22)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofmonth(date0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT 
day_of_month(date0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT dayofmonth(date0) FROM %s", TEST_INDEX_CALCS)); + result2 = executeQuery(String.format("SELECT day_of_month(date0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofmonth(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_month(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format( + "SELECT dayofmonth(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format( + "SELECT day_of_month(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofmonth(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_month(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format("SELECT dayofmonth(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format("SELECT day_of_month(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofmonth(datetime0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_month(datetime0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT dayofmonth(datetime0) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery(String.format("SELECT day_of_month(datetime0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); } + @Test public void testDayOfWeek() throws IOException { JSONObject 
result = executeQuery("select dayofweek(date('2020-09-16'))"); @@ -335,28 +364,31 @@ public void testDayOfWeekAliasesReturnTheSameResults() throws IOException { verifyDataRows(result1, rows(3)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofweek(date0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_week(date0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT dayofweek(date0) FROM %s", TEST_INDEX_CALCS)); + result2 = executeQuery(String.format("SELECT day_of_week(date0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofweek(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_week(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format( + "SELECT dayofweek(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format( + "SELECT day_of_week(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofweek(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_week(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format("SELECT dayofweek(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format("SELECT day_of_week(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofweek(datetime0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - 
"SELECT day_of_week(datetime0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT dayofweek(datetime0) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery(String.format("SELECT day_of_week(datetime0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); } @@ -397,30 +429,34 @@ public void testDayOfYearAlternateSyntaxesReturnTheSameResults() throws IOExcept verifyDataRows(result1, rows(326)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofyear(date0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_year(date0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT dayofyear(date0) FROM %s", TEST_INDEX_CALCS)); + result2 = executeQuery(String.format("SELECT day_of_year(date0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofyear(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_year(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format( + "SELECT dayofyear(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format( + "SELECT day_of_year(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofyear(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_year(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format("SELECT dayofyear(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format("SELECT 
day_of_year(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT dayofyear(datetime0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT day_of_year(datetime0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT dayofyear(datetime0) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery(String.format("SELECT day_of_year(datetime0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); } + @Test public void testFromDays() throws IOException { JSONObject result = executeQuery("select from_days(738049)"); @@ -450,13 +486,11 @@ public void testHour() throws IOException { @Test public void testHourOfDayWithUnderscores() throws IOException { JSONObject result = executeQuery("select hour_of_day(timestamp('2020-09-16 17:30:00'))"); - verifySchema(result, schema( - "hour_of_day(timestamp('2020-09-16 17:30:00'))", null, "integer")); + verifySchema(result, schema("hour_of_day(timestamp('2020-09-16 17:30:00'))", null, "integer")); verifyDataRows(result, rows(17)); result = executeQuery("select hour_of_day(datetime('2020-09-16 17:30:00'))"); - verifySchema(result, schema( - "hour_of_day(datetime('2020-09-16 17:30:00'))", null, "integer")); + verifySchema(result, schema("hour_of_day(datetime('2020-09-16 17:30:00'))", null, "integer")); verifyDataRows(result, rows(17)); result = executeQuery("select hour_of_day(time('17:30:00'))"); @@ -474,41 +508,45 @@ public void testHourOfDayWithUnderscores() throws IOException { @Test public void testExtractWithDatetime() throws IOException { - JSONObject datetimeResult = executeQuery( - String.format( - "SELECT extract(DAY_SECOND FROM datetime(cast(datetime0 AS STRING))) FROM %s LIMIT 1", - TEST_INDEX_CALCS)); + JSONObject datetimeResult = + executeQuery( + String.format( + "SELECT extract(DAY_SECOND FROM 
datetime(cast(datetime0 AS STRING))) FROM %s LIMIT" + + " 1", + TEST_INDEX_CALCS)); verifyDataRows(datetimeResult, rows(9101735)); } @Test public void testExtractWithTime() throws IOException { - JSONObject timeResult = executeQuery( - String.format( - "SELECT extract(HOUR_SECOND FROM time0) FROM %s LIMIT 1", - TEST_INDEX_CALCS)); + JSONObject timeResult = + executeQuery( + String.format( + "SELECT extract(HOUR_SECOND FROM time0) FROM %s LIMIT 1", TEST_INDEX_CALCS)); verifyDataRows(timeResult, rows(210732)); - } @Test public void testExtractWithDate() throws IOException { - JSONObject dateResult = executeQuery( - String.format( - "SELECT extract(YEAR_MONTH FROM date0) FROM %s LIMIT 1", - TEST_INDEX_CALCS)); + JSONObject dateResult = + executeQuery( + String.format( + "SELECT extract(YEAR_MONTH FROM date0) FROM %s LIMIT 1", TEST_INDEX_CALCS)); verifyDataRows(dateResult, rows(200404)); } @Test public void testExtractWithDifferentTypesReturnSameResult() throws IOException { - JSONObject dateResult = executeQuery( - String.format("SELECT extract(YEAR_MONTH FROM datetime0) FROM %s LIMIT 1", TEST_INDEX_CALCS)); + JSONObject dateResult = + executeQuery( + String.format( + "SELECT extract(YEAR_MONTH FROM datetime0) FROM %s LIMIT 1", TEST_INDEX_CALCS)); - JSONObject datetimeResult = executeQuery( - String.format( - "SELECT extract(YEAR_MONTH FROM date(datetime0)) FROM %s LIMIT 1", - TEST_INDEX_CALCS)); + JSONObject datetimeResult = + executeQuery( + String.format( + "SELECT extract(YEAR_MONTH FROM date(datetime0)) FROM %s LIMIT 1", + TEST_INDEX_CALCS)); dateResult.getJSONArray("datarows").similar(datetimeResult.getJSONArray("datarows")); } @@ -520,63 +558,55 @@ public void testHourFunctionAliasesReturnTheSameResults() throws IOException { verifyDataRows(result1, rows(11)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT hour(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); - result2 
= executeQuery(String.format( - "SELECT hour_of_day(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format( + "SELECT hour(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format( + "SELECT hour_of_day(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT hour(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT hour_of_day(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery(String.format("SELECT hour(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format("SELECT hour_of_day(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT hour(datetime0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT hour_of_day(datetime0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT hour(datetime0) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery(String.format("SELECT hour_of_day(datetime0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); } @Test public void testLastDay() throws IOException { - JSONObject result = executeQuery( - String.format("SELECT last_day(date0) FROM %s LIMIT 3", - TEST_INDEX_CALCS)); - verifyDataRows(result, - rows("2004-04-30"), - rows("1972-07-31"), - rows("1975-11-30")); - - result = executeQuery( - String.format("SELECT last_day(date0) FROM %s LIMIT 3", - TEST_INDEX_CALCS)); - verifyDataRows(result, - rows("2004-04-30"), - rows("1972-07-31"), - rows("1975-11-30")); - - result = executeQuery( - String.format("SELECT last_day(date0) FROM %s LIMIT 3", - 
TEST_INDEX_CALCS)); - verifyDataRows(result, - rows("2004-04-30"), - rows("1972-07-31"), - rows("1975-11-30")); + JSONObject result = + executeQuery(String.format("SELECT last_day(date0) FROM %s LIMIT 3", TEST_INDEX_CALCS)); + verifyDataRows(result, rows("2004-04-30"), rows("1972-07-31"), rows("1975-11-30")); + + result = + executeQuery(String.format("SELECT last_day(date0) FROM %s LIMIT 3", TEST_INDEX_CALCS)); + verifyDataRows(result, rows("2004-04-30"), rows("1972-07-31"), rows("1975-11-30")); + + result = + executeQuery(String.format("SELECT last_day(date0) FROM %s LIMIT 3", TEST_INDEX_CALCS)); + verifyDataRows(result, rows("2004-04-30"), rows("1972-07-31"), rows("1975-11-30")); } @Test public void testMicrosecond() throws IOException { JSONObject result = executeQuery("select microsecond(timestamp('2020-09-16 17:30:00.123456'))"); - verifySchema(result, - schema("microsecond(timestamp('2020-09-16 17:30:00.123456'))", null, "integer")); + verifySchema( + result, schema("microsecond(timestamp('2020-09-16 17:30:00.123456'))", null, "integer")); verifyDataRows(result, rows(123456)); // Explicit timestamp value with less than 6 microsecond digits result = executeQuery("select microsecond(timestamp('2020-09-16 17:30:00.1234'))"); - verifySchema(result, - schema("microsecond(timestamp('2020-09-16 17:30:00.1234'))", null, "integer")); + verifySchema( + result, schema("microsecond(timestamp('2020-09-16 17:30:00.1234'))", null, "integer")); verifyDataRows(result, rows(123400)); result = executeQuery("select microsecond(time('17:30:00.000010'))"); @@ -626,11 +656,11 @@ public void testMinute() throws IOException { verifyDataRows(result, rows(30)); } - @Test public void testMinuteOfDay() throws IOException { JSONObject result = executeQuery("select minute_of_day(timestamp('2020-09-16 17:30:00'))"); - verifySchema(result, schema("minute_of_day(timestamp('2020-09-16 17:30:00'))", null, "integer")); + verifySchema( + result, schema("minute_of_day(timestamp('2020-09-16 
17:30:00'))", null, "integer")); verifyDataRows(result, rows(1050)); result = executeQuery("select minute_of_day(datetime('2020-09-16 17:30:00'))"); @@ -653,8 +683,8 @@ public void testMinuteOfDay() throws IOException { @Test public void testMinuteOfHour() throws IOException { JSONObject result = executeQuery("select minute_of_hour(timestamp('2020-09-16 17:30:00'))"); - verifySchema(result, schema( - "minute_of_hour(timestamp('2020-09-16 17:30:00'))", null, "integer")); + verifySchema( + result, schema("minute_of_hour(timestamp('2020-09-16 17:30:00'))", null, "integer")); verifyDataRows(result, rows(30)); result = executeQuery("select minute_of_hour(time('17:30:00'))"); @@ -677,22 +707,29 @@ public void testMinuteFunctionAliasesReturnTheSameResults() throws IOException { verifyDataRows(result1, rows(30)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT minute(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT minute_of_hour(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format( + "SELECT minute(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format( + "SELECT minute_of_hour(datetime(CAST(time0 AS STRING))) FROM %s", + TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT minute(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT minute_of_hour(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format("SELECT minute(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format( + "SELECT minute_of_hour(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); 
result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT minute(datetime0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT minute_of_hour(datetime0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT minute(datetime0) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery(String.format("SELECT minute_of_hour(datetime0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); } @@ -718,7 +755,8 @@ public void testMonthOfYearTypes() throws IOException { verifyDataRows(result, rows(9)); result = executeQuery("select month_of_year(timestamp('2020-09-16 00:00:00'))"); - verifySchema(result, schema("month_of_year(timestamp('2020-09-16 00:00:00'))", null, "integer")); + verifySchema( + result, schema("month_of_year(timestamp('2020-09-16 00:00:00'))", null, "integer")); verifyDataRows(result, rows(9)); result = executeQuery("select month_of_year('2020-09-16')"); @@ -733,28 +771,31 @@ public void testMonthAlternateSyntaxesReturnTheSameResults() throws IOException verifyDataRows(result1, rows(11)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT month(date0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT month_of_year(date0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT month(date0) FROM %s", TEST_INDEX_CALCS)); + result2 = executeQuery(String.format("SELECT month_of_year(date0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT month(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT month_of_year(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + 
String.format( + "SELECT month(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format( + "SELECT month_of_year(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT month(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT month_of_year(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format("SELECT month(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format("SELECT month_of_year(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT month(datetime0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT month_of_year(datetime0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT month(datetime0) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery(String.format("SELECT month_of_year(datetime0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); } @@ -782,12 +823,9 @@ public void testQuarter() throws IOException { @Test public void testSecToTime() throws IOException { - JSONObject result = executeQuery( - String.format("SELECT sec_to_time(balance) FROM %s LIMIT 3", TEST_INDEX_BANK)); - verifyDataRows(result, - rows("10:53:45"), - rows("01:34:46"), - rows("09:07:18")); + JSONObject result = + executeQuery(String.format("SELECT sec_to_time(balance) FROM %s LIMIT 3", TEST_INDEX_BANK)); + verifyDataRows(result, rows("10:53:45"), rows("01:34:46"), rows("09:07:18")); } @Test @@ -811,7 +849,8 @@ public void testSecond() throws IOException { public void testSecondOfMinute() throws IOException { JSONObject result = 
executeQuery("select second_of_minute(timestamp('2020-09-16 17:30:00'))"); - verifySchema(result, schema("second_of_minute(timestamp('2020-09-16 17:30:00'))", null, "integer")); + verifySchema( + result, schema("second_of_minute(timestamp('2020-09-16 17:30:00'))", null, "integer")); verifyDataRows(result, rows(0)); result = executeQuery("select second_of_minute(time('17:30:00'))"); @@ -834,70 +873,68 @@ public void testSecondFunctionAliasesReturnTheSameResults() throws IOException { verifyDataRows(result1, rows(34)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT second(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT second_of_minute(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format( + "SELECT second(datetime(CAST(time0 AS STRING))) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format( + "SELECT second_of_minute(datetime(CAST(time0 AS STRING))) FROM %s", + TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT second(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT second_of_minute(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result1 = + executeQuery( + String.format("SELECT second(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery( + String.format( + "SELECT second_of_minute(CAST(time0 AS STRING)) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); - result1 = executeQuery(String.format( - "SELECT second(datetime0) FROM %s", TEST_INDEX_CALCS)); - result2 = executeQuery(String.format( - "SELECT second_of_minute(datetime0) FROM %s", TEST_INDEX_CALCS)); + result1 = executeQuery(String.format("SELECT 
second(datetime0) FROM %s", TEST_INDEX_CALCS)); + result2 = + executeQuery(String.format("SELECT second_of_minute(datetime0) FROM %s", TEST_INDEX_CALCS)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); } @Test public void testStrToDate() throws IOException { - //Ideal case - JSONObject result = executeQuery( - String.format("SELECT str_to_date(CAST(birthdate AS STRING)," - + " '%%Y-%%m-%%d %%h:%%i:%%s') FROM %s LIMIT 2", - TEST_INDEX_BANK)); - verifyDataRows(result, - rows("2017-10-23 00:00:00"), - rows("2017-11-20 00:00:00") - ); - - //Bad string format case - result = executeQuery( - String.format("SELECT str_to_date(CAST(birthdate AS STRING)," - + " '%%Y %%s') FROM %s LIMIT 2", - TEST_INDEX_BANK)); - verifyDataRows(result, - rows((Object) null), - rows((Object) null) - ); - - //bad date format case - result = executeQuery( - String.format("SELECT str_to_date(firstname," - + " '%%Y-%%m-%%d %%h:%%i:%%s') FROM %s LIMIT 2", - TEST_INDEX_BANK)); - verifyDataRows(result, - rows((Object) null), - rows((Object) null) - ); + // Ideal case + JSONObject result = + executeQuery( + String.format( + "SELECT str_to_date(CAST(birthdate AS STRING)," + + " '%%Y-%%m-%%d %%h:%%i:%%s') FROM %s LIMIT 2", + TEST_INDEX_BANK)); + verifyDataRows(result, rows("2017-10-23 00:00:00"), rows("2017-11-20 00:00:00")); + + // Bad string format case + result = + executeQuery( + String.format( + "SELECT str_to_date(CAST(birthdate AS STRING)," + " '%%Y %%s') FROM %s LIMIT 2", + TEST_INDEX_BANK)); + verifyDataRows(result, rows((Object) null), rows((Object) null)); + + // bad date format case + result = + executeQuery( + String.format( + "SELECT str_to_date(firstname," + " '%%Y-%%m-%%d %%h:%%i:%%s') FROM %s LIMIT 2", + TEST_INDEX_BANK)); + verifyDataRows(result, rows((Object) null), rows((Object) null)); } @Test public void testSubDateWithDays() throws IOException { - var result = - executeQuery("select subdate(date('2020-09-16'), 1)"); - verifySchema(result, - 
schema("subdate(date('2020-09-16'), 1)", null, "date")); + var result = executeQuery("select subdate(date('2020-09-16'), 1)"); + verifySchema(result, schema("subdate(date('2020-09-16'), 1)", null, "date")); verifyDataRows(result, rows("2020-09-15")); - result = - executeQuery("select subdate(timestamp('2020-09-16 17:30:00'), 1)"); - verifySchema(result, - schema("subdate(timestamp('2020-09-16 17:30:00'), 1)", null, "datetime")); + result = executeQuery("select subdate(timestamp('2020-09-16 17:30:00'), 1)"); + verifySchema(result, schema("subdate(timestamp('2020-09-16 17:30:00'), 1)", null, "datetime")); verifyDataRows(result, rows("2020-09-15 17:30:00")); result = executeQuery("select subdate(DATETIME('2020-09-16 07:40:00'), 1)"); @@ -913,60 +950,68 @@ public void testSubDateWithDays() throws IOException { public void testSubDateWithInterval() throws IOException { JSONObject result = executeQuery("select subdate(timestamp('2020-09-16 17:30:00'), interval 1 day)"); - verifySchema(result, + verifySchema( + result, schema("subdate(timestamp('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-15 17:30:00")); result = executeQuery("select subdate(DATETIME('2020-09-16 17:30:00'), interval 1 day)"); - verifySchema(result, + verifySchema( + result, schema("subdate(DATETIME('2020-09-16 17:30:00'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-15 17:30:00")); result = executeQuery("select subdate(date('2020-09-16'), interval 1 day)"); - verifySchema(result, - schema("subdate(date('2020-09-16'), interval 1 day)", null, "datetime")); + verifySchema(result, schema("subdate(date('2020-09-16'), interval 1 day)", null, "datetime")); verifyDataRows(result, rows("2020-09-15 00:00:00")); result = executeQuery("select subdate(date('2020-09-16'), interval 1 hour)"); - verifySchema(result, - schema("subdate(date('2020-09-16'), interval 1 hour)", null, "datetime")); + verifySchema(result, 
schema("subdate(date('2020-09-16'), interval 1 hour)", null, "datetime")); verifyDataRows(result, rows("2020-09-15 23:00:00")); result = executeQuery("select subdate(TIME('07:40:00'), interval 1 day)"); - verifySchema(result, - schema("subdate(TIME('07:40:00'), interval 1 day)", null, "datetime")); - verifyDataRows(result, - rows(LocalDate.now().plusDays(-1).atTime(LocalTime.of(7, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySchema(result, schema("subdate(TIME('07:40:00'), interval 1 day)", null, "datetime")); + verifyDataRows( + result, + rows( + LocalDate.now() + .plusDays(-1) + .atTime(LocalTime.of(7, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); result = executeQuery("select subdate(TIME('07:40:00'), interval 1 hour)"); - verifySchema(result, - schema("subdate(TIME('07:40:00'), interval 1 hour)", null, "datetime")); - verifyDataRows(result, - rows(LocalDate.now().atTime(LocalTime.of(6, 40)).atZone(systemTz.toZoneId()) - .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); + verifySchema(result, schema("subdate(TIME('07:40:00'), interval 1 hour)", null, "datetime")); + verifyDataRows( + result, + rows( + LocalDate.now() + .atTime(LocalTime.of(6, 40)) + .atZone(systemTz.toZoneId()) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))); } @Test - public void testTimstampadd() throws IOException { - JSONObject result = executeQuery( - String.format("SELECT timestampadd(WEEK, 2, time0) FROM %s LIMIT 3", TEST_INDEX_CALCS)); + public void testTimstampadd() throws IOException { + JSONObject result = + executeQuery( + String.format("SELECT timestampadd(WEEK, 2, time0) FROM %s LIMIT 3", TEST_INDEX_CALCS)); - verifyDataRows(result, + verifyDataRows( + result, rows("1900-01-13 21:07:32"), rows("1900-01-15 13:48:48"), rows("1900-01-15 18:21:08")); } @Test - public void testTimstampdiff() throws IOException { - JSONObject result = 
executeQuery( - String.format("SELECT timestampdiff(DAY, time0, datetime0) FROM %s LIMIT 3", TEST_INDEX_CALCS)); + public void testTimstampdiff() throws IOException { + JSONObject result = + executeQuery( + String.format( + "SELECT timestampdiff(DAY, time0, datetime0) FROM %s LIMIT 3", TEST_INDEX_CALCS)); - verifyDataRows(result, - rows(38176), - rows(38191), - rows(38198)); + verifyDataRows(result, rows(38176), rows(38191), rows(38198)); } @Test @@ -993,16 +1038,20 @@ public void testToDays() throws IOException { @Test public void testToSeconds() throws IOException { - JSONObject result = executeQuery( - String.format("select to_seconds(date0) FROM %s LIMIT 2", TEST_INDEX_CALCS)); + JSONObject result = + executeQuery(String.format("select to_seconds(date0) FROM %s LIMIT 2", TEST_INDEX_CALCS)); verifyDataRows(result, rows(63249206400L), rows(62246275200L)); - result = executeQuery( - String.format("SELECT to_seconds(datetime(cast(datetime0 AS string))) FROM %s LIMIT 2", TEST_INDEX_CALCS)); + result = + executeQuery( + String.format( + "SELECT to_seconds(datetime(cast(datetime0 AS string))) FROM %s LIMIT 2", + TEST_INDEX_CALCS)); verifyDataRows(result, rows(63256587455L), rows(63258064234L)); - result = executeQuery(String.format( - "select to_seconds(datetime0) FROM %s LIMIT 2", TEST_INDEX_CALCS)); + result = + executeQuery( + String.format("select to_seconds(datetime0) FROM %s LIMIT 2", TEST_INDEX_CALCS)); verifyDataRows(result, rows(63256587455L), rows(63258064234L)); } @@ -1017,11 +1066,14 @@ public void testYear() throws IOException { verifyDataRows(result, rows(2020)); } - private void week(String date, int mode, int expectedResult, String functionName) throws IOException { - JSONObject result = executeQuery(StringUtils.format("select %s(date('%s'), %d)", functionName, date, - mode)); - verifySchema(result, - schema(StringUtils.format("%s(date('%s'), %d)", functionName, date, mode), null, "integer")); + private void week(String date, int mode, int 
expectedResult, String functionName) + throws IOException { + JSONObject result = + executeQuery(StringUtils.format("select %s(date('%s'), %d)", functionName, date, mode)); + verifySchema( + result, + schema( + StringUtils.format("%s(date('%s'), %d)", functionName, date, mode), null, "integer")); verifyDataRows(result, rows(expectedResult)); } @@ -1040,7 +1092,8 @@ public void testWeek() throws IOException { @Test public void testWeekday() throws IOException { - JSONObject result = executeQuery(String.format("SELECT weekday(date0) FROM %s LIMIT 3", TEST_INDEX_CALCS)); + JSONObject result = + executeQuery(String.format("SELECT weekday(date0) FROM %s LIMIT 3", TEST_INDEX_CALCS)); verifyDataRows(result, rows(3), rows(1), rows(2)); } @@ -1071,12 +1124,9 @@ public void testWeekOfYear() throws IOException { } private void compareWeekResults(String arg, String table) throws IOException { - JSONObject result1 = executeQuery(String.format( - "SELECT week(%s) FROM %s", arg, table)); - JSONObject result2 = executeQuery(String.format( - "SELECT week_of_year(%s) FROM %s", arg, table)); - JSONObject result3 = executeQuery(String.format( - "SELECT weekofyear(%s) FROM %s", arg, table)); + JSONObject result1 = executeQuery(String.format("SELECT week(%s) FROM %s", arg, table)); + JSONObject result2 = executeQuery(String.format("SELECT week_of_year(%s) FROM %s", arg, table)); + JSONObject result3 = executeQuery(String.format("SELECT weekofyear(%s) FROM %s", arg, table)); result1.getJSONArray("datarows").similar(result2.getJSONArray("datarows")); result1.getJSONArray("datarows").similar(result3.getJSONArray("datarows")); @@ -1099,13 +1149,16 @@ public void testWeekAlternateSyntaxesReturnTheSameResults() throws IOException { @Test public void testYearweek() throws IOException { - JSONObject result = executeQuery( - String.format("SELECT yearweek(time0), yearweek(time0, 4) FROM %s LIMIT 2", TEST_INDEX_CALCS)); + JSONObject result = + executeQuery( + String.format( + "SELECT 
yearweek(time0), yearweek(time0, 4) FROM %s LIMIT 2", TEST_INDEX_CALCS)); verifyDataRows(result, rows(189952, 189952), rows(189953, 190001)); } - void verifyDateFormat(String date, String type, String format, String formatted) throws IOException { + void verifyDateFormat(String date, String type, String format, String formatted) + throws IOException { String query = String.format("date_format(%s('%s'), '%s')", type, date, format); JSONObject result = executeQuery("select " + query); verifySchema(result, schema(query, null, "keyword")); @@ -1120,10 +1173,11 @@ void verifyDateFormat(String date, String type, String format, String formatted) @Test public void testDateFormat() throws IOException { String timestamp = "1998-01-31 13:14:15.012345"; - String timestampFormat = "%a %b %c %D %d %e %f %H %h %I %i %j %k %l %M " - + "%m %p %r %S %s %T %% %P"; - String timestampFormatted = "Sat Jan 01 31st 31 31 012345 13 01 01 14 031 13 1 " - + "January 01 PM 01:14:15 PM 15 15 13:14:15 % P"; + String timestampFormat = + "%a %b %c %D %d %e %f %H %h %I %i %j %k %l %M " + "%m %p %r %S %s %T %% %P"; + String timestampFormatted = + "Sat Jan 01 31st 31 31 012345 13 01 01 14 031 13 1 " + + "January 01 PM 01:14:15 PM 15 15 13:14:15 % P"; verifyDateFormat(timestamp, "timestamp", timestampFormat, timestampFormatted); String date = "1998-01-31"; @@ -1134,9 +1188,10 @@ public void testDateFormat() throws IOException { @Test public void testMakeTime() throws IOException { - var result = executeQuery( - "select MAKETIME(20, 30, 40) as f1, MAKETIME(20.2, 49.5, 42.100502) as f2"); - verifySchema(result, + var result = + executeQuery("select MAKETIME(20, 30, 40) as f1, MAKETIME(20.2, 49.5, 42.100502) as f2"); + verifySchema( + result, schema("MAKETIME(20, 30, 40)", "f1", "time"), schema("MAKETIME(20.2, 49.5, 42.100502)", "f2", "time")); verifyDataRows(result, rows("20:30:40", "20:50:42.100502")); @@ -1144,9 +1199,9 @@ public void testMakeTime() throws IOException { @Test public void 
testMakeDate() throws IOException { - var result = executeQuery( - "select MAKEDATE(1945, 5.9) as f1, MAKEDATE(1984, 1984) as f2"); - verifySchema(result, + var result = executeQuery("select MAKEDATE(1945, 5.9) as f1, MAKEDATE(1984, 1984) as f2"); + verifySchema( + result, schema("MAKEDATE(1945, 5.9)", "f1", "date"), schema("MAKEDATE(1984, 1984)", "f2", "date")); verifyDataRows(result, rows("1945-01-06", "1989-06-06")); @@ -1154,30 +1209,35 @@ public void testMakeDate() throws IOException { @Test public void testFromUnixTime() throws IOException { - var result = executeQuery( - "select FROM_UNIXTIME(200300400) f1, FROM_UNIXTIME(12224.12) f2, " - + "FROM_UNIXTIME(1662601316, '%T') f3"); - verifySchema(result, - schema("FROM_UNIXTIME(200300400)", "f1", "datetime"), + var result = + executeQuery( + "select FROM_UNIXTIME(200300400) f1, FROM_UNIXTIME(12224.12) f2, " + + "FROM_UNIXTIME(1662601316, '%T') f3"); + verifySchema( + result, + schema("FROM_UNIXTIME(200300400)", "f1", "datetime"), schema("FROM_UNIXTIME(12224.12)", "f2", "datetime"), schema("FROM_UNIXTIME(1662601316, '%T')", "f3", "keyword")); - verifySome(result.getJSONArray("datarows"), + verifySome( + result.getJSONArray("datarows"), rows("1976-05-07 07:00:00", "1970-01-01 03:23:44.12", "01:41:56")); } @Test - public void testGetFormatAsArgument() throws IOException{ + public void testGetFormatAsArgument() throws IOException { var result = executeQuery("SELECT DATE_FORMAT('2003-10-03',GET_FORMAT(DATE,'USA'))"); verifyDataRows(result, rows("10.03.2003")); } @Test public void testUnixTimeStamp() throws IOException { - var result = executeQuery( - "select UNIX_TIMESTAMP(MAKEDATE(1984, 1984)) f1, " - + "UNIX_TIMESTAMP(TIMESTAMP('2003-12-31 12:00:00')) f2, " - + "UNIX_TIMESTAMP(20771122143845) f3"); - verifySchema(result, + var result = + executeQuery( + "select UNIX_TIMESTAMP(MAKEDATE(1984, 1984)) f1, " + + "UNIX_TIMESTAMP(TIMESTAMP('2003-12-31 12:00:00')) f2, " + + "UNIX_TIMESTAMP(20771122143845) f3"); + 
verifySchema( + result, schema("UNIX_TIMESTAMP(MAKEDATE(1984, 1984))", "f1", "double"), schema("UNIX_TIMESTAMP(TIMESTAMP('2003-12-31 12:00:00'))", "f2", "double"), schema("UNIX_TIMESTAMP(20771122143845)", "f3", "double")); @@ -1186,9 +1246,9 @@ public void testUnixTimeStamp() throws IOException { @Test public void testPeriodAdd() throws IOException { - var result = executeQuery( - "select PERIOD_ADD(200801, 2) as f1, PERIOD_ADD(200801, -12) as f2"); - verifySchema(result, + var result = executeQuery("select PERIOD_ADD(200801, 2) as f1, PERIOD_ADD(200801, -12) as f2"); + verifySchema( + result, schema("PERIOD_ADD(200801, 2)", "f1", "integer"), schema("PERIOD_ADD(200801, -12)", "f2", "integer")); verifyDataRows(result, rows(200803, 200701)); @@ -1196,57 +1256,103 @@ public void testPeriodAdd() throws IOException { @Test public void testPeriodDiff() throws IOException { - var result = executeQuery( - "select PERIOD_DIFF(200802, 200703) as f1, PERIOD_DIFF(200802, 201003) as f2"); - verifySchema(result, + var result = + executeQuery("select PERIOD_DIFF(200802, 200703) as f1, PERIOD_DIFF(200802, 201003) as f2"); + verifySchema( + result, schema("PERIOD_DIFF(200802, 200703)", "f1", "integer"), schema("PERIOD_DIFF(200802, 201003)", "f2", "integer")); verifyDataRows(result, rows(11, -25)); } public void testAddTime() throws IOException { - var result = executeQuery("SELECT" - + " ADDTIME(DATE('2008-12-12'), DATE('2008-11-15')) AS `'2008-12-12' + 0`," - + " ADDTIME(TIME('23:59:59'), DATE('2004-01-01')) AS `'23:59:59' + 0`," - + " ADDTIME(DATE('2004-01-01'), TIME('23:59:59')) AS `'2004-01-01' + '23:59:59'`," - + " ADDTIME(TIME('10:20:30'), TIME('00:05:42')) AS `'10:20:30' + '00:05:42'`," - + " ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00')) AS `'15:42:13' + '09:07:00'`"); - verifySchema(result, + var result = + executeQuery( + "SELECT ADDTIME(DATE('2008-12-12'), DATE('2008-11-15')) AS `'2008-12-12' + 0`," + + " ADDTIME(TIME('23:59:59'), 
DATE('2004-01-01')) AS `'23:59:59' + 0`," + + " ADDTIME(DATE('2004-01-01'), TIME('23:59:59')) AS `'2004-01-01' + '23:59:59'`," + + " ADDTIME(TIME('10:20:30'), TIME('00:05:42')) AS `'10:20:30' + '00:05:42'`," + + " ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00')) AS" + + " `'15:42:13' + '09:07:00'`"); + verifySchema( + result, schema("ADDTIME(DATE('2008-12-12'), DATE('2008-11-15'))", "'2008-12-12' + 0", "datetime"), schema("ADDTIME(TIME('23:59:59'), DATE('2004-01-01'))", "'23:59:59' + 0", "time"), - schema("ADDTIME(DATE('2004-01-01'), TIME('23:59:59'))", "'2004-01-01' + '23:59:59'", "datetime"), + schema( + "ADDTIME(DATE('2004-01-01'), TIME('23:59:59'))", + "'2004-01-01' + '23:59:59'", + "datetime"), schema("ADDTIME(TIME('10:20:30'), TIME('00:05:42'))", "'10:20:30' + '00:05:42'", "time"), - schema("ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))", "'15:42:13' + '09:07:00'", "datetime")); - verifyDataRows(result, rows("2008-12-12 00:00:00", "23:59:59", "2004-01-01 23:59:59", "10:26:12", "2000-01-01 00:49:13")); + schema( + "ADDTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))", + "'15:42:13' + '09:07:00'", + "datetime")); + verifyDataRows( + result, + rows( + "2008-12-12 00:00:00", + "23:59:59", + "2004-01-01 23:59:59", + "10:26:12", + "2000-01-01 00:49:13")); } @Test public void testSubTime() throws IOException { - var result = executeQuery("SELECT" - + " SUBTIME(DATE('2008-12-12'), DATE('2008-11-15')) AS `'2008-12-12' - 0`," - + " SUBTIME(TIME('23:59:59'), DATE('2004-01-01')) AS `'23:59:59' - 0`," - + " SUBTIME(DATE('2004-01-01'), TIME('23:59:59')) AS `'2004-01-01' - '23:59:59'`," - + " SUBTIME(TIME('10:20:30'), TIME('00:05:42')) AS `'10:20:30' - '00:05:42'`," - + " SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00')) AS `'15:42:13' - '09:07:00'`"); - verifySchema(result, + var result = + executeQuery( + "SELECT SUBTIME(DATE('2008-12-12'), DATE('2008-11-15')) AS 
`'2008-12-12' - 0`," + + " SUBTIME(TIME('23:59:59'), DATE('2004-01-01')) AS `'23:59:59' - 0`," + + " SUBTIME(DATE('2004-01-01'), TIME('23:59:59')) AS `'2004-01-01' - '23:59:59'`," + + " SUBTIME(TIME('10:20:30'), TIME('00:05:42')) AS `'10:20:30' - '00:05:42'`," + + " SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00')) AS" + + " `'15:42:13' - '09:07:00'`"); + verifySchema( + result, schema("SUBTIME(DATE('2008-12-12'), DATE('2008-11-15'))", "'2008-12-12' - 0", "datetime"), schema("SUBTIME(TIME('23:59:59'), DATE('2004-01-01'))", "'23:59:59' - 0", "time"), - schema("SUBTIME(DATE('2004-01-01'), TIME('23:59:59'))", "'2004-01-01' - '23:59:59'", "datetime"), + schema( + "SUBTIME(DATE('2004-01-01'), TIME('23:59:59'))", + "'2004-01-01' - '23:59:59'", + "datetime"), schema("SUBTIME(TIME('10:20:30'), TIME('00:05:42'))", "'10:20:30' - '00:05:42'", "time"), - schema("SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))", "'15:42:13' - '09:07:00'", "datetime")); - verifyDataRows(result, rows("2008-12-12 00:00:00", "23:59:59", "2003-12-31 00:00:01", "10:14:48", "1999-12-31 06:35:13")); + schema( + "SUBTIME(TIMESTAMP('1999-12-31 15:42:13'), DATETIME('1961-04-12 09:07:00'))", + "'15:42:13' - '09:07:00'", + "datetime")); + verifyDataRows( + result, + rows( + "2008-12-12 00:00:00", + "23:59:59", + "2003-12-31 00:00:01", + "10:14:48", + "1999-12-31 06:35:13")); } public void testDateDiff() throws IOException { - var result = executeQuery("SELECT" - + " DATEDIFF(TIMESTAMP('2000-01-02 00:00:00'), TIMESTAMP('2000-01-01 23:59:59')) AS `'2000-01-02' - '2000-01-01'`," - + " DATEDIFF(DATE('2001-02-01'), TIMESTAMP('2004-01-01 00:00:00')) AS `'2001-02-01' - '2004-01-01'`," - + " DATEDIFF(TIMESTAMP('2004-01-01 00:00:00'), DATETIME('2002-02-01 14:25:30')) AS `'2004-01-01' - '2002-02-01'`," - + " DATEDIFF(TIME('23:59:59'), TIME('00:00:00')) AS `today - today`"); - verifySchema(result, - schema("DATEDIFF(TIMESTAMP('2000-01-02 00:00:00'), 
TIMESTAMP('2000-01-01 23:59:59'))", "'2000-01-02' - '2000-01-01'", "long"), - schema("DATEDIFF(DATE('2001-02-01'), TIMESTAMP('2004-01-01 00:00:00'))", "'2001-02-01' - '2004-01-01'", "long"), - schema("DATEDIFF(TIMESTAMP('2004-01-01 00:00:00'), DATETIME('2002-02-01 14:25:30'))", "'2004-01-01' - '2002-02-01'", "long"), + var result = + executeQuery( + "SELECT DATEDIFF(TIMESTAMP('2000-01-02 00:00:00'), TIMESTAMP('2000-01-01 23:59:59')) AS" + + " `'2000-01-02' - '2000-01-01'`, DATEDIFF(DATE('2001-02-01')," + + " TIMESTAMP('2004-01-01 00:00:00')) AS `'2001-02-01' - '2004-01-01'`," + + " DATEDIFF(TIMESTAMP('2004-01-01 00:00:00'), DATETIME('2002-02-01 14:25:30')) AS" + + " `'2004-01-01' - '2002-02-01'`, DATEDIFF(TIME('23:59:59'), TIME('00:00:00')) AS" + + " `today - today`"); + verifySchema( + result, + schema( + "DATEDIFF(TIMESTAMP('2000-01-02 00:00:00'), TIMESTAMP('2000-01-01 23:59:59'))", + "'2000-01-02' - '2000-01-01'", + "long"), + schema( + "DATEDIFF(DATE('2001-02-01'), TIMESTAMP('2004-01-01 00:00:00'))", + "'2001-02-01' - '2004-01-01'", + "long"), + schema( + "DATEDIFF(TIMESTAMP('2004-01-01 00:00:00'), DATETIME('2002-02-01 14:25:30'))", + "'2004-01-01' - '2002-02-01'", + "long"), schema("DATEDIFF(TIME('23:59:59'), TIME('00:00:00'))", "today - today", "long")); verifyDataRows(result, rows(1, -1064, 699, 0)); } @@ -1258,7 +1364,8 @@ public void testTimeDiff() throws IOException { verifyDataRows(result, rows("10:59:59")); } - void verifyTimeFormat(String time, String type, String format, String formatted) throws IOException { + void verifyTimeFormat(String time, String type, String format, String formatted) + throws IOException { String query = String.format("time_format(%s('%s'), '%s')", type, time, format); JSONObject result = executeQuery("select " + query); verifySchema(result, schema(query, null, "keyword")); @@ -1358,16 +1465,16 @@ public void testBracketedEquivalent() throws IOException { compareBrackets("time", "time", "17:30:00"); compareBrackets("time", "t", 
"17:30:00"); } - + @Test public void testBracketFails() { - assertThrows(ResponseException.class, ()->executeQuery("select {time '2020-09-16'}")); - assertThrows(ResponseException.class, ()->executeQuery("select {t '2020-09-16'}")); - assertThrows(ResponseException.class, ()->executeQuery("select {date '17:30:00'}")); - assertThrows(ResponseException.class, ()->executeQuery("select {d '17:30:00'}")); - assertThrows(ResponseException.class, ()->executeQuery("select {timestamp '2020-09-16'}")); - assertThrows(ResponseException.class, ()->executeQuery("select {ts '2020-09-16'}")); - assertThrows(ResponseException.class, ()->executeQuery("select {timestamp '17:30:00'}")); - assertThrows(ResponseException.class, ()->executeQuery("select {ts '17:30:00'}")); + assertThrows(ResponseException.class, () -> executeQuery("select {time '2020-09-16'}")); + assertThrows(ResponseException.class, () -> executeQuery("select {t '2020-09-16'}")); + assertThrows(ResponseException.class, () -> executeQuery("select {date '17:30:00'}")); + assertThrows(ResponseException.class, () -> executeQuery("select {d '17:30:00'}")); + assertThrows(ResponseException.class, () -> executeQuery("select {timestamp '2020-09-16'}")); + assertThrows(ResponseException.class, () -> executeQuery("select {ts '2020-09-16'}")); + assertThrows(ResponseException.class, () -> executeQuery("select {timestamp '17:30:00'}")); + assertThrows(ResponseException.class, () -> executeQuery("select {ts '17:30:00'}")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeImplementationIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeImplementationIT.java index ff2c4c07a6..8ffa1df8f3 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeImplementationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/DateTimeImplementationIT.java @@ -5,18 +5,16 @@ package org.opensearch.sql.sql; -import org.junit.Test; -import org.opensearch.sql.legacy.SQLIntegTestCase; - -import 
java.io.IOException; - import static org.opensearch.sql.util.MatcherUtils.rows; import static org.opensearch.sql.util.MatcherUtils.schema; import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; import static org.opensearch.sql.util.MatcherUtils.verifySchema; -public class DateTimeImplementationIT extends SQLIntegTestCase { +import java.io.IOException; +import org.junit.Test; +import org.opensearch.sql.legacy.SQLIntegTestCase; +public class DateTimeImplementationIT extends SQLIntegTestCase { @Override public void init() throws Exception { @@ -26,136 +24,118 @@ public void init() throws Exception { @Test public void inRangeZeroToStringTZ() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles')"); - verifySchema(result, + var result = + executeJdbcRequest("SELECT DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles')"); + verifySchema( + result, schema("DATETIME('2008-12-25 05:30:00+00:00', 'America/Los_Angeles')", null, "datetime")); verifyDataRows(result, rows("2008-12-24 21:30:00")); } @Test public void inRangeZeroToPositive() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-12-25 05:30:00+00:00', '+01:00')"); - verifySchema(result, - schema("DATETIME('2008-12-25 05:30:00+00:00', '+01:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT DATETIME('2008-12-25 05:30:00+00:00', '+01:00')"); + verifySchema( + result, schema("DATETIME('2008-12-25 05:30:00+00:00', '+01:00')", null, "datetime")); verifyDataRows(result, rows("2008-12-25 06:30:00")); } @Test public void inRangeNegativeToPositive() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-12-25 05:30:00-05:00', '+05:00')"); - verifySchema(result, - schema("DATETIME('2008-12-25 05:30:00-05:00', '+05:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT DATETIME('2008-12-25 05:30:00-05:00', '+05:00')"); + verifySchema( + result, 
schema("DATETIME('2008-12-25 05:30:00-05:00', '+05:00')", null, "datetime")); verifyDataRows(result, rows("2008-12-25 15:30:00")); } @Test public void inRangeTwentyHourOffset() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2004-02-28 23:00:00-10:00', '+10:00')"); - verifySchema(result, - schema("DATETIME('2004-02-28 23:00:00-10:00', '+10:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT DATETIME('2004-02-28 23:00:00-10:00', '+10:00')"); + verifySchema( + result, schema("DATETIME('2004-02-28 23:00:00-10:00', '+10:00')", null, "datetime")); verifyDataRows(result, rows("2004-02-29 19:00:00")); } @Test public void inRangeYearChange() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-01-01 02:00:00+10:00', '-10:00')"); - verifySchema(result, - schema("DATETIME('2008-01-01 02:00:00+10:00', '-10:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+10:00', '-10:00')"); + verifySchema( + result, schema("DATETIME('2008-01-01 02:00:00+10:00', '-10:00')", null, "datetime")); verifyDataRows(result, rows("2007-12-31 06:00:00")); } @Test public void inRangeZeroNoToTZ() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-01-01 02:00:00+10:00')"); - verifySchema(result, - schema("DATETIME('2008-01-01 02:00:00+10:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+10:00')"); + verifySchema(result, schema("DATETIME('2008-01-01 02:00:00+10:00')", null, "datetime")); verifyDataRows(result, rows("2008-01-01 02:00:00")); } @Test public void inRangeZeroNoTZ() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-01-01 02:00:00')"); - verifySchema(result, - schema("DATETIME('2008-01-01 02:00:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00')"); + verifySchema(result, schema("DATETIME('2008-01-01 02:00:00')", null, 
"datetime")); verifyDataRows(result, rows("2008-01-01 02:00:00")); } @Test public void inRangeZeroDayConvert() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-01-01 02:00:00+12:00', '-12:00')"); - verifySchema(result, - schema("DATETIME('2008-01-01 02:00:00+12:00', '-12:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+12:00', '-12:00')"); + verifySchema( + result, schema("DATETIME('2008-01-01 02:00:00+12:00', '-12:00')", null, "datetime")); verifyDataRows(result, rows("2007-12-31 02:00:00")); } @Test public void inRangeJustInRangeNegative() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-01-01 02:00:00+10:00', '-13:59')"); - verifySchema(result, - schema("DATETIME('2008-01-01 02:00:00+10:00', '-13:59')", null, "datetime")); + var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+10:00', '-13:59')"); + verifySchema( + result, schema("DATETIME('2008-01-01 02:00:00+10:00', '-13:59')", null, "datetime")); verifyDataRows(result, rows("2007-12-31 02:01:00")); } @Test public void inRangeJustInRangePositive() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-01-01 02:00:00+14:00', '-10:00')"); - verifySchema(result, - schema("DATETIME('2008-01-01 02:00:00+14:00', '-10:00')", null, "datetime")); + var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+14:00', '-10:00')"); + verifySchema( + result, schema("DATETIME('2008-01-01 02:00:00+14:00', '-10:00')", null, "datetime")); verifyDataRows(result, rows("2007-12-31 02:00:00")); } @Test public void nullField3Under() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-01-01 02:00:00+10:00', '-14:01')"); - verifySchema(result, - schema("DATETIME('2008-01-01 02:00:00+10:00', '-14:01')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 
02:00:00+10:00', '-14:01')"); + verifySchema( + result, schema("DATETIME('2008-01-01 02:00:00+10:00', '-14:01')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullField1Over() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2008-01-01 02:00:00+14:01', '-10:00')"); - verifySchema(result, - schema("DATETIME('2008-01-01 02:00:00+14:01', '-10:00')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT DATETIME('2008-01-01 02:00:00+14:01', '-10:00')"); + verifySchema( + result, schema("DATETIME('2008-01-01 02:00:00+14:01', '-10:00')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueFebruary() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2021-02-30 10:00:00')"); - verifySchema(result, - schema("DATETIME('2021-02-30 10:00:00')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT DATETIME('2021-02-30 10:00:00')"); + verifySchema(result, schema("DATETIME('2021-02-30 10:00:00')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueApril() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2021-04-31 10:00:00')"); - verifySchema(result, - schema("DATETIME('2021-04-31 10:00:00')", null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT DATETIME('2021-04-31 10:00:00')"); + verifySchema(result, schema("DATETIME('2021-04-31 10:00:00')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } @Test public void nullDateTimeInvalidDateValueMonth() throws IOException { - var result = executeJdbcRequest( - "SELECT DATETIME('2021-13-03 10:00:00')"); - verifySchema(result, - schema("DATETIME('2021-13-03 10:00:00')", 
null, "datetime")); - verifyDataRows(result, rows(new Object[]{null})); + var result = executeJdbcRequest("SELECT DATETIME('2021-13-03 10:00:00')"); + verifySchema(result, schema("DATETIME('2021-13-03 10:00:00')", null, "datetime")); + verifyDataRows(result, rows(new Object[] {null})); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/ExpressionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/ExpressionIT.java index 30211366b1..be1471641e 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/ExpressionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/ExpressionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.hamcrest.Matchers.is; @@ -23,15 +22,14 @@ import org.opensearch.sql.legacy.RestIntegTestCase; /** - * Integration test for different type of expressions such as literals, arithmetic, predicate - * and function expression. Since comparison test in {@link SQLCorrectnessIT} is enforced, - * this kind of manual written IT class will be focused on anomaly case test. + * Integration test for different type of expressions such as literals, arithmetic, predicate and + * function expression. Since comparison test in {@link SQLCorrectnessIT} is enforced, this kind of + * manual written IT class will be focused on anomaly case test. */ @Ignore public class ExpressionIT extends RestIntegTestCase { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = ExpectedException.none(); @Override protected void init() throws Exception { @@ -44,8 +42,7 @@ public ResponseExceptionAssertion expectResponseException() { /** * Response exception assertion helper to assert property value in OpenSearch ResponseException - * and Response inside. This serves as syntax sugar to improve the readability of test - * code. + * and Response inside. This serves as syntax sugar to improve the readability of test code. 
*/ private static class ResponseExceptionAssertion { private final ExpectedException exceptionRule; @@ -57,9 +54,12 @@ private ResponseExceptionAssertion(ExpectedException exceptionRule) { } ResponseExceptionAssertion hasStatusCode(int expected) { - exceptionRule.expect(featureValueOf("statusCode", is(expected), - (Function) e -> - e.getResponse().getStatusLine().getStatusCode())); + exceptionRule.expect( + featureValueOf( + "statusCode", + is(expected), + (Function) + e -> e.getResponse().getStatusLine().getStatusCode())); return this; } @@ -83,5 +83,4 @@ private static Response executeQuery(String query) throws IOException { return client().performRequest(request); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/HighlightFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/HighlightFunctionIT.java index 0ab6d5c70f..d0f890526b 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/HighlightFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/HighlightFunctionIT.java @@ -11,12 +11,12 @@ import static org.opensearch.sql.util.MatcherUtils.verifySchema; import com.google.common.collect.ImmutableMap; +import java.util.List; import org.json.JSONArray; import org.json.JSONObject; import org.junit.Test; import org.opensearch.sql.legacy.SQLIntegTestCase; import org.opensearch.sql.legacy.TestsConstants; -import java.util.List; public class HighlightFunctionIT extends SQLIntegTestCase { @@ -30,90 +30,126 @@ public void single_highlight_test() { String query = "SELECT Tags, highlight('Tags') FROM %s WHERE match(Tags, 'yeast') LIMIT 1"; JSONObject response = executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_BEER)); - verifySchema(response, schema("Tags", null, "text"), - schema("highlight('Tags')", null, "nested")); + verifySchema( + response, schema("Tags", null, "text"), schema("highlight('Tags')", null, "nested")); assertEquals(1, response.getInt("total")); - verifyDataRows(response, - 
rows("alcohol-level yeast home-brew champagne", + verifyDataRows( + response, + rows( + "alcohol-level yeast home-brew champagne", new JSONArray(List.of("alcohol-level yeast home-brew champagne")))); } @Test public void highlight_optional_arguments_test() { - String query = "SELECT highlight('Tags', pre_tags='', post_tags='') " + - "FROM %s WHERE match(Tags, 'yeast') LIMIT 1"; + String query = + "SELECT highlight('Tags', pre_tags='', post_tags='') " + + "FROM %s WHERE match(Tags, 'yeast') LIMIT 1"; JSONObject response = executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_BEER)); - verifySchema(response, schema("highlight('Tags', pre_tags='', post_tags='')", - null, "nested")); + verifySchema( + response, + schema("highlight('Tags', pre_tags='', post_tags='')", null, "nested")); assertEquals(1, response.getInt("total")); - verifyDataRows(response, + verifyDataRows( + response, rows(new JSONArray(List.of("alcohol-level yeast home-brew champagne")))); } @Test public void highlight_multiple_optional_arguments_test() { - String query = "SELECT highlight(Title), highlight(Body, pre_tags='', post_tags='') FROM %s WHERE multi_match([Title, Body], 'IPA') LIMIT 1"; + String query = + "SELECT highlight(Title), highlight(Body, pre_tags='', post_tags='') FROM %s WHERE multi_match([Title, Body], 'IPA')" + + " LIMIT 1"; JSONObject response = executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_BEER)); - verifySchema(response, schema("highlight(Title)", null, "nested"), - schema("highlight(Body, pre_tags='', " + - "post_tags='')", null, "nested")); + verifySchema( + response, + schema("highlight(Title)", null, "nested"), + schema( + "highlight(Body, pre_tags='', " + + "post_tags='')", + null, + "nested")); assertEquals(1, response.getInt("size")); - verifyDataRows(response, rows(new JSONArray(List.of("What are the differences between an IPA" + - " and its variants?")), - new JSONArray(List.of("

I know what makes an IPA" + - " an IPA, but what are the unique characteristics of it's" + - " common variants?", - "To be specific, the ones I'm interested in are Double IPA " + - "and Black IPA, but general differences" + - " between")))); + verifyDataRows( + response, + rows( + new JSONArray( + List.of("What are the differences between an IPA" + " and its variants?")), + new JSONArray( + List.of( + "

I know what makes an IPA an" + + " IPA, but what are the" + + " unique characteristics of it's common variants?", + "To be specific, the ones I'm interested in are Double IPA and Black IPA, but general differences" + + " between")))); } @Test public void multiple_highlight_test() { - String query = "SELECT highlight(Title), highlight(Tags) FROM %s WHERE MULTI_MATCH([Title, Tags], 'hops') LIMIT 1"; + String query = + "SELECT highlight(Title), highlight(Tags) FROM %s WHERE MULTI_MATCH([Title, Tags], 'hops')" + + " LIMIT 1"; JSONObject response = executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_BEER)); - verifySchema(response, schema("highlight(Title)", null, "nested"), + verifySchema( + response, + schema("highlight(Title)", null, "nested"), schema("highlight(Tags)", null, "nested")); assertEquals(1, response.getInt("total")); - verifyDataRows(response, - rows( new JSONArray(List.of("What uses do hops have outside of brewing?")), + verifyDataRows( + response, + rows( + new JSONArray(List.of("What uses do hops have outside of brewing?")), new JSONArray(List.of("hops history")))); } @Test public void wildcard_highlight_test() { - String query = "SELECT highlight('*itle') FROM %s WHERE MULTI_MATCH([Title, Tags], 'hops') LIMIT 1"; + String query = + "SELECT highlight('*itle') FROM %s WHERE MULTI_MATCH([Title, Tags], 'hops') LIMIT 1"; JSONObject response = executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_BEER)); verifySchema(response, schema("highlight('*itle')", null, "object")); assertEquals(1, response.getInt("total")); - verifyDataRows(response, rows(new JSONObject(ImmutableMap.of( - "Title", new JSONArray(List.of("What uses do hops have outside of brewing?")))))); + verifyDataRows( + response, + rows( + new JSONObject( + ImmutableMap.of( + "Title", + new JSONArray( + List.of("What uses do hops have outside of brewing?")))))); } @Test public void wildcard_multi_field_highlight_test() { - String query = "SELECT highlight('T*') FROM %s 
WHERE MULTI_MATCH([Title, Tags], 'hops') LIMIT 1"; + String query = + "SELECT highlight('T*') FROM %s WHERE MULTI_MATCH([Title, Tags], 'hops') LIMIT 1"; JSONObject response = executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_BEER)); verifySchema(response, schema("highlight('T*')", null, "object")); assertEquals(1, response.getInt("total")); - verifyDataRows(response, rows(new JSONObject(ImmutableMap.of( - "Title", new JSONArray(List.of("What uses do hops have outside of brewing?")), - "Tags", new JSONArray(List.of("hops history")))))); + verifyDataRows( + response, + rows( + new JSONObject( + ImmutableMap.of( + "Title", + new JSONArray( + List.of("What uses do hops have outside of brewing?")), + "Tags", new JSONArray(List.of("hops history")))))); } @Test @@ -124,9 +160,15 @@ public void highlight_all_test() { verifySchema(response, schema("highlight('*')", null, "object")); assertEquals(1, response.getInt("total")); - verifyDataRows(response, rows(new JSONObject(ImmutableMap.of( - "Title", new JSONArray(List.of("What uses do hops have outside of brewing?")), - "Tags", new JSONArray(List.of("hops history")))))); + verifyDataRows( + response, + rows( + new JSONObject( + ImmutableMap.of( + "Title", + new JSONArray( + List.of("What uses do hops have outside of brewing?")), + "Tags", new JSONArray(List.of("hops history")))))); } @Test @@ -136,14 +178,23 @@ public void highlight_no_limit_test() { verifySchema(response, schema("highlight(Body)", null, "nested")); assertEquals(2, response.getInt("total")); - verifyDataRows(response, rows(new JSONArray(List.of("Boiling affects hops, by boiling" + - " off the aroma and extracting more of the organic acids that provide"))), - - rows(new JSONArray(List.of("

Do hops have (or had in the past) any use outside of brewing beer?", - "when-was-the-first-beer-ever-brewed\">dating first modern beers we have the first record" + - " of cultivating hops", - "predating the first record of use of hops in beer by nearly a century.", - "Could the hops have been cultivated for any other purpose than brewing, " + - "or can we safely assume if they")))); + verifyDataRows( + response, + rows( + new JSONArray( + List.of( + "Boiling affects hops, by boiling" + + " off the aroma and extracting more of the organic acids that provide"))), + rows( + new JSONArray( + List.of( + "

Do hops have (or had in the past) any use outside of brewing" + + " beer?", + "when-was-the-first-beer-ever-brewed\">dating first modern beers we have" + + " the first record of cultivating hops", + "predating the first record of use of hops in beer by nearly a" + + " century.", + "Could the hops have been cultivated for any other purpose than" + + " brewing, or can we safely assume if they")))); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/IdentifierIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/IdentifierIT.java index 22632cc4de..2c1796f0c3 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/IdentifierIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/IdentifierIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.util.MatcherUtils.rows; @@ -14,16 +13,12 @@ import static org.opensearch.sql.util.TestUtils.performRequest; import java.io.IOException; -import java.util.ArrayList; -import org.json.JSONArray; import org.json.JSONObject; import org.junit.jupiter.api.Test; import org.opensearch.client.Request; import org.opensearch.sql.legacy.SQLIntegTestCase; -/** - * Integration tests for identifiers including index and field name symbol. - */ +/** Integration tests for identifiers including index and field name symbol. 
*/ public class IdentifierIT extends SQLIntegTestCase { @Test @@ -48,12 +43,13 @@ public void testQuotedIndexNames() throws IOException { @Test public void testSpecialFieldName() throws IOException { - new Index("test") - .addDoc("{\"@timestamp\": 10, \"dimensions:major_version\": 30}"); - final JSONObject result = new JSONObject(executeQuery("SELECT @timestamp, " - + "`dimensions:major_version` FROM test", "jdbc")); + new Index("test").addDoc("{\"@timestamp\": 10, \"dimensions:major_version\": 30}"); + final JSONObject result = + new JSONObject( + executeQuery("SELECT @timestamp, " + "`dimensions:major_version` FROM test", "jdbc")); - verifySchema(result, + verifySchema( + result, schema("@timestamp", null, "long"), schema("dimensions:major_version", null, "long")); verifyDataRows(result, rows(10, 30)); @@ -68,12 +64,11 @@ public void testMultipleQueriesWithSpecialIndexNames() throws IOException { @Test public void testDoubleUnderscoreIdentifierTest() throws IOException { - new Index("test.twounderscores") - .addDoc("{\"__age\": 30}"); - final JSONObject result = new JSONObject(executeQuery("SELECT __age FROM test.twounderscores", "jdbc")); + new Index("test.twounderscores").addDoc("{\"__age\": 30}"); + final JSONObject result = + new JSONObject(executeQuery("SELECT __age FROM test.twounderscores", "jdbc")); - verifySchema(result, - schema("__age", null, "long")); + verifySchema(result, schema("__age", null, "long")); verifyDataRows(result, rows(30)); } @@ -85,19 +80,20 @@ public void testMetafieldIdentifierTest() throws IOException { new Index(index).addDoc("{\"age\": 30}", id); // Execute using field metadata values - final JSONObject result = new JSONObject(executeQuery( - "SELECT *, _id, _index, _score, _maxscore, _sort " - + "FROM " + index, - "jdbc")); + final JSONObject result = + new JSONObject( + executeQuery( + "SELECT *, _id, _index, _score, _maxscore, _sort " + "FROM " + index, "jdbc")); // Verify that the metadata values are returned when requested - 
verifySchema(result, - schema("age", null, "long"), - schema("_id", null, "keyword"), - schema("_index", null, "keyword"), - schema("_score", null, "float"), - schema("_maxscore", null, "float"), - schema("_sort", null, "long")); + verifySchema( + result, + schema("age", null, "long"), + schema("_id", null, "keyword"), + schema("_index", null, "keyword"), + schema("_score", null, "float"), + schema("_maxscore", null, "float"), + schema("_sort", null, "long")); verifyDataRows(result, rows(30, id, index, 1.0, 1.0, -2)); } @@ -115,13 +111,13 @@ public void testMetafieldIdentifierRoutingSelectTest() throws IOException { .addDocWithShardId("{\"age\": 35}", "test5", "test5"); // Execute using field metadata values filtering on the routing shard hash id - final JSONObject result = new JSONObject(executeQuery( - "SELECT age, _id, _index, _routing " - + "FROM " + index, - "jdbc")); + final JSONObject result = + new JSONObject( + executeQuery("SELECT age, _id, _index, _routing " + "FROM " + index, "jdbc")); // Verify that the metadata values are returned when requested - verifySchema(result, + verifySchema( + result, schema("age", null, "long"), schema("_id", null, "keyword"), schema("_index", null, "keyword"), @@ -153,14 +149,19 @@ public void testMetafieldIdentifierRoutingFilterTest() throws IOException { .addDocWithShardId("{\"age\": 36}", "test6", "test6"); // Execute using field metadata values filtering on the routing shard hash id - final JSONObject result = new JSONObject(executeQuery( - "SELECT _id, _index, _routing " - + "FROM " + index + " " - + "WHERE _routing = \\\"test4\\\"", - "jdbc")); + final JSONObject result = + new JSONObject( + executeQuery( + "SELECT _id, _index, _routing " + + "FROM " + + index + + " " + + "WHERE _routing = \\\"test4\\\"", + "jdbc")); // Verify that the metadata values are returned when requested - verifySchema(result, + verifySchema( + result, schema("_id", null, "keyword"), schema("_index", null, "keyword"), schema("_routing", null, 
"keyword")); @@ -172,7 +173,6 @@ public void testMetafieldIdentifierRoutingFilterTest() throws IOException { assertEquals("test4", datarows.getJSONArray(0).getString(0)); // note that _routing in the SELECT clause returns the shard, not the routing hash id assertTrue(datarows.getJSONArray(0).getString(2).contains("[" + index + "]")); - } @Test @@ -183,14 +183,21 @@ public void testMetafieldIdentifierWithAliasTest() throws IOException { new Index(index).addDoc("{\"age\": 30}", id); // Execute using field metadata values - final JSONObject result = new JSONObject(executeQuery( - "SELECT _id AS A, _index AS B, _score AS C, _maxscore AS D, _sort AS E " - + "FROM " + index + " " - + "WHERE _id = \\\"" + id + "\\\"", - "jdbc")); + final JSONObject result = + new JSONObject( + executeQuery( + "SELECT _id AS A, _index AS B, _score AS C, _maxscore AS D, _sort AS E " + + "FROM " + + index + + " " + + "WHERE _id = \\\"" + + id + + "\\\"", + "jdbc")); // Verify that the metadata values are returned when requested - verifySchema(result, + verifySchema( + result, schema("_id", "A", "keyword"), schema("_index", "B", "keyword"), schema("_score", "C", "float"), @@ -211,9 +218,7 @@ private void queryAndAssertTheDoc(String sql) { verifyDataRows(result, rows(30)); } - /** - * Index abstraction for test code readability. - */ + /** Index abstraction for test code readability. 
*/ private static class Index { private final String indexName; @@ -243,18 +248,20 @@ void addDoc(String doc) { } public Index addDoc(String doc, String id) { - Request indexDoc = new Request("POST", String.format("/%s/_doc/%s?refresh=true", indexName, id)); + Request indexDoc = + new Request("POST", String.format("/%s/_doc/%s?refresh=true", indexName, id)); indexDoc.setJsonEntity(doc); performRequest(client(), indexDoc); return this; } public Index addDocWithShardId(String doc, String id, String routing) { - Request indexDoc = new Request("POST", String.format("/%s/_doc/%s?refresh=true&routing=%s", indexName, id, routing)); + Request indexDoc = + new Request( + "POST", String.format("/%s/_doc/%s?refresh=true&routing=%s", indexName, id, routing)); indexDoc.setJsonEntity(doc); performRequest(client(), indexDoc); return this; } } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/JdbcFormatIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/JdbcFormatIT.java index 4b158d73df..f36992b1d0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/JdbcFormatIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/JdbcFormatIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -23,11 +22,16 @@ protected void init() throws Exception { @Test public void testSimpleDataTypesInSchema() { - JSONObject response = new JSONObject(executeQuery( - "SELECT account_number, address, age, birthdate, city, male, state " - + "FROM " + TEST_INDEX_BANK, "jdbc")); - - verifySchema(response, + JSONObject response = + new JSONObject( + executeQuery( + "SELECT account_number, address, age, birthdate, city, male, state " + + "FROM " + + TEST_INDEX_BANK, + "jdbc")); + + verifySchema( + response, schema("account_number", "long"), schema("address", "text"), schema("age", "integer"), @@ -39,10 +43,10 @@ public void testSimpleDataTypesInSchema() { @Test 
public void testAliasInSchema() { - JSONObject response = new JSONObject(executeQuery( - "SELECT account_number AS acc FROM " + TEST_INDEX_BANK, "jdbc")); + JSONObject response = + new JSONObject( + executeQuery("SELECT account_number AS acc FROM " + TEST_INDEX_BANK, "jdbc")); verifySchema(response, schema("account_number", "acc", "long")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/LegacyAPICompatibilityIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/LegacyAPICompatibilityIT.java index adc40a24ec..e9c0fd2c55 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/LegacyAPICompatibilityIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/LegacyAPICompatibilityIT.java @@ -17,7 +17,6 @@ import java.io.IOException; import org.json.JSONObject; import org.junit.Assert; -import org.junit.Ignore; import org.junit.Test; import org.opensearch.client.Request; import org.opensearch.client.RequestOptions; @@ -25,9 +24,7 @@ import org.opensearch.sql.legacy.SQLIntegTestCase; import org.opensearch.sql.legacy.utils.StringUtils; -/** - * For backward compatibility, check if legacy API endpoints are accessible. - */ +/** For backward compatibility, check if legacy API endpoints are accessible. 
*/ public class LegacyAPICompatibilityIT extends SQLIntegTestCase { @Override @@ -57,8 +54,8 @@ public void explain() throws IOException { @Test public void closeCursor() throws IOException { - String sql = StringUtils.format( - "SELECT firstname FROM %s WHERE balance > 100", TEST_INDEX_ACCOUNT); + String sql = + StringUtils.format("SELECT firstname FROM %s WHERE balance > 100", TEST_INDEX_ACCOUNT); JSONObject result = new JSONObject(executeFetchQuery(sql, 50, "jdbc")); Request request = new Request("POST", LEGACY_CURSOR_CLOSE_ENDPOINT); @@ -77,44 +74,36 @@ public void stats() throws IOException { @Test public void legacySettingsLegacyEndpoint() throws IOException { - String requestBody = "{" - + " \"persistent\": {" - + " \"opendistro.sql.query.slowlog\": \"10\"" - + " }" - + "}"; + String requestBody = + "{" + " \"persistent\": {" + " \"opendistro.sql.query.slowlog\": \"10\"" + " }" + "}"; Response response = updateSetting(LEGACY_SQL_SETTINGS_API_ENDPOINT, requestBody); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); } @Test public void legacySettingNewEndpoint() throws IOException { - String requestBody = "{" - + " \"persistent\": {" - + " \"opendistro.query.size_limit\": \"100\"" - + " }" - + "}"; + String requestBody = + "{" + " \"persistent\": {" + " \"opendistro.query.size_limit\": \"100\"" + " }" + "}"; Response response = updateSetting(SETTINGS_API_ENDPOINT, requestBody); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); } @Test public void newSettingsLegacyEndpoint() throws IOException { - String requestBody = "{" - + " \"persistent\": {" - + " \"plugins.sql.slowlog\": \"10\"" - + " }" - + "}"; + String requestBody = + "{" + " \"persistent\": {" + " \"plugins.sql.slowlog\": \"10\"" + " }" + "}"; Response response = updateSetting(LEGACY_SQL_SETTINGS_API_ENDPOINT, requestBody); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); } @Test public void newSettingNewEndpoint() throws IOException { - String 
requestBody = "{" - + " \"persistent\": {" - + " \"plugins.query.metrics.rolling_interval\": \"80\"" - + " }" - + "}"; + String requestBody = + "{" + + " \"persistent\": {" + + " \"plugins.query.metrics.rolling_interval\": \"80\"" + + " }" + + "}"; Response response = updateSetting(SETTINGS_API_ENDPOINT, requestBody); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); } @@ -131,5 +120,4 @@ private RequestOptions.Builder buildJsonOption() { restOptionsBuilder.addHeader("Content-Type", "application/json"); return restOptionsBuilder; } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/LikeQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/LikeQueryIT.java index f0e82adb6f..c5ff50898a 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/LikeQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/LikeQueryIT.java @@ -3,19 +3,17 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; -import org.json.JSONObject; -import org.junit.Test; -import org.opensearch.sql.legacy.SQLIntegTestCase; - -import java.io.IOException; - import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_WILDCARD; import static org.opensearch.sql.util.MatcherUtils.rows; import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; +import java.io.IOException; +import org.json.JSONObject; +import org.junit.Test; +import org.opensearch.sql.legacy.SQLIntegTestCase; + public class LikeQueryIT extends SQLIntegTestCase { @Override protected void init() throws Exception { @@ -24,9 +22,11 @@ protected void init() throws Exception { @Test public void test_like_in_select() throws IOException { - String query = "SELECT KeywordBody, KeywordBody LIKE 'test wildcard%' FROM " + TEST_INDEX_WILDCARD; + String query = + "SELECT KeywordBody, KeywordBody LIKE 'test wildcard%' FROM " + TEST_INDEX_WILDCARD; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, + verifyDataRows( + result, rows("test 
wildcard", true), rows("test wildcard in the end of the text%", true), rows("%test wildcard in the beginning of the text", false), @@ -41,9 +41,11 @@ public void test_like_in_select() throws IOException { @Test public void test_like_in_select_with_escaped_percent() throws IOException { - String query = "SELECT KeywordBody, KeywordBody LIKE '\\\\%test wildcard%' FROM " + TEST_INDEX_WILDCARD; + String query = + "SELECT KeywordBody, KeywordBody LIKE '\\\\%test wildcard%' FROM " + TEST_INDEX_WILDCARD; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, + verifyDataRows( + result, rows("test wildcard", false), rows("test wildcard in the end of the text%", false), rows("%test wildcard in the beginning of the text", true), @@ -58,9 +60,11 @@ public void test_like_in_select_with_escaped_percent() throws IOException { @Test public void test_like_in_select_with_escaped_underscore() throws IOException { - String query = "SELECT KeywordBody, KeywordBody LIKE '\\\\_test wildcard%' FROM " + TEST_INDEX_WILDCARD; + String query = + "SELECT KeywordBody, KeywordBody LIKE '\\\\_test wildcard%' FROM " + TEST_INDEX_WILDCARD; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, + verifyDataRows( + result, rows("test wildcard", false), rows("test wildcard in the end of the text%", false), rows("%test wildcard in the beginning of the text", false), @@ -75,9 +79,13 @@ public void test_like_in_select_with_escaped_underscore() throws IOException { @Test public void test_like_in_where() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE KeywordBody LIKE 'test wildcard%'"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE KeywordBody LIKE 'test wildcard%'"; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, + verifyDataRows( + result, rows("test wildcard"), rows("test wildcard in the end of the text%"), rows("test wildcard in % the middle of the text"), @@ 
-89,18 +97,22 @@ public void test_like_in_where() throws IOException { @Test public void test_like_in_where_with_escaped_percent() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE KeywordBody LIKE '\\\\%test wildcard%'"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE KeywordBody LIKE '\\\\%test wildcard%'"; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, - rows("%test wildcard in the beginning of the text")); + verifyDataRows(result, rows("%test wildcard in the beginning of the text")); } @Test public void test_like_in_where_with_escaped_underscore() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE KeywordBody LIKE '\\\\_test wildcard%'"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE KeywordBody LIKE '\\\\_test wildcard%'"; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, - rows("_test wildcard in the beginning of the text")); + verifyDataRows(result, rows("_test wildcard in the beginning of the text")); } @Test @@ -119,7 +131,8 @@ public void test_like_on_text_keyword_field_with_one_word() throws IOException { @Test public void test_like_on_text_keyword_field_with_greater_than_one_word() throws IOException { - String query = "SELECT * FROM " + TEST_INDEX_WILDCARD + " WHERE TextKeywordBody LIKE 'test wild*'"; + String query = + "SELECT * FROM " + TEST_INDEX_WILDCARD + " WHERE TextKeywordBody LIKE 'test wild*'"; JSONObject result = executeJdbcRequest(query); assertEquals(7, result.getInt("total")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/MatchBoolPrefixIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/MatchBoolPrefixIT.java index 1c959c5460..c81cc8e4f5 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/MatchBoolPrefixIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/MatchBoolPrefixIT.java @@ -23,32 
+23,30 @@ public void init() throws IOException { @Test public void query_matches_test() throws IOException { - String query = "SELECT phrase FROM " - + TEST_INDEX_PHRASE + " WHERE match_bool_prefix(phrase, 'quick')"; + String query = + "SELECT phrase FROM " + TEST_INDEX_PHRASE + " WHERE match_bool_prefix(phrase, 'quick')"; var result = new JSONObject(executeQuery(query, "jdbc")); verifySchema(result, schema("phrase", "text")); - verifyDataRows(result, - rows("quick fox"), - rows("quick fox here")); + verifyDataRows(result, rows("quick fox"), rows("quick fox here")); } @Test public void additional_parameters_test() throws IOException { - String query = "SELECT phrase FROM " - + TEST_INDEX_PHRASE + " WHERE match_bool_prefix(phrase, '2 test', minimum_should_match=1, fuzziness=2)"; + String query = + "SELECT phrase FROM " + + TEST_INDEX_PHRASE + + " WHERE match_bool_prefix(phrase, '2 test', minimum_should_match=1, fuzziness=2)"; var result = new JSONObject(executeQuery(query, "jdbc")); verifySchema(result, schema("phrase", "text")); - verifyDataRows(result, - rows("my test"), - rows("my test 2")); + verifyDataRows(result, rows("my test"), rows("my test 2")); } @Test public void no_matches_test() throws IOException { - String query = "SELECT * FROM " - + TEST_INDEX_PHRASE + " WHERE match_bool_prefix(phrase, 'rice')"; + String query = + "SELECT * FROM " + TEST_INDEX_PHRASE + " WHERE match_bool_prefix(phrase, 'rice')"; var result = new JSONObject(executeQuery(query, "jdbc")); assertEquals(0, result.getInt("total")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/MatchIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/MatchIT.java index 9885ddfa33..5bde838e19 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/MatchIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/MatchIT.java @@ -29,93 +29,119 @@ public void init() throws IOException { @Test public void match_in_where() throws IOException { - JSONObject result = 
executeJdbcRequest("SELECT firstname FROM " + TEST_INDEX_ACCOUNT + " WHERE match(lastname, 'Bates')"); + JSONObject result = + executeJdbcRequest( + "SELECT firstname FROM " + TEST_INDEX_ACCOUNT + " WHERE match(lastname, 'Bates')"); verifySchema(result, schema("firstname", "text")); verifyDataRows(result, rows("Nanette")); } @Test public void match_in_having() throws IOException { - JSONObject result = executeJdbcRequest("SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " HAVING match(firstname, 'Nanette')"); + JSONObject result = + executeJdbcRequest( + "SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " HAVING match(firstname, 'Nanette')"); verifySchema(result, schema("lastname", "text")); verifyDataRows(result, rows("Bates")); } @Test public void missing_field_test() { - String query = StringUtils.format("SELECT * FROM %s WHERE match(invalid, 'Bates')", TEST_INDEX_ACCOUNT); + String query = + StringUtils.format("SELECT * FROM %s WHERE match(invalid, 'Bates')", TEST_INDEX_ACCOUNT); final RuntimeException exception = expectThrows(RuntimeException.class, () -> executeJdbcRequest(query)); - assertTrue(exception.getMessage() - .contains("can't resolve Symbol(namespace=FIELD_NAME, name=invalid) in type env")); + assertTrue( + exception + .getMessage() + .contains("can't resolve Symbol(namespace=FIELD_NAME, name=invalid) in type env")); assertTrue(exception.getMessage().contains("SemanticCheckException")); } @Test public void missing_quoted_field_test() { - String query = StringUtils.format("SELECT * FROM %s WHERE match('invalid', 'Bates')", TEST_INDEX_ACCOUNT); + String query = + StringUtils.format("SELECT * FROM %s WHERE match('invalid', 'Bates')", TEST_INDEX_ACCOUNT); final RuntimeException exception = expectThrows(RuntimeException.class, () -> executeJdbcRequest(query)); - assertTrue(exception.getMessage() - .contains("can't resolve Symbol(namespace=FIELD_NAME, name=invalid) in type env")); + assertTrue( + exception + .getMessage() + .contains("can't resolve 
Symbol(namespace=FIELD_NAME, name=invalid) in type env")); assertTrue(exception.getMessage().contains("SemanticCheckException")); } @Test public void missing_backtick_field_test() { - String query = StringUtils.format("SELECT * FROM %s WHERE match(`invalid`, 'Bates')", TEST_INDEX_ACCOUNT); + String query = + StringUtils.format("SELECT * FROM %s WHERE match(`invalid`, 'Bates')", TEST_INDEX_ACCOUNT); final RuntimeException exception = expectThrows(RuntimeException.class, () -> executeJdbcRequest(query)); - assertTrue(exception.getMessage() - .contains("can't resolve Symbol(namespace=FIELD_NAME, name=invalid) in type env")); + assertTrue( + exception + .getMessage() + .contains("can't resolve Symbol(namespace=FIELD_NAME, name=invalid) in type env")); assertTrue(exception.getMessage().contains("SemanticCheckException")); } @Test public void matchquery_in_where() throws IOException { - JSONObject result = executeJdbcRequest("SELECT firstname FROM " + TEST_INDEX_ACCOUNT + " WHERE matchquery(lastname, 'Bates')"); + JSONObject result = + executeJdbcRequest( + "SELECT firstname FROM " + TEST_INDEX_ACCOUNT + " WHERE matchquery(lastname, 'Bates')"); verifySchema(result, schema("firstname", "text")); verifyDataRows(result, rows("Nanette")); } @Test public void matchquery_in_having() throws IOException { - JSONObject result = executeJdbcRequest("SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " HAVING matchquery(firstname, 'Nanette')"); + JSONObject result = + executeJdbcRequest( + "SELECT lastname FROM " + + TEST_INDEX_ACCOUNT + + " HAVING matchquery(firstname, 'Nanette')"); verifySchema(result, schema("lastname", "text")); verifyDataRows(result, rows("Bates")); } @Test public void match_query_in_where() throws IOException { - JSONObject result = executeJdbcRequest("SELECT firstname FROM " + TEST_INDEX_ACCOUNT + " WHERE match_query(lastname, 'Bates')"); + JSONObject result = + executeJdbcRequest( + "SELECT firstname FROM " + + TEST_INDEX_ACCOUNT + + " WHERE match_query(lastname, 
'Bates')"); verifySchema(result, schema("firstname", "text")); verifyDataRows(result, rows("Nanette")); } @Test public void match_query_in_having() throws IOException { - JSONObject result = executeJdbcRequest( - "SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " HAVING match_query(firstname, 'Nanette')"); + JSONObject result = + executeJdbcRequest( + "SELECT lastname FROM " + + TEST_INDEX_ACCOUNT + + " HAVING match_query(firstname, 'Nanette')"); verifySchema(result, schema("lastname", "text")); verifyDataRows(result, rows("Bates")); } @Test public void match_aliases_return_the_same_results() throws IOException { - String query1 = "SELECT lastname FROM " - + TEST_INDEX_ACCOUNT + " HAVING match(firstname, 'Nanette')"; + String query1 = + "SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " HAVING match(firstname, 'Nanette')"; JSONObject result1 = executeJdbcRequest(query1); - String query2 = "SELECT lastname FROM " - + TEST_INDEX_ACCOUNT + " HAVING matchquery(firstname, 'Nanette')"; + String query2 = + "SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " HAVING matchquery(firstname, 'Nanette')"; JSONObject result2 = executeJdbcRequest(query2); - String query3 = "SELECT lastname FROM " - + TEST_INDEX_ACCOUNT + " HAVING match_query(firstname, 'Nanette')"; + String query3 = + "SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " HAVING match_query(firstname, 'Nanette')"; JSONObject result3 = executeJdbcRequest(query3); assertEquals(result1.getInt("total"), result2.getInt("total")); assertEquals(result1.getInt("total"), result3.getInt("total")); @@ -123,30 +149,33 @@ public void match_aliases_return_the_same_results() throws IOException { @Test public void match_query_alternate_syntax() throws IOException { - JSONObject result = executeJdbcRequest( - "SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " WHERE lastname = match_query('Bates')"); + JSONObject result = + executeJdbcRequest( + "SELECT lastname FROM " + + TEST_INDEX_ACCOUNT + + " WHERE lastname = match_query('Bates')"); 
verifySchema(result, schema("lastname", "text")); verifyDataRows(result, rows("Bates")); } @Test public void matchquery_alternate_syntax() throws IOException { - JSONObject result = executeJdbcRequest( - "SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " WHERE lastname = matchquery('Bates')"); + JSONObject result = + executeJdbcRequest( + "SELECT lastname FROM " + TEST_INDEX_ACCOUNT + " WHERE lastname = matchquery('Bates')"); verifySchema(result, schema("lastname", "text")); verifyDataRows(result, rows("Bates")); } @Test public void match_alternate_syntaxes_return_the_same_results() throws IOException { - String query1 = "SELECT * FROM " - + TEST_INDEX_ACCOUNT + " WHERE match(firstname, 'Nanette')"; + String query1 = "SELECT * FROM " + TEST_INDEX_ACCOUNT + " WHERE match(firstname, 'Nanette')"; JSONObject result1 = executeJdbcRequest(query1); - String query2 = "SELECT * FROM " - + TEST_INDEX_ACCOUNT + " WHERE firstname = match_query('Nanette')"; + String query2 = + "SELECT * FROM " + TEST_INDEX_ACCOUNT + " WHERE firstname = match_query('Nanette')"; JSONObject result2 = executeJdbcRequest(query2); - String query3 = "SELECT * FROM " - + TEST_INDEX_ACCOUNT + " WHERE firstname = matchquery('Nanette')"; + String query3 = + "SELECT * FROM " + TEST_INDEX_ACCOUNT + " WHERE firstname = matchquery('Nanette')"; JSONObject result3 = executeJdbcRequest(query3); assertEquals(result1.getInt("total"), result2.getInt("total")); assertEquals(result1.getInt("total"), result3.getInt("total")); @@ -154,11 +183,16 @@ public void match_alternate_syntaxes_return_the_same_results() throws IOExceptio @Test public void matchPhraseQueryTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "select address from %s " + - "where address= matchPhrase('671 Bristol Street') order by _score desc limit 3", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, - containsString("{\\\"match_phrase\\\":{\\\"address\\\":{\\\"query\\\":\\\"671 Bristol 
Street\\\"")); + final String result = + explainQuery( + String.format( + Locale.ROOT, + "select address from %s where address= matchPhrase('671 Bristol Street') order by" + + " _score desc limit 3", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result, + containsString( + "{\\\"match_phrase\\\":{\\\"address\\\":{\\\"query\\\":\\\"671 Bristol Street\\\"")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/MatchPhraseIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/MatchPhraseIT.java index 3b7e65dcc6..d08149aa96 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/MatchPhraseIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/MatchPhraseIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_PHRASE; diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/MatchPhrasePrefixIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/MatchPhrasePrefixIT.java index dd2a8384d6..f181a18689 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/MatchPhrasePrefixIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/MatchPhrasePrefixIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BEER; @@ -26,7 +25,8 @@ protected void init() throws Exception { public void required_parameters() throws IOException { String query = "SELECT Title FROM %s WHERE match_phrase_prefix(Title, 'champagne be')"; JSONObject result = executeJdbcRequest(String.format(query, TEST_INDEX_BEER)); - verifyDataRows(result, + verifyDataRows( + result, rows("Can old flat champagne be used for vinegar?"), rows("Elder flower champagne best to use natural yeast or add a wine yeast?")); } @@ -34,9 +34,10 @@ public void required_parameters() throws IOException { @Test public void all_optional_parameters() throws IOException 
{ // The values for optional parameters are valid but arbitrary. - String query = "SELECT Title FROM %s " + - "WHERE match_phrase_prefix(Title, 'flat champ', boost = 1.0, zero_terms_query='ALL', " + - "max_expansions = 2, analyzer=standard, slop=0)"; + String query = + "SELECT Title FROM %s " + + "WHERE match_phrase_prefix(Title, 'flat champ', boost = 1.0, zero_terms_query='ALL', " + + "max_expansions = 2, analyzer=standard, slop=0)"; JSONObject result = executeJdbcRequest(String.format(query, TEST_INDEX_BEER)); verifyDataRows(result, rows("Can old flat champagne be used for vinegar?")); } @@ -47,21 +48,22 @@ public void max_expansions_is_3() throws IOException { // It tells OpenSearch to consider only the first 3 terms that start with 'bottl' // In this dataset these are 'bottle-conditioning', 'bottling', 'bottles'. - String query = "SELECT Tags FROM %s " + - "WHERE match_phrase_prefix(Tags, 'draught bottl', max_expansions=3)"; + String query = + "SELECT Tags FROM %s " + + "WHERE match_phrase_prefix(Tags, 'draught bottl', max_expansions=3)"; JSONObject result = executeJdbcRequest(String.format(query, TEST_INDEX_BEER)); - verifyDataRows(result, rows("brewing draught bottling"), - rows("draught bottles")); + verifyDataRows(result, rows("brewing draught bottling"), rows("draught bottles")); } @Test public void analyzer_english() throws IOException { // English analyzer removes 'in' and 'to' as they are common words. // This results in an empty query. 
- String query = "SELECT Title FROM %s " + - "WHERE match_phrase_prefix(Title, 'in to', analyzer=english)"; + String query = + "SELECT Title FROM %s " + "WHERE match_phrase_prefix(Title, 'in to', analyzer=english)"; JSONObject result = executeJdbcRequest(String.format(query, TEST_INDEX_BEER)); - assertTrue("Expect English analyzer to filter out common words 'in' and 'to'", + assertTrue( + "Expect English analyzer to filter out common words 'in' and 'to'", result.getInt("total") == 0); } @@ -69,8 +71,8 @@ public void analyzer_english() throws IOException { public void analyzer_standard() throws IOException { // Standard analyzer does not treat 'in' and 'to' as special terms. // This results in 'to' being used as a phrase prefix given us 'Tokyo'. - String query = "SELECT Title FROM %s " + - "WHERE match_phrase_prefix(Title, 'in to', analyzer=standard)"; + String query = + "SELECT Title FROM %s " + "WHERE match_phrase_prefix(Title, 'in to', analyzer=standard)"; JSONObject result = executeJdbcRequest(String.format(query, TEST_INDEX_BEER)); verifyDataRows(result, rows("Local microbreweries and craft beer in Tokyo")); } @@ -80,15 +82,15 @@ public void zero_term_query_all() throws IOException { // English analyzer removes 'in' and 'to' as they are common words. // zero_terms_query of 'ALL' causes all rows to be returned. // ORDER BY ... LIMIT helps make the test understandable. 
- String query = "SELECT Title FROM %s" + - " WHERE match_phrase_prefix(Title, 'in to', analyzer=english, zero_terms_query='ALL')" + - " ORDER BY Title DESC" + - " LIMIT 1"; + String query = + "SELECT Title FROM %s" + + " WHERE match_phrase_prefix(Title, 'in to', analyzer=english, zero_terms_query='ALL')" + + " ORDER BY Title DESC" + + " LIMIT 1"; JSONObject result = executeJdbcRequest(String.format(query, TEST_INDEX_BEER)); verifyDataRows(result, rows("was working great, now all foam")); } - @Test public void slop_is_2() throws IOException { // When slop is 2, the terms are matched exactly in the order specified. @@ -103,8 +105,6 @@ public void slop_is_3() throws IOException { // When slop is 3, results will include phrases where the query terms are transposed. String query = "SELECT Tags from %s where match_phrase_prefix(Tags, 'gas ta', slop=3)"; JSONObject result = executeJdbcRequest(String.format(query, TEST_INDEX_BEER)); - verifyDataRows(result, - rows("taste draught gas"), - rows("taste gas")); + verifyDataRows(result, rows("taste draught gas"), rows("taste gas")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/MathematicalFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/MathematicalFunctionIT.java index 2e05706269..60b7632ad0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/MathematicalFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/MathematicalFunctionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -34,12 +33,11 @@ public void init() throws Exception { @Test public void testPI() throws IOException { JSONObject result = - executeQuery(String.format("SELECT PI() FROM %s HAVING (COUNT(1) > 0)",TEST_INDEX_BANK) ); - verifySchema(result, - schema("PI()", null, "double")); + executeQuery(String.format("SELECT PI() FROM %s HAVING (COUNT(1) > 0)", TEST_INDEX_BANK)); + 
verifySchema(result, schema("PI()", null, "double")); verifyDataRows(result, rows(3.141592653589793)); } - + @Test public void testCeil() throws IOException { JSONObject result = executeQuery("select ceil(0)"); @@ -97,7 +95,8 @@ public void testE() throws IOException { @Test public void testExpm1() throws IOException { - JSONObject result = executeQuery("select expm1(account_number) FROM " + TEST_INDEX_BANK + " LIMIT 2"); + JSONObject result = + executeQuery("select expm1(account_number) FROM " + TEST_INDEX_BANK + " LIMIT 2"); verifySchema(result, schema("expm1(account_number)", null, "double")); verifyDataRows(result, rows(Math.expm1(1)), rows(Math.expm1(6))); } @@ -333,36 +332,28 @@ public void testCbrt() throws IOException { @Test public void testLnReturnsNull() throws IOException { JSONObject result = executeQuery("select ln(0), ln(-2)"); - verifySchema(result, - schema("ln(0)", "double"), - schema("ln(-2)", "double")); + verifySchema(result, schema("ln(0)", "double"), schema("ln(-2)", "double")); verifyDataRows(result, rows(null, null)); } @Test public void testLogReturnsNull() throws IOException { JSONObject result = executeQuery("select log(0), log(-2)"); - verifySchema(result, - schema("log(0)", "double"), - schema("log(-2)", "double")); + verifySchema(result, schema("log(0)", "double"), schema("log(-2)", "double")); verifyDataRows(result, rows(null, null)); } @Test public void testLog10ReturnsNull() throws IOException { JSONObject result = executeQuery("select log10(0), log10(-2)"); - verifySchema(result, - schema("log10(0)", "double"), - schema("log10(-2)", "double")); + verifySchema(result, schema("log10(0)", "double"), schema("log10(-2)", "double")); verifyDataRows(result, rows(null, null)); } @Test public void testLog2ReturnsNull() throws IOException { JSONObject result = executeQuery("select log2(0), log2(-2)"); - verifySchema(result, - schema("log2(0)", "double"), - schema("log2(-2)", "double")); + verifySchema(result, schema("log2(0)", "double"), 
schema("log2(-2)", "double")); verifyDataRows(result, rows(null, null)); } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/MetricsIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/MetricsIT.java index 2a26eb19fe..4bbab4f167 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/MetricsIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/MetricsIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK; @@ -40,9 +39,7 @@ public void requestCount() throws IOException, InterruptedException { } private Request makeStatRequest() { - return new Request( - "GET", STATS_API_ENDPOINT - ); + return new Request("GET", STATS_API_ENDPOINT); } private int requestTotal() throws IOException { diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/MultiMatchIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/MultiMatchIT.java index 07c89b4cdf..97763b2c8a 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/MultiMatchIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/MultiMatchIT.java @@ -30,96 +30,111 @@ public void init() throws IOException { @Test public void test_mandatory_params() { - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE multi_match([\\\"Tags\\\" ^ 1.5, Title, `Body` 4.2], 'taste')"; + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE multi_match([\\\"Tags\\\" ^ 1.5, Title, `Body` 4.2], 'taste')"; JSONObject result = executeJdbcRequest(query); assertEquals(16, result.getInt("total")); } @Test public void test_all_params() { - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE multi_match(['Body', Tags], 'taste beer', operator='and', analyzer=english," - + "auto_generate_synonyms_phrase_query=true, boost = 0.77, cutoff_frequency=0.33," - + "fuzziness = 'AUTO:1,5', fuzzy_transpositions = false, lenient = true, max_expansions = 25," - + 
"minimum_should_match = '2<-25% 9<-3', prefix_length = 7, tie_breaker = 0.3," - + "type = most_fields, slop = 2, zero_terms_query = 'ALL');"; + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE multi_match(['Body', Tags], 'taste beer', operator='and'," + + " analyzer=english,auto_generate_synonyms_phrase_query=true, boost = 0.77," + + " cutoff_frequency=0.33,fuzziness = 'AUTO:1,5', fuzzy_transpositions = false, lenient" + + " = true, max_expansions = 25,minimum_should_match = '2<-25% 9<-3', prefix_length =" + + " 7, tie_breaker = 0.3,type = most_fields, slop = 2, zero_terms_query = 'ALL');"; JSONObject result = executeJdbcRequest(query); assertEquals(10, result.getInt("total")); } @Test public void verify_wildcard_test() { - String query1 = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE multi_match(['Tags'], 'taste')"; + String query1 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE multi_match(['Tags'], 'taste')"; JSONObject result1 = executeJdbcRequest(query1); - String query2 = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE multi_match(['T*'], 'taste')"; + String query2 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE multi_match(['T*'], 'taste')"; JSONObject result2 = executeJdbcRequest(query2); assertNotEquals(result2.getInt("total"), result1.getInt("total")); - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE multi_match(['*Date'], '2014-01-22');"; + String query = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE multi_match(['*Date'], '2014-01-22');"; JSONObject result = executeJdbcRequest(query); assertEquals(10, result.getInt("total")); } @Test public void test_multimatch_alternate_parameter_syntax() { - String query = "SELECT Tags FROM " + TEST_INDEX_BEER - + " WHERE multimatch('query'='taste', 'fields'='Tags')"; + String query = + "SELECT Tags FROM " + + TEST_INDEX_BEER + + " WHERE multimatch('query'='taste', 'fields'='Tags')"; JSONObject result = executeJdbcRequest(query); assertEquals(8, result.getInt("total")); } @Test 
public void test_multimatchquery_alternate_parameter_syntax() { - String query = "SELECT Tags FROM " + TEST_INDEX_BEER - + " WHERE multimatchquery(query='cicerone', fields='Tags')"; + String query = + "SELECT Tags FROM " + + TEST_INDEX_BEER + + " WHERE multimatchquery(query='cicerone', fields='Tags')"; JSONObject result = executeJdbcRequest(query); assertEquals(2, result.getInt("total")); - verifyDataRows(result, rows("serving cicerone restaurants"), - rows("taste cicerone")); + verifyDataRows(result, rows("serving cicerone restaurants"), rows("taste cicerone")); } @Test public void test_quoted_multi_match_alternate_parameter_syntax() { - String query = "SELECT Tags FROM " + TEST_INDEX_BEER - + " WHERE multi_match('query'='cicerone', 'fields'='Tags')"; + String query = + "SELECT Tags FROM " + + TEST_INDEX_BEER + + " WHERE multi_match('query'='cicerone', 'fields'='Tags')"; JSONObject result = executeJdbcRequest(query); assertEquals(2, result.getInt("total")); - verifyDataRows(result, rows("serving cicerone restaurants"), - rows("taste cicerone")); + verifyDataRows(result, rows("serving cicerone restaurants"), rows("taste cicerone")); } @Test public void test_multi_match_alternate_parameter_syntax() { - String query = "SELECT Tags FROM " + TEST_INDEX_BEER - + " WHERE multi_match(query='cicerone', fields='Tags')"; + String query = + "SELECT Tags FROM " + + TEST_INDEX_BEER + + " WHERE multi_match(query='cicerone', fields='Tags')"; JSONObject result = executeJdbcRequest(query); assertEquals(2, result.getInt("total")); - verifyDataRows(result, rows("serving cicerone restaurants"), - rows("taste cicerone")); + verifyDataRows(result, rows("serving cicerone restaurants"), rows("taste cicerone")); } @Test public void test_wildcard_multi_match_alternate_parameter_syntax() { - String query = "SELECT Body FROM " + TEST_INDEX_BEER - + " WHERE multi_match(query='IPA', fields='B*') LIMIT 1"; + String query = + "SELECT Body FROM " + + TEST_INDEX_BEER + + " WHERE 
multi_match(query='IPA', fields='B*') LIMIT 1"; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, rows("

I know what makes an IPA an IPA, but what are the unique" + - " characteristics of it's common variants? To be specific, the ones I'm interested in are Double IPA" + - " and Black IPA, but general differences between any other styles would be welcome too.

\n")); + verifyDataRows( + result, + rows( + "

I know what makes an IPA an IPA, but what are the unique characteristics of it's" + + " common variants? To be specific, the ones I'm interested in are Double IPA and" + + " Black IPA, but general differences between any other styles would be welcome" + + " too.

\n")); } @Test public void test_all_params_multimatchquery_alternate_parameter_syntax() { - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE multimatchquery(query='cicerone', fields='Tags', 'operator'='or', analyzer=english," - + "auto_generate_synonyms_phrase_query=true, boost = 0.77, cutoff_frequency=0.33," - + "fuzziness = 'AUTO:1,5', fuzzy_transpositions = false, lenient = true, max_expansions = 25," - + "minimum_should_match = '2<-25% 9<-3', prefix_length = 7, tie_breaker = 0.3," - + "type = most_fields, slop = 2, zero_terms_query = 'ALL');"; + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE multimatchquery(query='cicerone', fields='Tags', 'operator'='or'," + + " analyzer=english,auto_generate_synonyms_phrase_query=true, boost = 0.77," + + " cutoff_frequency=0.33,fuzziness = 'AUTO:1,5', fuzzy_transpositions = false, lenient" + + " = true, max_expansions = 25,minimum_should_match = '2<-25% 9<-3', prefix_length =" + + " 7, tie_breaker = 0.3,type = most_fields, slop = 2, zero_terms_query = 'ALL');"; JSONObject result = executeJdbcRequest(query); assertEquals(2, result.getInt("total")); @@ -127,28 +142,28 @@ public void test_all_params_multimatchquery_alternate_parameter_syntax() { @Test public void multi_match_alternate_syntax() throws IOException { - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE CreationDate = multi_match('2014-01-22');"; + String query = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE CreationDate = multi_match('2014-01-22');"; var result = new JSONObject(executeQuery(query, "jdbc")); assertEquals(8, result.getInt("total")); } @Test public void multimatch_alternate_syntax() throws IOException { - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE CreationDate = multimatch('2014-01-22');"; + String query = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE CreationDate = multimatch('2014-01-22');"; var result = new JSONObject(executeQuery(query, "jdbc")); assertEquals(8, 
result.getInt("total")); } @Test public void multi_match_alternate_syntaxes_return_the_same_results() throws IOException { - String query1 = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE multi_match(['CreationDate'], '2014-01-22');"; - String query2 = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE CreationDate = multi_match('2014-01-22');"; - String query3 = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE CreationDate = multimatch('2014-01-22');"; + String query1 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE multi_match(['CreationDate'], '2014-01-22');"; + String query2 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE CreationDate = multi_match('2014-01-22');"; + String query3 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE CreationDate = multimatch('2014-01-22');"; var result1 = new JSONObject(executeQuery(query1, "jdbc")); var result2 = new JSONObject(executeQuery(query2, "jdbc")); var result3 = new JSONObject(executeQuery(query3, "jdbc")); diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/NestedIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/NestedIT.java index d3230188b7..54831cb561 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/NestedIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/NestedIT.java @@ -37,11 +37,13 @@ public void init() throws IOException { @Test public void nested_function_with_array_of_nested_field_test() { - String query = "SELECT nested(message.info), nested(comment.data) FROM " + TEST_INDEX_NESTED_TYPE; + String query = + "SELECT nested(message.info), nested(comment.data) FROM " + TEST_INDEX_NESTED_TYPE; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifyDataRows(result, + verifyDataRows( + result, rows("c", "ab"), rows("a", "ab"), rows("b", "aa"), @@ -52,17 +54,20 @@ public void nested_function_with_array_of_nested_field_test() { @Test public void nested_function_in_select_test() { - String query = "SELECT nested(message.info), 
nested(comment.data), " - + "nested(message.dayOfWeek) FROM " - + TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS; + String query = + "SELECT nested(message.info), nested(comment.data), " + + "nested(message.dayOfWeek) FROM " + + TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS; JSONObject result = executeJdbcRequest(query); assertEquals(5, result.getInt("total")); - verifySchema(result, + verifySchema( + result, schema("nested(message.info)", null, "keyword"), schema("nested(comment.data)", null, "keyword"), schema("nested(message.dayOfWeek)", null, "long")); - verifyDataRows(result, + verifyDataRows( + result, rows("a", "ab", 1), rows("b", "aa", 2), rows("c", "aa", 1), @@ -74,8 +79,8 @@ public void nested_function_in_select_test() { // gets resolved @Disabled // TODO fix me when aggregation is supported public void nested_function_in_an_aggregate_function_in_select_test() { - String query = "SELECT sum(nested(message.dayOfWeek)) FROM " + - TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS; + String query = + "SELECT sum(nested(message.dayOfWeek)) FROM " + TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS; JSONObject result = executeJdbcRequest(query); verifyDataRows(result, rows(14)); } @@ -83,84 +88,67 @@ public void nested_function_in_an_aggregate_function_in_select_test() { // TODO Enable me when nested aggregation is supported @Disabled public void nested_function_with_arrays_in_an_aggregate_function_in_select_test() { - String query = "SELECT sum(nested(message.dayOfWeek)) FROM " + - TEST_INDEX_NESTED_TYPE; + String query = "SELECT sum(nested(message.dayOfWeek)) FROM " + TEST_INDEX_NESTED_TYPE; JSONObject result = executeJdbcRequest(query); verifyDataRows(result, rows(19)); } @Test public void nested_function_in_a_function_in_select_test() { - String query = "SELECT upper(nested(message.info)) FROM " + - TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS; + String query = + "SELECT upper(nested(message.info)) FROM " + TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS; JSONObject result = executeJdbcRequest(query); - 
verifyDataRows(result, - rows("A"), - rows("B"), - rows("C"), - rows("C"), - rows("ZZ")); + verifyDataRows(result, rows("A"), rows("B"), rows("C"), rows("C"), rows("ZZ")); } @Test public void nested_all_function_in_a_function_in_select_test() { - String query = "SELECT nested(message.*) FROM " + - TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS + " WHERE nested(message.info) = 'a'"; + String query = + "SELECT nested(message.*) FROM " + + TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS + + " WHERE nested(message.info) = 'a'"; JSONObject result = executeJdbcRequest(query); verifyDataRows(result, rows("e", 1, "a")); } @Test public void invalid_multiple_nested_all_function_in_a_function_in_select_test() { - String query = "SELECT nested(message.*), nested(message.info) FROM " + - TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS; - RuntimeException result = assertThrows( - RuntimeException.class, - () -> executeJdbcRequest(query) - ); + String query = + "SELECT nested(message.*), nested(message.info) FROM " + + TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS; + RuntimeException result = assertThrows(RuntimeException.class, () -> executeJdbcRequest(query)); assertTrue( result.getMessage().contains("IllegalArgumentException") - && result.getMessage().contains("Multiple entries with same key") - ); + && result.getMessage().contains("Multiple entries with same key")); } @Test public void nested_all_function_with_limit_test() { - String query = "SELECT nested(message.*) FROM " + - TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS + " LIMIT 3"; + String query = + "SELECT nested(message.*) FROM " + TEST_INDEX_NESTED_TYPE_WITHOUT_ARRAYS + " LIMIT 3"; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, - rows("e", 1, "a"), - rows("f", 2, "b"), - rows("g", 1, "c") - ); + verifyDataRows(result, rows("e", 1, "a"), rows("f", 2, "b"), rows("g", 1, "c")); } - @Test public void nested_function_with_array_of_multi_nested_field_test() { String query = "SELECT nested(message.author.name) FROM " + 
TEST_INDEX_MULTI_NESTED_TYPE; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifyDataRows(result, - rows("e"), - rows("f"), - rows("g"), - rows("h"), - rows("p"), - rows("yy")); + verifyDataRows(result, rows("e"), rows("f"), rows("g"), rows("h"), rows("p"), rows("yy")); } @Test public void nested_function_with_null_and_missing_fields_test() { - String query = "SELECT nested(message.info), nested(comment.data) FROM " - + TEST_INDEX_NESTED_WITH_NULLS; + String query = + "SELECT nested(message.info), nested(comment.data) FROM " + TEST_INDEX_NESTED_WITH_NULLS; JSONObject result = executeJdbcRequest(query); assertEquals(10, result.getInt("total")); - verifyDataRows(result, + verifyDataRows( + result, rows(null, "hh"), rows("b", "aa"), rows("c", "aa"), @@ -176,12 +164,14 @@ public void nested_function_with_null_and_missing_fields_test() { @Test public void nested_function_multiple_fields_with_matched_and_mismatched_paths_test() { String query = - "SELECT nested(message.author), nested(message.dayOfWeek), nested(message.info), nested(comment.data), " - + "nested(comment.likes) FROM " + TEST_INDEX_NESTED_TYPE; + "SELECT nested(message.author), nested(message.dayOfWeek), nested(message.info)," + + " nested(comment.data), nested(comment.likes) FROM " + + TEST_INDEX_NESTED_TYPE; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifyDataRows(result, + verifyDataRows( + result, rows("e", 1, "a", "ab", 3), rows("f", 2, "b", "aa", 2), rows("g", 1, "c", "aa", 3), @@ -192,12 +182,12 @@ public void nested_function_multiple_fields_with_matched_and_mismatched_paths_te @Test public void nested_function_mixed_with_non_nested_type_test() { - String query = - "SELECT nested(message.info), someField FROM " + TEST_INDEX_NESTED_TYPE; + String query = "SELECT nested(message.info), someField FROM " + TEST_INDEX_NESTED_TYPE; JSONObject result = executeJdbcRequest(query); assertEquals(6, 
result.getInt("total")); - verifyDataRows(result, + verifyDataRows( + result, rows("a", "b"), rows("b", "a"), rows("c", "a"), @@ -209,46 +199,38 @@ public void nested_function_mixed_with_non_nested_type_test() { @Test public void nested_function_with_order_by_clause() { String query = - "SELECT nested(message.info) FROM " + TEST_INDEX_NESTED_TYPE + "SELECT nested(message.info) FROM " + + TEST_INDEX_NESTED_TYPE + " ORDER BY nested(message.info)"; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifyDataRows(result, - rows("a"), - rows("c"), - rows("a"), - rows("b"), - rows("c"), - rows("zz")); + verifyDataRows(result, rows("a"), rows("c"), rows("a"), rows("b"), rows("c"), rows("zz")); } @Test public void nested_function_with_order_by_clause_desc() { String query = - "SELECT nested(message.info) FROM " + TEST_INDEX_NESTED_TYPE + "SELECT nested(message.info) FROM " + + TEST_INDEX_NESTED_TYPE + " ORDER BY nested(message.info, message) DESC"; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifyDataRows(result, - rows("zz"), - rows("c"), - rows("c"), - rows("a"), - rows("b"), - rows("a")); + verifyDataRows(result, rows("zz"), rows("c"), rows("c"), rows("a"), rows("b"), rows("a")); } @Test public void nested_function_and_field_with_order_by_clause() { String query = - "SELECT nested(message.info), myNum FROM " + TEST_INDEX_NESTED_TYPE + "SELECT nested(message.info), myNum FROM " + + TEST_INDEX_NESTED_TYPE + " ORDER BY nested(message.info, message), myNum"; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifyDataRows(result, + verifyDataRows( + result, rows("a", 1), rows("c", 4), rows("a", 4), @@ -266,9 +248,12 @@ public void nested_function_with_group_by_clause() { "SELECT count(*) FROM " + TEST_INDEX_NESTED_TYPE + " GROUP BY nested(message.info)"; JSONObject result = executeJdbcRequest(query); - 
assertTrue(result.getJSONObject("error").get("details").toString().contains( - "Aggregation type nested is not yet implemented" - )); + assertTrue( + result + .getJSONObject("error") + .get("details") + .toString() + .contains("Aggregation type nested is not yet implemented")); } // Nested function in HAVING clause is not yet implemented for JDBC format. This test ensures @@ -277,12 +262,19 @@ public void nested_function_with_group_by_clause() { @Test public void nested_function_with_having_clause() { String query = - "SELECT count(*) FROM " + TEST_INDEX_NESTED_TYPE + " GROUP BY myNum HAVING nested(comment.likes) > 7"; + "SELECT count(*) FROM " + + TEST_INDEX_NESTED_TYPE + + " GROUP BY myNum HAVING nested(comment.likes) > 7"; JSONObject result = executeJdbcRequest(query); - assertTrue(result.getJSONObject("error").get("details").toString().contains( - "For more details, please send request for Json format to see the raw response from OpenSearch engine." - )); + assertTrue( + result + .getJSONObject("error") + .get("details") + .toString() + .contains( + "For more details, please send request for Json format to see the raw response from" + + " OpenSearch engine.")); } @Test @@ -292,13 +284,11 @@ public void nested_function_mixed_with_non_nested_types_test() { JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifyDataRows(result, - rows("a", - new JSONObject(Map.of("south", 3, "west", "ab")), "ab"), - rows("b", - new JSONObject(Map.of("south", 5, "west", "ff")), "ff"), - rows("c", - new JSONObject(Map.of("south", 3, "west", "ll")), "ll"), + verifyDataRows( + result, + rows("a", new JSONObject(Map.of("south", 3, "west", "ab")), "ab"), + rows("b", new JSONObject(Map.of("south", 5, "west", "ff")), "ff"), + rows("c", new JSONObject(Map.of("south", 3, "west", "ll")), "ll"), rows("d", null, null), rows("i", null, null), rows("zz", null, null)); @@ -308,11 +298,13 @@ public void nested_function_mixed_with_non_nested_types_test() { 
public void nested_function_with_relevance_query() { String query = "SELECT nested(message.info), highlight(someField) FROM " - + TEST_INDEX_NESTED_TYPE + " WHERE match(someField, 'b')"; + + TEST_INDEX_NESTED_TYPE + + " WHERE match(someField, 'b')"; JSONObject result = executeJdbcRequest(query); assertEquals(3, result.getInt("total")); - verifyDataRows(result, + verifyDataRows( + result, rows("a", new JSONArray(List.of("b"))), rows("c", new JSONArray(List.of("b"))), rows("a", new JSONArray(List.of("b")))); @@ -322,60 +314,68 @@ public void nested_function_with_relevance_query() { public void nested_with_non_nested_type_test() { String query = "SELECT nested(someField) FROM " + TEST_INDEX_NESTED_TYPE; - Exception exception = assertThrows(RuntimeException.class, - () -> executeJdbcRequest(query)); - assertTrue(exception.getMessage().contains( - "{\n" + - " \"error\": {\n" + - " \"reason\": \"Invalid SQL query\",\n" + - " \"details\": \"Illegal nested field name: someField\",\n" + - " \"type\": \"IllegalArgumentException\"\n" + - " },\n" + - " \"status\": 400\n" + - "}" - )); + Exception exception = assertThrows(RuntimeException.class, () -> executeJdbcRequest(query)); + assertTrue( + exception + .getMessage() + .contains( + "{\n" + + " \"error\": {\n" + + " \"reason\": \"Invalid SQL query\",\n" + + " \"details\": \"Illegal nested field name: someField\",\n" + + " \"type\": \"IllegalArgumentException\"\n" + + " },\n" + + " \"status\": 400\n" + + "}")); } @Test public void nested_missing_path() { String query = "SELECT nested(message.invalid) FROM " + TEST_INDEX_MULTI_NESTED_TYPE; - Exception exception = assertThrows(RuntimeException.class, - () -> executeJdbcRequest(query)); - assertTrue(exception.getMessage().contains("" + - "{\n" + - " \"error\": {\n" + - " \"reason\": \"Invalid SQL query\",\n" + - " \"details\": \"can't resolve Symbol(namespace=FIELD_NAME, name=message.invalid) in type env\",\n" + - " \"type\": \"SemanticCheckException\"\n" + - " },\n" + - " 
\"status\": 400\n" + - "}" - )); + Exception exception = assertThrows(RuntimeException.class, () -> executeJdbcRequest(query)); + assertTrue( + exception + .getMessage() + .contains( + "{\n" + + " \"error\": {\n" + + " \"reason\": \"Invalid SQL query\",\n" + + " \"details\": \"can't resolve Symbol(namespace=FIELD_NAME," + + " name=message.invalid) in type env\",\n" + + " \"type\": \"SemanticCheckException\"\n" + + " },\n" + + " \"status\": 400\n" + + "}")); } @Test public void nested_missing_path_argument() { - String query = "SELECT nested(message.author.name, invalid) FROM " + TEST_INDEX_MULTI_NESTED_TYPE; + String query = + "SELECT nested(message.author.name, invalid) FROM " + TEST_INDEX_MULTI_NESTED_TYPE; - Exception exception = assertThrows(RuntimeException.class, - () -> executeJdbcRequest(query)); - assertTrue(exception.getMessage().contains("" + - "{\n" + - " \"error\": {\n" + - " \"reason\": \"Invalid SQL query\",\n" + - " \"details\": \"can't resolve Symbol(namespace=FIELD_NAME, name=invalid) in type env\",\n" + - " \"type\": \"SemanticCheckException\"\n" + - " },\n" + - " \"status\": 400\n" + - "}" - )); + Exception exception = assertThrows(RuntimeException.class, () -> executeJdbcRequest(query)); + assertTrue( + exception + .getMessage() + .contains( + "{\n" + + " \"error\": {\n" + + " \"reason\": \"Invalid SQL query\",\n" + + " \"details\": \"can't resolve Symbol(namespace=FIELD_NAME, name=invalid)" + + " in type env\",\n" + + " \"type\": \"SemanticCheckException\"\n" + + " },\n" + + " \"status\": 400\n" + + "}")); } @Test public void test_nested_where_with_and_conditional() { - String query = "SELECT nested(message.info), nested(message.author) FROM " + TEST_INDEX_NESTED_TYPE - + " WHERE nested(message, message.info = 'a' AND message.author = 'e')"; + String query = + "SELECT nested(message.info), nested(message.author) FROM " + + TEST_INDEX_NESTED_TYPE + + " WHERE nested(message, message.info = 'a' AND message.author = 'e')"; JSONObject result = 
executeJdbcRequest(query); assertEquals(1, result.getInt("total")); verifyDataRows(result, rows("a", "e")); @@ -383,22 +383,19 @@ public void test_nested_where_with_and_conditional() { @Test public void test_nested_in_select_and_where_as_predicate_expression() { - String query = "SELECT nested(message.info) FROM " + TEST_INDEX_NESTED_TYPE - + " WHERE nested(message.info) = 'a'"; + String query = + "SELECT nested(message.info) FROM " + + TEST_INDEX_NESTED_TYPE + + " WHERE nested(message.info) = 'a'"; JSONObject result = executeJdbcRequest(query); assertEquals(3, result.getInt("total")); - verifyDataRows( - result, - rows("a"), - rows("c"), - rows("a") - ); + verifyDataRows(result, rows("a"), rows("c"), rows("a")); } @Test public void test_nested_in_where_as_predicate_expression() { - String query = "SELECT message.info FROM " + TEST_INDEX_NESTED_TYPE - + " WHERE nested(message.info) = 'a'"; + String query = + "SELECT message.info FROM " + TEST_INDEX_NESTED_TYPE + " WHERE nested(message.info) = 'a'"; JSONObject result = executeJdbcRequest(query); assertEquals(2, result.getInt("total")); // Only first index of array is returned. Second index has 'a' @@ -407,8 +404,10 @@ public void test_nested_in_where_as_predicate_expression() { @Test public void test_nested_in_where_as_predicate_expression_with_like() { - String query = "SELECT message.info FROM " + TEST_INDEX_NESTED_TYPE - + " WHERE nested(message.info) LIKE 'a'"; + String query = + "SELECT message.info FROM " + + TEST_INDEX_NESTED_TYPE + + " WHERE nested(message.info) LIKE 'a'"; JSONObject result = executeJdbcRequest(query); assertEquals(2, result.getInt("total")); // Only first index of array is returned. 
Second index has 'a' @@ -417,21 +416,22 @@ public void test_nested_in_where_as_predicate_expression_with_like() { @Test public void test_nested_in_where_as_predicate_expression_with_multiple_conditions() { - String query = "SELECT message.info, comment.data, message.dayOfWeek FROM " + TEST_INDEX_NESTED_TYPE - + " WHERE nested(message.info) = 'zz' OR nested(comment.data) = 'ab' AND nested(message.dayOfWeek) >= 4"; + String query = + "SELECT message.info, comment.data, message.dayOfWeek FROM " + + TEST_INDEX_NESTED_TYPE + + " WHERE nested(message.info) = 'zz' OR nested(comment.data) = 'ab' AND" + + " nested(message.dayOfWeek) >= 4"; JSONObject result = executeJdbcRequest(query); assertEquals(2, result.getInt("total")); - verifyDataRows( - result, - rows("c", "ab", 4), - rows("zz", "aa", 6) - ); + verifyDataRows(result, rows("c", "ab", 4), rows("zz", "aa", 6)); } @Test public void test_nested_in_where_as_predicate_expression_with_relevance_query() { - String query = "SELECT comment.likes, someField FROM " + TEST_INDEX_NESTED_TYPE - + " WHERE nested(comment.likes) = 10 AND match(someField, 'a')"; + String query = + "SELECT comment.likes, someField FROM " + + TEST_INDEX_NESTED_TYPE + + " WHERE nested(comment.likes) = 10 AND match(someField, 'a')"; JSONObject result = executeJdbcRequest(query); assertEquals(1, result.getInt("total")); verifyDataRows(result, rows(10, "a")); @@ -443,11 +443,13 @@ public void nested_function_all_subfields() { JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifySchema(result, + verifySchema( + result, schema("nested(message.author)", null, "keyword"), schema("nested(message.dayOfWeek)", null, "long"), schema("nested(message.info)", null, "keyword")); - verifyDataRows(result, + verifyDataRows( + result, rows("e", 1, "a"), rows("f", 2, "b"), rows("g", 1, "c"), @@ -458,17 +460,18 @@ public void nested_function_all_subfields() { @Test public void nested_function_all_subfields_and_specified_subfield() { 
- String query = "SELECT nested(message.*), nested(comment.data) FROM " - + TEST_INDEX_NESTED_TYPE; + String query = "SELECT nested(message.*), nested(comment.data) FROM " + TEST_INDEX_NESTED_TYPE; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifySchema(result, + verifySchema( + result, schema("nested(message.author)", null, "keyword"), schema("nested(message.dayOfWeek)", null, "long"), schema("nested(message.info)", null, "keyword"), schema("nested(comment.data)", null, "keyword")); - verifyDataRows(result, + verifyDataRows( + result, rows("e", 1, "a", "ab"), rows("f", 2, "b", "aa"), rows("g", 1, "c", "aa"), @@ -479,15 +482,16 @@ public void nested_function_all_subfields_and_specified_subfield() { @Test public void nested_function_all_deep_nested_subfields() { - String query = "SELECT nested(message.author.address.*) FROM " - + TEST_INDEX_MULTI_NESTED_TYPE; + String query = "SELECT nested(message.author.address.*) FROM " + TEST_INDEX_MULTI_NESTED_TYPE; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifySchema(result, + verifySchema( + result, schema("nested(message.author.address.number)", null, "integer"), schema("nested(message.author.address.street)", null, "keyword")); - verifyDataRows(result, + verifyDataRows( + result, rows(1, "bc"), rows(2, "ab"), rows(3, "sk"), @@ -498,18 +502,19 @@ public void nested_function_all_deep_nested_subfields() { @Test public void nested_function_all_subfields_for_two_nested_fields() { - String query = "SELECT nested(message.*), nested(comment.*) FROM " - + TEST_INDEX_NESTED_TYPE; + String query = "SELECT nested(message.*), nested(comment.*) FROM " + TEST_INDEX_NESTED_TYPE; JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifySchema(result, + verifySchema( + result, schema("nested(message.author)", null, "keyword"), schema("nested(message.dayOfWeek)", null, "long"), schema("nested(message.info)", 
null, "keyword"), schema("nested(comment.data)", null, "keyword"), schema("nested(comment.likes)", null, "long")); - verifyDataRows(result, + verifyDataRows( + result, rows("e", 1, "a", "ab", 3), rows("f", 2, "b", "aa", 2), rows("g", 1, "c", "aa", 3), @@ -524,12 +529,14 @@ public void nested_function_all_subfields_and_non_nested_field() { JSONObject result = executeJdbcRequest(query); assertEquals(6, result.getInt("total")); - verifySchema(result, + verifySchema( + result, schema("nested(message.author)", null, "keyword"), schema("nested(message.dayOfWeek)", null, "long"), schema("nested(message.info)", null, "keyword"), schema("myNum", null, "long")); - verifyDataRows(result, + verifyDataRows( + result, rows("e", 1, "a", 1), rows("f", 2, "b", 2), rows("g", 1, "c", 3), @@ -544,17 +551,15 @@ public void nested_function_with_date_types_as_object_arrays_within_arrays_test( JSONObject result = executeJdbcRequest(query); assertEquals(11, result.getInt("total")); - verifySchema(result, - schema("nested(address.moveInDate)", null, "object") - ); - verifyDataRows(result, - rows(new JSONObject(Map.of("dateAndTime","1984-04-12 09:07:42"))), - rows(new JSONArray( - List.of( - Map.of("dateAndTime", "2023-05-03 08:07:42"), - Map.of("dateAndTime", "2001-11-11 04:07:44")) - ) - ), + verifySchema(result, schema("nested(address.moveInDate)", null, "object")); + verifyDataRows( + result, + rows(new JSONObject(Map.of("dateAndTime", "1984-04-12 09:07:42"))), + rows( + new JSONArray( + List.of( + Map.of("dateAndTime", "2023-05-03 08:07:42"), + Map.of("dateAndTime", "2001-11-11 04:07:44")))), rows(new JSONObject(Map.of("dateAndTime", "1966-03-19 03:04:55"))), rows(new JSONObject(Map.of("dateAndTime", "2011-06-01 01:01:42"))), rows(new JSONObject(Map.of("dateAndTime", "1901-08-11 04:03:33"))), @@ -563,30 +568,27 @@ public void nested_function_with_date_types_as_object_arrays_within_arrays_test( rows(new JSONObject(Map.of("dateAndTime", "1977-07-13 09:04:41"))), rows(new 
JSONObject(Map.of("dateAndTime", "1933-12-12 05:05:45"))), rows(new JSONObject(Map.of("dateAndTime", "1909-06-17 01:04:21"))), - rows(new JSONArray( - List.of( - Map.of("dateAndTime", "2001-11-11 04:07:44")) - ) - ) - ); + rows(new JSONArray(List.of(Map.of("dateAndTime", "2001-11-11 04:07:44"))))); } @Test public void nested_function_all_subfields_in_wrong_clause() { String query = "SELECT * FROM " + TEST_INDEX_NESTED_TYPE + " ORDER BY nested(message.*)"; - Exception exception = assertThrows(RuntimeException.class, () -> - executeJdbcRequest(query)); - - assertTrue(exception.getMessage().contains("" + - "{\n" + - " \"error\": {\n" + - " \"reason\": \"There was internal problem at backend\",\n" + - " \"details\": \"Invalid use of expression nested(message.*)\",\n" + - " \"type\": \"UnsupportedOperationException\"\n" + - " },\n" + - " \"status\": 503\n" + - "}" - )); + Exception exception = assertThrows(RuntimeException.class, () -> executeJdbcRequest(query)); + + assertTrue( + exception + .getMessage() + .contains( + "" + + "{\n" + + " \"error\": {\n" + + " \"reason\": \"There was internal problem at backend\",\n" + + " \"details\": \"Invalid use of expression nested(message.*)\",\n" + + " \"type\": \"UnsupportedOperationException\"\n" + + " },\n" + + " \"status\": 503\n" + + "}")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/NowLikeFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/NowLikeFunctionIT.java index de3dd0fe98..547c88859e 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/NowLikeFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/NowLikeFunctionIT.java @@ -77,8 +77,7 @@ public NowLikeFunctionIT( @Name("constValue") Boolean constValue, @Name("referenceGetter") Supplier referenceGetter, @Name("parser") BiFunction parser, - @Name("serializationPatternStr") String serializationPatternStr - ) { + @Name("serializationPatternStr") String serializationPatternStr) { this.name = name; this.hasFsp = 
hasFsp; this.hasShortcut = hasShortcut; @@ -90,56 +89,104 @@ public NowLikeFunctionIT( @ParametersFactory(argumentFormatting = "%1$s") public static Iterable compareTwoDates() { - return Arrays.asList($$( - $("now", false, false, true, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("current_timestamp", false, false, true, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("localtimestamp", false, false, true, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("localtime", false, false, true, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("sysdate", true, false, false, - (Supplier) LocalDateTime::now, - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss"), - $("curtime", false, false, false, - (Supplier) LocalTime::now, - (BiFunction) LocalTime::parse, - "HH:mm:ss"), - $("current_time", false, false, false, - (Supplier) LocalTime::now, - (BiFunction) LocalTime::parse, - "HH:mm:ss"), - $("curdate", false, false, false, - (Supplier) LocalDate::now, - (BiFunction) LocalDate::parse, - "uuuu-MM-dd"), - $("current_date", false, false, false, - (Supplier) LocalDate::now, - (BiFunction) LocalDate::parse, - "uuuu-MM-dd"), - $("utc_date", false, false, true, - (Supplier) (() -> utcDateTimeNow().toLocalDate()), - (BiFunction) LocalDate::parse, - "uuuu-MM-dd"), - $("utc_time", false, false, true, - (Supplier) (() -> utcDateTimeNow().toLocalTime()), - (BiFunction) LocalTime::parse, - "HH:mm:ss"), - $("utc_timestamp", false, false, true, - (Supplier) (NowLikeFunctionIT::utcDateTimeNow), - (BiFunction) LocalDateTime::parse, - "uuuu-MM-dd HH:mm:ss") - )); + return Arrays.asList( + $$( + $( + "now", + false, + false, + true, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "current_timestamp", + false, + false, + true, + 
(Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "localtimestamp", + false, + false, + true, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "localtime", + false, + false, + true, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "sysdate", + true, + false, + false, + (Supplier) LocalDateTime::now, + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"), + $( + "curtime", + false, + false, + false, + (Supplier) LocalTime::now, + (BiFunction) LocalTime::parse, + "HH:mm:ss"), + $( + "current_time", + false, + false, + false, + (Supplier) LocalTime::now, + (BiFunction) LocalTime::parse, + "HH:mm:ss"), + $( + "curdate", + false, + false, + false, + (Supplier) LocalDate::now, + (BiFunction) LocalDate::parse, + "uuuu-MM-dd"), + $( + "current_date", + false, + false, + false, + (Supplier) LocalDate::now, + (BiFunction) LocalDate::parse, + "uuuu-MM-dd"), + $( + "utc_date", + false, + false, + true, + (Supplier) (() -> utcDateTimeNow().toLocalDate()), + (BiFunction) LocalDate::parse, + "uuuu-MM-dd"), + $( + "utc_time", + false, + false, + true, + (Supplier) (() -> utcDateTimeNow().toLocalTime()), + (BiFunction) LocalTime::parse, + "HH:mm:ss"), + $( + "utc_timestamp", + false, + false, + true, + (Supplier) (NowLikeFunctionIT::utcDateTimeNow), + (BiFunction) LocalDateTime::parse, + "uuuu-MM-dd HH:mm:ss"))); } private long getDiff(Temporal sample, Temporal reference) { @@ -150,14 +197,14 @@ private long getDiff(Temporal sample, Temporal reference) { } public static LocalDateTime utcDateTimeNow() { - ZonedDateTime zonedDateTime = - LocalDateTime.now().atZone(TimeZone.getDefault().toZoneId()); + ZonedDateTime zonedDateTime = LocalDateTime.now().atZone(TimeZone.getDefault().toZoneId()); return zonedDateTime.withZoneSameInstant(ZoneId.of("UTC")).toLocalDateTime(); } @Test public void testNowLikeFunctions() throws 
IOException { - var serializationPattern = new DateTimeFormatterBuilder() + var serializationPattern = + new DateTimeFormatterBuilder() .appendPattern(serializationPatternStr) .optionalStart() .appendFraction(ChronoField.NANO_OF_SECOND, 0, 9, true) @@ -167,15 +214,16 @@ public void testNowLikeFunctions() throws IOException { double delta = 2d; // acceptable time diff, secs if (reference instanceof LocalDate) delta = 1d; // Max date delta could be 1 if test runs on the very edge of two days - // We ignore probability of a test run on edge of month or year to simplify the checks + // We ignore probability of a test run on edge of month or year to simplify the checks - var calls = new ArrayList() {{ - add(name + "()"); - }}; - if (hasShortcut) - calls.add(name); - if (hasFsp) - calls.add(name + "(0)"); + var calls = + new ArrayList() { + { + add(name + "()"); + } + }; + if (hasShortcut) calls.add(name); + if (hasFsp) calls.add(name + "(0)"); // Column order is: func(), func, func(0) // shortcut ^ fsp ^ @@ -185,20 +233,25 @@ public void testNowLikeFunctions() throws IOException { JSONArray firstRow = rows.getJSONArray(0); for (int i = 0; i < rows.length(); i++) { var row = rows.getJSONArray(i); - if (constValue) - assertTrue(firstRow.similar(row)); + if (constValue) assertTrue(firstRow.similar(row)); int column = 0; - assertEquals(0, - getDiff(reference, parser.apply(row.getString(column++), serializationPattern)), delta); + assertEquals( + 0, + getDiff(reference, parser.apply(row.getString(column++), serializationPattern)), + delta); if (hasShortcut) { - assertEquals(0, - getDiff(reference, parser.apply(row.getString(column++), serializationPattern)), delta); + assertEquals( + 0, + getDiff(reference, parser.apply(row.getString(column++), serializationPattern)), + delta); } if (hasFsp) { - assertEquals(0, - getDiff(reference, parser.apply(row.getString(column), serializationPattern)), delta); + assertEquals( + 0, + getDiff(reference, parser.apply(row.getString(column), 
serializationPattern)), + delta); } } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/NullLiteralIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/NullLiteralIT.java index b8bf0963b5..f885b6d4e0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/NullLiteralIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/NullLiteralIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.util.MatcherUtils.rows; @@ -34,28 +33,22 @@ public void testNullLiteralSchema() { @Test public void testNullLiteralInOperator() { - verifyDataRows( - query("SELECT NULL = NULL, NULL AND TRUE"), - rows(null, null)); + verifyDataRows(query("SELECT NULL = NULL, NULL AND TRUE"), rows(null, null)); } @Test public void testNullLiteralInFunction() { - verifyDataRows( - query("SELECT ABS(NULL), POW(2, FLOOR(NULL))"), - rows(null, null)); + verifyDataRows(query("SELECT ABS(NULL), POW(2, FLOOR(NULL))"), rows(null, null)); } @Test public void testNullLiteralInInterval() { verifyDataRows( query("SELECT INTERVAL NULL DAY, INTERVAL 60 * 60 * 24 * (NULL - FLOOR(NULL)) SECOND"), - rows(null, null) - ); + rows(null, null)); } private JSONObject query(String sql) { return new JSONObject(executeQuery(sql, "jdbc")); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationBlackboxIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationBlackboxIT.java index 2a34dabd79..84289d8f57 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationBlackboxIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationBlackboxIT.java @@ -3,15 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import java.io.IOException; import java.util.ArrayList; import java.util.List; - -import 
com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import lombok.SneakyThrows; import org.json.JSONArray; import org.json.JSONObject; @@ -28,8 +26,7 @@ public class PaginationBlackboxIT extends SQLIntegTestCase { private final Index index; private final Integer pageSize; - public PaginationBlackboxIT(@Name("index") Index index, - @Name("pageSize") Integer pageSize) { + public PaginationBlackboxIT(@Name("index") Index index, @Name("pageSize") Integer pageSize) { this.index = index; this.pageSize = pageSize; } @@ -46,7 +43,7 @@ public static Iterable compareTwoDates() { var testData = new ArrayList(); for (var index : indices) { for (var pageSize : pageSizes) { - testData.add(new Object[] { index, pageSize }); + testData.add(new Object[] {index, pageSize}); } } return testData; @@ -65,14 +62,19 @@ public void test_pagination_blackbox() { var responseCounter = 1; this.logger.info(testReportPrefix + "first response"); - response = new JSONObject(executeFetchQuery( - String.format("select * from %s", index.getName()), pageSize, "jdbc")); + response = + new JSONObject( + executeFetchQuery( + String.format("select * from %s", index.getName()), pageSize, "jdbc")); - var cursor = response.has("cursor")? response.getString("cursor") : ""; + var cursor = response.has("cursor") ? 
response.getString("cursor") : ""; do { - this.logger.info(testReportPrefix - + String.format("subsequent response %d/%d", responseCounter++, (indexSize / pageSize) + 1)); - assertTrue("Paged response schema doesn't match to non-paged", + this.logger.info( + testReportPrefix + + String.format( + "subsequent response %d/%d", responseCounter++, (indexSize / pageSize) + 1)); + assertTrue( + "Paged response schema doesn't match to non-paged", schema.similar(response.getJSONArray("schema"))); rowsReturned += response.getInt("size"); @@ -89,13 +91,17 @@ public void test_pagination_blackbox() { cursor = ""; } - } while(!cursor.isEmpty()); - assertTrue("Paged response schema doesn't match to non-paged", + } while (!cursor.isEmpty()); + assertTrue( + "Paged response schema doesn't match to non-paged", schema.similar(response.getJSONArray("schema"))); - assertEquals(testReportPrefix + "Paged responses return another row count that non-paged", - indexSize, rowsReturned); - assertTrue(testReportPrefix + "Paged accumulated result has other rows than non-paged", + assertEquals( + testReportPrefix + "Paged responses return another row count that non-paged", + indexSize, + rowsReturned); + assertTrue( + testReportPrefix + "Paged accumulated result has other rows than non-paged", rows.similar(rowsPaged)); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFallbackIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFallbackIT.java index 1f97ddefd1..dfb0bb2080 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFallbackIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFallbackIT.java @@ -11,7 +11,6 @@ import static org.opensearch.sql.util.TestUtils.verifyIsV2Cursor; import java.io.IOException; -import org.json.JSONObject; import org.junit.Test; import org.opensearch.sql.legacy.SQLIntegTestCase; import org.opensearch.sql.util.TestUtils; @@ -37,23 +36,24 @@ public void testSelectAll() throws IOException { 
@Test public void testSelectWithOpenSearchFuncInFilter() throws IOException { - var response = executeQueryTemplate( - "SELECT * FROM %s WHERE `11` = match_phrase('96')", TEST_INDEX_ONLINE); + var response = + executeQueryTemplate("SELECT * FROM %s WHERE `11` = match_phrase('96')", TEST_INDEX_ONLINE); verifyIsV2Cursor(response); } @Test public void testSelectWithHighlight() throws IOException { - var response = executeQueryTemplate( - "SELECT highlight(`11`) FROM %s WHERE match_query(`11`, '96')", TEST_INDEX_ONLINE); + var response = + executeQueryTemplate( + "SELECT highlight(`11`) FROM %s WHERE match_query(`11`, '96')", TEST_INDEX_ONLINE); verifyIsV2Cursor(response); } @Test public void testSelectWithFullTextSearch() throws IOException { - var response = executeQueryTemplate( - "SELECT * FROM %s WHERE match_phrase(`11`, '96')", TEST_INDEX_ONLINE); + var response = + executeQueryTemplate("SELECT * FROM %s WHERE match_phrase(`11`, '96')", TEST_INDEX_ONLINE); verifyIsV2Cursor(response); } @@ -65,8 +65,7 @@ public void testSelectFromIndexWildcard() throws IOException { @Test public void testSelectFromDataSource() throws IOException { - var response = executeQueryTemplate("SELECT * FROM @opensearch.%s", - TEST_INDEX_ONLINE); + var response = executeQueryTemplate("SELECT * FROM @opensearch.%s", TEST_INDEX_ONLINE); verifyIsV2Cursor(response); } @@ -78,31 +77,29 @@ public void testSelectColumnReference() throws IOException { @Test public void testSubquery() throws IOException { - var response = executeQueryTemplate("SELECT `107` from (SELECT * FROM %s)", - TEST_INDEX_ONLINE); + var response = executeQueryTemplate("SELECT `107` from (SELECT * FROM %s)", TEST_INDEX_ONLINE); verifyIsV1Cursor(response); } @Test public void testSelectExpression() throws IOException { - var response = executeQueryTemplate("SELECT 1 + 1 - `107` from %s", - TEST_INDEX_ONLINE); + var response = executeQueryTemplate("SELECT 1 + 1 - `107` from %s", TEST_INDEX_ONLINE); verifyIsV2Cursor(response); } 
@Test public void testGroupBy() throws IOException { // GROUP BY is not paged by either engine. - var response = executeQueryTemplate("SELECT * FROM %s GROUP BY `107`", - TEST_INDEX_ONLINE); + var response = executeQueryTemplate("SELECT * FROM %s GROUP BY `107`", TEST_INDEX_ONLINE); TestUtils.verifyNoCursor(response); } @Test public void testGroupByHaving() throws IOException { // GROUP BY is not paged by either engine. - var response = executeQueryTemplate("SELECT * FROM %s GROUP BY `107` HAVING `107` > 400", - TEST_INDEX_ONLINE); + var response = + executeQueryTemplate( + "SELECT * FROM %s GROUP BY `107` HAVING `107` > 400", TEST_INDEX_ONLINE); TestUtils.verifyNoCursor(response); } @@ -114,15 +111,13 @@ public void testLimit() throws IOException { @Test public void testLimitOffset() throws IOException { - var response = executeQueryTemplate("SELECT * FROM %s LIMIT 8 OFFSET 4", - TEST_INDEX_ONLINE); + var response = executeQueryTemplate("SELECT * FROM %s LIMIT 8 OFFSET 4", TEST_INDEX_ONLINE); verifyIsV1Cursor(response); } @Test public void testOrderBy() throws IOException { - var response = executeQueryTemplate("SELECT * FROM %s ORDER By `107`", - TEST_INDEX_ONLINE); + var response = executeQueryTemplate("SELECT * FROM %s ORDER By `107`", TEST_INDEX_ONLINE); verifyIsV2Cursor(response); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFilterIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFilterIT.java index 6ebc05efad..038596cf57 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFilterIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFilterIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import com.carrotsearch.randomizedtesting.annotations.Name; @@ -22,38 +21,48 @@ import org.opensearch.sql.legacy.TestsConstants; /** - * Test pagination with `WHERE` clause using a parametrized test. 
- * See constructor {@link #PaginationFilterIT} for list of parameters - * and {@link #generateParameters} and {@link #STATEMENT_TO_NUM_OF_PAGES} - * to see how these parameters are generated. + * Test pagination with `WHERE` clause using a parametrized test. See constructor {@link + * #PaginationFilterIT} for list of parameters and {@link #generateParameters} and {@link + * #STATEMENT_TO_NUM_OF_PAGES} to see how these parameters are generated. */ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) public class PaginationFilterIT extends SQLIntegTestCase { /** - * Map of the OS-SQL statement sent to SQL-plugin, and the total number - * of expected hits (on all pages) from the filtered result + * Map of the OS-SQL statement sent to SQL-plugin, and the total number of expected hits (on all + * pages) from the filtered result */ - final private static Map STATEMENT_TO_NUM_OF_PAGES = Map.of( - "SELECT * FROM " + TestsConstants.TEST_INDEX_ACCOUNT, 1000, - "SELECT * FROM " + TestsConstants.TEST_INDEX_ACCOUNT + " WHERE match(address, 'street')", 385, - "SELECT * FROM " + TestsConstants.TEST_INDEX_ACCOUNT + " WHERE match(address, 'street') AND match(city, 'Ola')", 1, - "SELECT firstname, lastname, highlight(address) FROM " + TestsConstants.TEST_INDEX_ACCOUNT + " WHERE match(address, 'street') AND match(state, 'OH')", 5, - "SELECT firstname, lastname, highlight('*') FROM " + TestsConstants.TEST_INDEX_ACCOUNT + " WHERE match(address, 'street') AND match(state, 'OH')", 5, - "SELECT * FROM " + TestsConstants.TEST_INDEX_BEER + " WHERE true", 60, - "SELECT * FROM " + TestsConstants.TEST_INDEX_BEER + " WHERE Id=10", 1, - "SELECT * FROM " + TestsConstants.TEST_INDEX_BEER + " WHERE Id + 5=15", 1, - "SELECT * FROM " + TestsConstants.TEST_INDEX_BANK, 7 - ); + private static final Map STATEMENT_TO_NUM_OF_PAGES = + Map.of( + "SELECT * FROM " + TestsConstants.TEST_INDEX_ACCOUNT, 1000, + "SELECT * FROM " + TestsConstants.TEST_INDEX_ACCOUNT + " WHERE match(address, 
'street')", + 385, + "SELECT * FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " WHERE match(address, 'street') AND match(city, 'Ola')", + 1, + "SELECT firstname, lastname, highlight(address) FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " WHERE match(address, 'street') AND match(state, 'OH')", + 5, + "SELECT firstname, lastname, highlight('*') FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " WHERE match(address, 'street') AND match(state, 'OH')", + 5, + "SELECT * FROM " + TestsConstants.TEST_INDEX_BEER + " WHERE true", 60, + "SELECT * FROM " + TestsConstants.TEST_INDEX_BEER + " WHERE Id=10", 1, + "SELECT * FROM " + TestsConstants.TEST_INDEX_BEER + " WHERE Id + 5=15", 1, + "SELECT * FROM " + TestsConstants.TEST_INDEX_BANK, 7); private final String sqlStatement; private final Integer totalHits; private final Integer pageSize; - public PaginationFilterIT(@Name("statement") String sqlStatement, - @Name("total_hits") Integer totalHits, - @Name("page_size") Integer pageSize) { + public PaginationFilterIT( + @Name("statement") String sqlStatement, + @Name("total_hits") Integer totalHits, + @Name("page_size") Integer pageSize) { this.sqlStatement = sqlStatement; this.totalHits = totalHits; this.pageSize = pageSize; @@ -72,18 +81,18 @@ public static Iterable generateParameters() { List pageSizes = List.of(5, 1000); List testData = new ArrayList(); - STATEMENT_TO_NUM_OF_PAGES.forEach((statement, totalHits) -> { - for (var pageSize : pageSizes) { - testData.add(new Object[] { statement, totalHits, pageSize }); - } - }); + STATEMENT_TO_NUM_OF_PAGES.forEach( + (statement, totalHits) -> { + for (var pageSize : pageSizes) { + testData.add(new Object[] {statement, totalHits, pageSize}); + } + }); return testData; } /** - * Test compares non-paginated results with paginated results - * To ensure that the pushdowns return the same number of hits even - * with filter WHERE pushed down + * Test compares non-paginated results with paginated results To ensure that the pushdowns 
return + * the same number of hits even with filter WHERE pushed down */ @Test @SneakyThrows @@ -93,7 +102,10 @@ public void test_pagination_with_where() { int totalResultsCount = nonPaginatedResponse.getInt("total"); JSONArray rows = nonPaginatedResponse.getJSONArray("datarows"); JSONArray schema = nonPaginatedResponse.getJSONArray("schema"); - var testReportPrefix = String.format("query: %s; total hits: %d; page size: %d || ", sqlStatement, totalResultsCount, pageSize); + var testReportPrefix = + String.format( + "query: %s; total hits: %d; page size: %d || ", + sqlStatement, totalResultsCount, pageSize); assertEquals(totalHits.intValue(), totalResultsCount); var rowsPaged = new JSONArray(); @@ -101,7 +113,8 @@ public void test_pagination_with_where() { var responseCounter = 1; // make first request - with a cursor - JSONObject paginatedResponse = new JSONObject(executeFetchQuery(sqlStatement, pageSize, "jdbc")); + JSONObject paginatedResponse = + new JSONObject(executeFetchQuery(sqlStatement, pageSize, "jdbc")); this.logger.info(testReportPrefix + ""); do { var cursor = paginatedResponse.has("cursor") ? 
paginatedResponse.getString("cursor") : null; @@ -117,27 +130,34 @@ public void test_pagination_with_where() { if (cursor != null) { assertTrue( - testReportPrefix + "Cursor returned from legacy engine", - cursor.startsWith("n:")); + testReportPrefix + "Cursor returned from legacy engine", cursor.startsWith("n:")); paginatedResponse = executeCursorQuery(cursor); - this.logger.info(testReportPrefix - + String.format("response %d/%d", responseCounter++, (totalResultsCount / pageSize) + 1)); + this.logger.info( + testReportPrefix + + String.format( + "response %d/%d", responseCounter++, (totalResultsCount / pageSize) + 1)); } else { break; } } while (true); // last page expected results: - assertEquals(testReportPrefix + "Last page", - totalHits % pageSize, paginatedResponse.getInt("size")); - assertEquals(testReportPrefix + "Last page", - totalHits % pageSize, paginatedResponse.getJSONArray("datarows").length()); + assertEquals( + testReportPrefix + "Last page", totalHits % pageSize, paginatedResponse.getInt("size")); + assertEquals( + testReportPrefix + "Last page", + totalHits % pageSize, + paginatedResponse.getJSONArray("datarows").length()); // compare paginated and non-paginated counts - assertEquals(testReportPrefix + "Paged responses returned an unexpected total", - totalResultsCount, pagedSize); - assertEquals(testReportPrefix + "Paged responses returned an unexpected rows count", - rows.length(), rowsPaged.length()); + assertEquals( + testReportPrefix + "Paged responses returned an unexpected total", + totalResultsCount, + pagedSize); + assertEquals( + testReportPrefix + "Paged responses returned an unexpected rows count", + rows.length(), + rowsPaged.length()); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationIT.java index 69a3607d56..49ef7c583e 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationIT.java +++ 
b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationIT.java @@ -9,10 +9,8 @@ import static org.junit.Assert.assertTrue; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_CALCS; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ONLINE; -import static org.opensearch.sql.legacy.plugin.RestSqlAction.EXPLAIN_API_ENDPOINT; import java.io.IOException; - import lombok.SneakyThrows; import org.json.JSONArray; import org.json.JSONObject; @@ -75,12 +73,16 @@ public void testCursorTimeout() throws IOException, InterruptedException { ResponseException exception = expectThrows(ResponseException.class, () -> executeCursorQuery(cursor)); response = new JSONObject(TestUtils.getResponseBody(exception.getResponse())); - assertEquals(response.getJSONObject("error").getString("reason"), + assertEquals( + response.getJSONObject("error").getString("reason"), "Error occurred in OpenSearch engine: all shards failed"); - assertTrue(response.getJSONObject("error").getString("details") - .contains("SearchContextMissingException[No search context found for id")); - assertEquals(response.getJSONObject("error").getString("type"), - "SearchPhaseExecutionException"); + assertTrue( + response + .getJSONObject("error") + .getString("details") + .contains("SearchContextMissingException[No search context found for id")); + assertEquals( + response.getJSONObject("error").getString("type"), "SearchPhaseExecutionException"); wipeAllClusterSettings(); } @@ -108,12 +110,16 @@ public void testCloseCursor() { ResponseException exception = expectThrows(ResponseException.class, () -> executeCursorQuery(cursor)); response = new JSONObject(TestUtils.getResponseBody(exception.getResponse())); - assertEquals(response.getJSONObject("error").getString("reason"), + assertEquals( + response.getJSONObject("error").getString("reason"), "Error occurred in OpenSearch engine: all shards failed"); - assertTrue(response.getJSONObject("error").getString("details") - 
.contains("SearchContextMissingException[No search context found for id")); - assertEquals(response.getJSONObject("error").getString("type"), - "SearchPhaseExecutionException"); + assertTrue( + response + .getJSONObject("error") + .getString("details") + .contains("SearchContextMissingException[No search context found for id")); + assertEquals( + response.getJSONObject("error").getString("type"), "SearchPhaseExecutionException"); } @Test @@ -136,7 +142,8 @@ public void testQueryWithOrderBy() { var cursor = response.getString("cursor"); do { assertTrue(cursor.isEmpty() || cursor.startsWith("n:")); - assertTrue("Paged response schema doesn't match to non-paged", + assertTrue( + "Paged response schema doesn't match to non-paged", schema.similar(response.getJSONArray("schema"))); rowsReturnedAsc += response.getInt("size"); @@ -153,7 +160,7 @@ public void testQueryWithOrderBy() { cursor = ""; } - } while(!cursor.isEmpty()); + } while (!cursor.isEmpty()); query = String.format("SELECT * from %s ORDER BY num1 DESC", TEST_INDEX_CALCS); response = new JSONObject(executeFetchQuery(query, 7, "jdbc")); @@ -162,7 +169,8 @@ public void testQueryWithOrderBy() { cursor = response.getString("cursor"); do { assertTrue(cursor.isEmpty() || cursor.startsWith("n:")); - assertTrue("Paged response schema doesn't match to non-paged", + assertTrue( + "Paged response schema doesn't match to non-paged", schema.similar(response.getJSONArray("schema"))); rowsReturnedDesc += response.getInt("size"); @@ -179,19 +187,22 @@ public void testQueryWithOrderBy() { cursor = ""; } - } while(!cursor.isEmpty()); + } while (!cursor.isEmpty()); - assertEquals("Paged responses return another row count that non-paged", - indexSize, rowsReturnedAsc); - assertEquals("Paged responses return another row count that non-paged", - indexSize, rowsReturnedDesc); - assertTrue("Paged accumulated result has other rows than non-paged", + assertEquals( + "Paged responses return another row count that non-paged", indexSize, 
rowsReturnedAsc); + assertEquals( + "Paged responses return another row count that non-paged", indexSize, rowsReturnedDesc); + assertTrue( + "Paged accumulated result has other rows than non-paged", rows.toList().containsAll(rowsPagedAsc.toList())); - assertTrue("Paged accumulated result has other rows than non-paged", + assertTrue( + "Paged accumulated result has other rows than non-paged", rows.toList().containsAll(rowsPagedDesc.toList())); for (int row = 0; row < indexSize; row++) { - assertTrue(String.format("Row %d: row order is incorrect", row), + assertTrue( + String.format("Row %d: row order is incorrect", row), rowsPagedAsc.getJSONArray(row).similar(rowsPagedDesc.getJSONArray(indexSize - row - 1))); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationWindowIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationWindowIT.java index be208cd137..246cbfc4a0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationWindowIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationWindowIT.java @@ -40,10 +40,11 @@ public void testFetchSizeLessThanMaxResultWindow() throws IOException { } while (response.has("cursor")); numRows += response.getJSONArray("datarows").length(); - var countRows = executeJdbcRequest("SELECT COUNT(*) FROM " + TEST_INDEX_PHRASE) - .getJSONArray("datarows") - .getJSONArray(0) - .get(0); + var countRows = + executeJdbcRequest("SELECT COUNT(*) FROM " + TEST_INDEX_PHRASE) + .getJSONArray("datarows") + .getJSONArray(0) + .get(0); assertEquals(countRows, numRows); } @@ -62,10 +63,11 @@ public void testQuerySizeLimitDoesNotEffectTotalRowsReturned() throws IOExceptio response = executeCursorQuery(cursor); } while (response.has("cursor")); numRows += response.getJSONArray("datarows").length(); - var countRows = executeJdbcRequest("SELECT COUNT(*) FROM " + TEST_INDEX_PHRASE) - .getJSONArray("datarows") - .getJSONArray(0) - .get(0); + var countRows = + executeJdbcRequest("SELECT COUNT(*) 
FROM " + TEST_INDEX_PHRASE) + .getJSONArray("datarows") + .getJSONArray(0) + .get(0); assertEquals(countRows, numRows); assertTrue(numRows > querySizeLimit); } @@ -74,12 +76,10 @@ public void testQuerySizeLimitDoesNotEffectTotalRowsReturned() throws IOExceptio public void testQuerySizeLimitDoesNotEffectPageSize() throws IOException { setQuerySizeLimit(3); setMaxResultWindow(TEST_INDEX_PHRASE, 4); - var response - = executeQueryTemplate("SELECT * FROM %s", TEST_INDEX_PHRASE, 4); + var response = executeQueryTemplate("SELECT * FROM %s", TEST_INDEX_PHRASE, 4); assertEquals(4, response.getInt("size")); - var response2 - = executeQueryTemplate("SELECT * FROM %s", TEST_INDEX_PHRASE, 2); + var response2 = executeQueryTemplate("SELECT * FROM %s", TEST_INDEX_PHRASE, 2); assertEquals(2, response2.getInt("size")); } @@ -87,11 +87,9 @@ public void testQuerySizeLimitDoesNotEffectPageSize() throws IOException { public void testFetchSizeLargerThanResultWindowFails() throws IOException { final int window = 2; setMaxResultWindow(TEST_INDEX_PHRASE, 2); - assertThrows(ResponseException.class, - () -> executeQueryTemplate("SELECT * FROM %s", - TEST_INDEX_PHRASE, window + 1)); + assertThrows( + ResponseException.class, + () -> executeQueryTemplate("SELECT * FROM %s", TEST_INDEX_PHRASE, window + 1)); resetMaxResultWindow(TEST_INDEX_PHRASE); } - - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PositionFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PositionFunctionIT.java index f51a3a0977..6a9d40e7c3 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/PositionFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/PositionFunctionIT.java @@ -5,16 +5,16 @@ package org.opensearch.sql.sql; -import org.json.JSONObject; -import org.junit.Test; -import org.opensearch.sql.legacy.SQLIntegTestCase; -import org.opensearch.sql.legacy.TestsConstants; - import static org.opensearch.sql.util.MatcherUtils.rows; import static 
org.opensearch.sql.util.MatcherUtils.schema; import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; import static org.opensearch.sql.util.MatcherUtils.verifySchema; +import org.json.JSONObject; +import org.junit.Test; +import org.opensearch.sql.legacy.SQLIntegTestCase; +import org.opensearch.sql.legacy.TestsConstants; + public class PositionFunctionIT extends SQLIntegTestCase { @Override @@ -26,19 +26,29 @@ protected void init() throws Exception { @Test public void position_function_test() { String query = "SELECT firstname, position('a' IN firstname) FROM %s"; - JSONObject response = executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_PEOPLE2)); + JSONObject response = + executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_PEOPLE2)); - verifySchema(response, schema("firstname", null, "keyword"), - schema("position('a' IN firstname)", null, "integer")); + verifySchema( + response, + schema("firstname", null, "keyword"), + schema("position('a' IN firstname)", null, "integer")); assertEquals(12, response.getInt("total")); - verifyDataRows(response, - rows("Daenerys", 2), rows("Hattie", 2), - rows("Nanette", 2), rows("Dale", 2), - rows("Elinor", 0), rows("Virginia", 8), - rows("Dillard", 5), rows("Mcgee", 0), - rows("Aurelia", 7), rows("Fulton", 0), - rows("Burton", 0), rows("Josie", 0)); + verifyDataRows( + response, + rows("Daenerys", 2), + rows("Hattie", 2), + rows("Nanette", 2), + rows("Dale", 2), + rows("Elinor", 0), + rows("Virginia", 8), + rows("Dillard", 5), + rows("Mcgee", 0), + rows("Aurelia", 7), + rows("Fulton", 0), + rows("Burton", 0), + rows("Josie", 0)); } @Test @@ -46,20 +56,31 @@ public void position_function_with_nulls_test() { String query = "SELECT str2, position('ee' IN str2) FROM %s"; JSONObject response = executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_CALCS)); - verifySchema(response, schema("str2", null, "keyword"), - schema("position('ee' IN str2)", null, "integer")); + verifySchema( + 
response, + schema("str2", null, "keyword"), + schema("position('ee' IN str2)", null, "integer")); assertEquals(17, response.getInt("total")); - verifyDataRows(response, - rows("one", 0), rows("two", 0), - rows("three", 4), rows(null, null), - rows("five", 0), rows("six", 0), - rows(null, null), rows("eight", 0), - rows("nine", 0), rows("ten", 0), - rows("eleven", 0), rows("twelve", 0), - rows(null, null), rows("fourteen", 6), - rows("fifteen", 5), rows("sixteen", 5), - rows(null, null)); + verifyDataRows( + response, + rows("one", 0), + rows("two", 0), + rows("three", 4), + rows(null, null), + rows("five", 0), + rows("six", 0), + rows(null, null), + rows("eight", 0), + rows("nine", 0), + rows("ten", 0), + rows("eleven", 0), + rows("twelve", 0), + rows(null, null), + rows("fourteen", 6), + rows("fifteen", 5), + rows("sixteen", 5), + rows(null, null)); } @Test @@ -86,7 +107,8 @@ public void position_function_with_only_fields_as_args_test() { @Test public void position_function_with_function_as_arg_test() { - String query = "SELECT position(upper(str3) IN str1) FROM %s WHERE str1 LIKE 'BINDING SUPPLIES'"; + String query = + "SELECT position(upper(str3) IN str1) FROM %s WHERE str1 LIKE 'BINDING SUPPLIES'"; JSONObject response = executeJdbcRequest(String.format(query, TestsConstants.TEST_INDEX_CALCS)); verifySchema(response, schema("position(upper(str3) IN str1)", null, "integer")); @@ -110,17 +132,21 @@ public void position_function_in_where_clause_test() { public void position_function_with_null_args_test() { String query1 = "SELECT str2, position(null IN str2) FROM %s WHERE str2 IN ('one')"; String query2 = "SELECT str2, position(str2 IN null) FROM %s WHERE str2 IN ('one')"; - JSONObject response1 = executeJdbcRequest(String.format(query1, TestsConstants.TEST_INDEX_CALCS)); - JSONObject response2 = executeJdbcRequest(String.format(query2, TestsConstants.TEST_INDEX_CALCS)); - - verifySchema(response1, - schema("str2", null, "keyword"), - schema("position(null IN 
str2)", null, "integer")); + JSONObject response1 = + executeJdbcRequest(String.format(query1, TestsConstants.TEST_INDEX_CALCS)); + JSONObject response2 = + executeJdbcRequest(String.format(query2, TestsConstants.TEST_INDEX_CALCS)); + + verifySchema( + response1, + schema("str2", null, "keyword"), + schema("position(null IN str2)", null, "integer")); assertEquals(1, response1.getInt("total")); - verifySchema(response2, - schema("str2", null, "keyword"), - schema("position(str2 IN null)", null, "integer")); + verifySchema( + response2, + schema("str2", null, "keyword"), + schema("position(str2 IN null)", null, "integer")); assertEquals(1, response2.getInt("total")); verifyDataRows(response1, rows("one", null)); diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PreparedStatementIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PreparedStatementIT.java index 38ff32b0d7..8200f64b66 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/PreparedStatementIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/PreparedStatementIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import org.json.JSONObject; @@ -20,16 +19,21 @@ protected void init() throws Exception { @Test public void testPreparedStatement() { - JSONObject response = new JSONObject( - executeQuery(String.format("{\n" - + " \"query\": \"SELECT state FROM %s WHERE state = ? GROUP BY state\",\n" - + " \"parameters\": [\n" - + " {\n" - + " \"type\": \"string\",\n" - + " \"value\": \"WA\"\n" - + " }\n" - + " ]\n" - + "}", TestsConstants.TEST_INDEX_ACCOUNT), "jdbc")); + JSONObject response = + new JSONObject( + executeQuery( + String.format( + "{\n" + + " \"query\": \"SELECT state FROM %s WHERE state = ? 
GROUP BY state\",\n" + + " \"parameters\": [\n" + + " {\n" + + " \"type\": \"string\",\n" + + " \"value\": \"WA\"\n" + + " }\n" + + " ]\n" + + "}", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc")); assertFalse(response.getJSONArray("datarows").isEmpty()); } @@ -39,5 +43,4 @@ protected String makeRequest(String query) { // Avoid wrap with "query" again return query; } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/QueryIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/QueryIT.java index e61593eb21..b2991138c4 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/QueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/QueryIT.java @@ -13,72 +13,66 @@ import org.opensearch.sql.legacy.SQLIntegTestCase; public class QueryIT extends SQLIntegTestCase { - @Override - public void init() throws IOException { - loadIndex(Index.BEER); - } + @Override + public void init() throws IOException { + loadIndex(Index.BEER); + } - @Test - public void all_fields_test() throws IOException { - String query = "SELECT * FROM " - + TEST_INDEX_BEER + " WHERE query('*:taste')"; - JSONObject result = executeJdbcRequest(query); - assertEquals(16, result.getInt("total")); - } + @Test + public void all_fields_test() throws IOException { + String query = "SELECT * FROM " + TEST_INDEX_BEER + " WHERE query('*:taste')"; + JSONObject result = executeJdbcRequest(query); + assertEquals(16, result.getInt("total")); + } - @Test - public void mandatory_params_test() throws IOException { - String query = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query('Tags:taste OR Body:taste')"; - JSONObject result = executeJdbcRequest(query); - assertEquals(16, result.getInt("total")); - } + @Test + public void mandatory_params_test() throws IOException { + String query = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query('Tags:taste OR Body:taste')"; + JSONObject result = executeJdbcRequest(query); + assertEquals(16, result.getInt("total")); + } - @Test - public void 
all_params_test() throws IOException { - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE query('Tags:taste', escape=false," - + "allow_leading_wildcard=true, enable_position_increments=true," - + "fuzziness= 1, fuzzy_rewrite='constant_score', max_determinized_states = 10000," - + "analyzer='standard', analyze_wildcard = false, quote_field_suffix = '.exact'," - + "auto_generate_synonyms_phrase_query=true, boost = 0.77," - + "quote_analyzer='standard', phrase_slop=0, rewrite='constant_score', type='best_fields'," - + "tie_breaker=0.3, time_zone='Canada/Pacific', default_operator='or'," - + "fuzzy_transpositions = false, lenient = true, fuzzy_max_expansions = 25," - + "minimum_should_match = '2<-25% 9<-3', fuzzy_prefix_length = 7);"; - JSONObject result = executeJdbcRequest(query); - assertEquals(8, result.getInt("total")); - } + @Test + public void all_params_test() throws IOException { + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE query('Tags:taste', escape=false,allow_leading_wildcard=true," + + " enable_position_increments=true,fuzziness= 1, fuzzy_rewrite='constant_score'," + + " max_determinized_states = 10000,analyzer='standard', analyze_wildcard = false," + + " quote_field_suffix = '.exact',auto_generate_synonyms_phrase_query=true, boost =" + + " 0.77,quote_analyzer='standard', phrase_slop=0, rewrite='constant_score'," + + " type='best_fields',tie_breaker=0.3, time_zone='Canada/Pacific'," + + " default_operator='or',fuzzy_transpositions = false, lenient = true," + + " fuzzy_max_expansions = 25,minimum_should_match = '2<-25% 9<-3', fuzzy_prefix_length" + + " = 7);"; + JSONObject result = executeJdbcRequest(query); + assertEquals(8, result.getInt("total")); + } - @Test - public void wildcard_test() throws IOException { - String query1 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query('Tags:taste')"; - JSONObject result1 = executeJdbcRequest(query1); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE 
query('*:taste')"; - JSONObject result2 = executeJdbcRequest(query2); - assertNotEquals(result2.getInt("total"), result1.getInt("total")); + @Test + public void wildcard_test() throws IOException { + String query1 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query('Tags:taste')"; + JSONObject result1 = executeJdbcRequest(query1); + String query2 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query('*:taste')"; + JSONObject result2 = executeJdbcRequest(query2); + assertNotEquals(result2.getInt("total"), result1.getInt("total")); - String query3 = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE query('Tags:tas*');"; - JSONObject result3 = executeJdbcRequest(query3); - assertEquals(8, result3.getInt("total")); + String query3 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query('Tags:tas*');"; + JSONObject result3 = executeJdbcRequest(query3); + assertEquals(8, result3.getInt("total")); - String query4 = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE query('Tags:tas?e');"; - JSONObject result4 = executeJdbcRequest(query3); - assertEquals(8, result4.getInt("total")); - } + String query4 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query('Tags:tas?e');"; + JSONObject result4 = executeJdbcRequest(query3); + assertEquals(8, result4.getInt("total")); + } - @Test - public void query_string_and_query_return_the_same_results_test() throws IOException { - String query1 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query('Tags:taste')"; - JSONObject result1 = executeJdbcRequest(query1); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query_string(['Tags'],'taste')"; - JSONObject result2 = executeJdbcRequest(query2); - assertEquals(result2.getInt("total"), result1.getInt("total")); - } + @Test + public void query_string_and_query_return_the_same_results_test() throws IOException { + String query1 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query('Tags:taste')"; + JSONObject result1 = executeJdbcRequest(query1); + String query2 = "SELECT Id FROM 
" + TEST_INDEX_BEER + " WHERE query_string(['Tags'],'taste')"; + JSONObject result2 = executeJdbcRequest(query2); + assertEquals(result2.getInt("total"), result1.getInt("total")); + } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/QueryStringIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/QueryStringIT.java index 398a7a9d94..40ea1dfc98 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/QueryStringIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/QueryStringIT.java @@ -20,48 +20,50 @@ public void init() throws IOException { @Test public void all_fields_test() throws IOException { - String query = "SELECT * FROM " - + TEST_INDEX_BEER + " WHERE query_string([`*`], 'taste')"; + String query = "SELECT * FROM " + TEST_INDEX_BEER + " WHERE query_string([`*`], 'taste')"; JSONObject result = executeJdbcRequest(query); assertEquals(16, result.getInt("total")); } @Test public void mandatory_params_test() throws IOException { - String query = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query_string([\\\"Tags\\\" ^ 1.5, Title, `Body` 4.2], 'taste')"; + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE query_string([\\\"Tags\\\" ^ 1.5, Title, `Body` 4.2], 'taste')"; JSONObject result = executeJdbcRequest(query); assertEquals(16, result.getInt("total")); } @Test public void all_params_test() throws IOException { - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE query_string(['Body', Tags, Title], 'taste beer', escape=false," - + "allow_leading_wildcard=true, enable_position_increments=true," - + "fuzziness= 1, fuzzy_rewrite='constant_score', max_determinized_states = 10000," - + "analyzer='english', analyze_wildcard = false, quote_field_suffix = '.exact'," - + "auto_generate_synonyms_phrase_query=true, boost = 0.77," - + "quote_analyzer='standard', phrase_slop=0, rewrite='constant_score', type='best_fields'," - + "tie_breaker=0.3, time_zone='Canada/Pacific', default_operator='or'," - + 
"fuzzy_transpositions = false, lenient = true, fuzzy_max_expansions = 25," - + "minimum_should_match = '2<-25% 9<-3', fuzzy_prefix_length = 7);"; + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE query_string(['Body', Tags, Title], 'taste beer'," + + " escape=false,allow_leading_wildcard=true," + + " enable_position_increments=true,fuzziness= 1, fuzzy_rewrite='constant_score'," + + " max_determinized_states = 10000,analyzer='english', analyze_wildcard = false," + + " quote_field_suffix = '.exact',auto_generate_synonyms_phrase_query=true, boost =" + + " 0.77,quote_analyzer='standard', phrase_slop=0, rewrite='constant_score'," + + " type='best_fields',tie_breaker=0.3, time_zone='Canada/Pacific'," + + " default_operator='or',fuzzy_transpositions = false, lenient = true," + + " fuzzy_max_expansions = 25,minimum_should_match = '2<-25% 9<-3', fuzzy_prefix_length" + + " = 7);"; JSONObject result = executeJdbcRequest(query); assertEquals(49, result.getInt("total")); } @Test public void wildcard_test() throws IOException { - String query1 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query_string(['Tags'], 'taste')"; + String query1 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query_string(['Tags'], 'taste')"; JSONObject result1 = executeJdbcRequest(query1); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query_string(['T*'], 'taste')"; + String query2 = "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query_string(['T*'], 'taste')"; JSONObject result2 = executeJdbcRequest(query2); assertNotEquals(result2.getInt("total"), result1.getInt("total")); - String query3 = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE query_string(['*Date'], '2014-01-22');"; + String query3 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query_string(['*Date'], '2014-01-22');"; JSONObject result3 = executeJdbcRequest(query3); assertEquals(10, result3.getInt("total")); } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/QueryValidationIT.java 
b/integ-test/src/test/java/org/opensearch/sql/sql/QueryValidationIT.java index 5a16cd3f64..e42b68631f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/QueryValidationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/QueryValidationIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.hamcrest.Matchers.is; @@ -25,28 +24,29 @@ import org.opensearch.sql.legacy.SQLIntegTestCase; /** - * The query validation IT only covers test for error cases that not doable in comparison test. - * For all other tests, comparison test should be favored over manual written test like this. + * The query validation IT only covers test for error cases that not doable in comparison test. For + * all other tests, comparison test should be favored over manual written test like this. */ public class QueryValidationIT extends SQLIntegTestCase { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = ExpectedException.none(); @Override protected void init() throws Exception { loadIndex(Index.ACCOUNT); } - @Ignore("Will add this validation in analyzer later. This test should be enabled once " + - "https://github.com/opensearch-project/sql/issues/910 has been resolved") + @Ignore( + "Will add this validation in analyzer later. 
This test should be enabled once " + + "https://github.com/opensearch-project/sql/issues/910 has been resolved") @Test public void testNonAggregatedSelectColumnMissingInGroupByClause() throws IOException { expectResponseException() .hasStatusCode(BAD_REQUEST) .hasErrorType("SemanticCheckException") - .containsMessage("Expression [state] that contains non-aggregated column " - + "is not present in group by clause") + .containsMessage( + "Expression [state] that contains non-aggregated column " + + "is not present in group by clause") .whenExecute("SELECT state FROM opensearch-sql_test_index_account GROUP BY age"); } @@ -55,8 +55,9 @@ public void testNonAggregatedSelectColumnPresentWithoutGroupByClause() throws IO expectResponseException() .hasStatusCode(BAD_REQUEST) .hasErrorType("SemanticCheckException") - .containsMessage("Explicit GROUP BY clause is required because expression [state] " - + "contains non-aggregated column") + .containsMessage( + "Explicit GROUP BY clause is required because expression [state] " + + "contains non-aggregated column") .whenExecute("SELECT state, AVG(age) FROM opensearch-sql_test_index_account"); } @@ -87,8 +88,7 @@ public ResponseExceptionAssertion expectResponseException() { /** * Response exception assertion helper to assert property value in OpenSearch ResponseException - * and Response inside. This serves as syntax sugar to improve the readability of test - * code. + * and Response inside. This serves as syntax sugar to improve the readability of test code. 
*/ private static class ResponseExceptionAssertion { private final ExpectedException exceptionRule; @@ -100,9 +100,12 @@ private ResponseExceptionAssertion(ExpectedException exceptionRule) { } ResponseExceptionAssertion hasStatusCode(RestStatus code) { - exceptionRule.expect(featureValueOf("statusCode", is(code), - (Function) e -> - RestStatus.fromCode(e.getResponse().getStatusLine().getStatusCode()))); + exceptionRule.expect( + featureValueOf( + "statusCode", + is(code), + (Function) + e -> RestStatus.fromCode(e.getResponse().getStatusLine().getStatusCode()))); return this; } @@ -133,5 +136,4 @@ private static void execute(String query) throws IOException { client().performRequest(request); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/RawFormatIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/RawFormatIT.java index 47b2a1f013..af0256fba2 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/RawFormatIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/RawFormatIT.java @@ -3,10 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; -import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK_CSV_SANITIZE; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK_RAW_SANITIZE; import static org.opensearch.sql.protocol.response.format.FlatResponseFormatter.CONTENT_TYPE; @@ -27,21 +25,27 @@ public void init() throws IOException { @Test public void rawFormatWithPipeFieldTest() { - String result = executeQuery( - String.format(Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_RAW_SANITIZE), "raw"); - assertEquals(StringUtils.format( - "firstname|lastname%n" - + "+Amber JOHnny|Duke Willmington+%n" - + "-Hattie|Bond-%n" - + "=Nanette|Bates=%n" - + "@Dale|Adams@%n" - + "@Elinor|\"Ratliff|||\"%n"), + String result = + executeQuery( + String.format( + Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_RAW_SANITIZE), + "raw"); + assertEquals( + 
StringUtils.format( + "firstname|lastname%n" + + "+Amber JOHnny|Duke Willmington+%n" + + "-Hattie|Bond-%n" + + "=Nanette|Bates=%n" + + "@Dale|Adams@%n" + + "@Elinor|\"Ratliff|||\"%n"), result); } @Test public void contentHeaderTest() throws IOException { - String query = String.format(Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_RAW_SANITIZE); + String query = + String.format( + Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_RAW_SANITIZE); String requestBody = makeRequest(query); Request sqlRequest = new Request("POST", "/_plugins/_sql?format=raw"); diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/RelevanceFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/RelevanceFunctionIT.java index 26fe735f12..755493c167 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/RelevanceFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/RelevanceFunctionIT.java @@ -24,11 +24,15 @@ public void init() throws IOException { */ @Test public void verify_flags_in_simple_query_string() throws IOException { - String query1 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE simple_query_string(['Body'], '-free', flags='NONE|PREFIX|ESCAPE')"; + String query1 = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE simple_query_string(['Body'], '-free', flags='NONE|PREFIX|ESCAPE')"; var result1 = new JSONObject(executeQuery(query1, "jdbc")); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE simple_query_string([Body], '-free', flags='NOT|AND|OR')"; + String query2 = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE simple_query_string([Body], '-free', flags='NOT|AND|OR')"; var result2 = new JSONObject(executeQuery(query2, "jdbc")); assertNotEquals(result2.getInt("total"), result1.getInt("total")); @@ -44,11 +48,11 @@ public void verify_flags_in_simple_query_string() throws IOException { */ @Test public void verify_escape_in_query_string() throws IOException { - String query1 = "SELECT 
Id FROM " - + TEST_INDEX_BEER + " WHERE query_string([Title], '?', escape=true);"; + String query1 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query_string([Title], '?', escape=true);"; var result1 = new JSONObject(executeQuery(query1, "jdbc")); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query_string([Title], '?', escape=false);"; + String query2 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE query_string([Title], '?', escape=false);"; var result2 = new JSONObject(executeQuery(query2, "jdbc")); assertEquals(0, result1.getInt("total")); assertEquals(8, result2.getInt("total")); @@ -61,11 +65,15 @@ public void verify_escape_in_query_string() throws IOException { */ @Test public void verify_default_operator_in_query_string() throws IOException { - String query1 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query_string([Title], 'beer taste', default_operator='OR')"; + String query1 = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE query_string([Title], 'beer taste', default_operator='OR')"; var result1 = new JSONObject(executeQuery(query1, "jdbc")); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE query_string([Title], 'beer taste', default_operator='AND')"; + String query2 = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE query_string([Title], 'beer taste', default_operator='AND')"; var result2 = new JSONObject(executeQuery(query2, "jdbc")); assertEquals(16, result1.getInt("total")); assertEquals(4, result2.getInt("total")); @@ -73,11 +81,15 @@ public void verify_default_operator_in_query_string() throws IOException { @Test public void verify_default_operator_in_simple_query_string() throws IOException { - String query1 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE simple_query_string([Title], 'beer taste', default_operator='OR')"; + String query1 = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE simple_query_string([Title], 'beer taste', default_operator='OR')"; var result1 = new 
JSONObject(executeQuery(query1, "jdbc")); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE simple_query_string([Title], 'beer taste', default_operator='AND')"; + String query2 = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE simple_query_string([Title], 'beer taste', default_operator='AND')"; var result2 = new JSONObject(executeQuery(query2, "jdbc")); assertEquals(16, result1.getInt("total")); assertEquals(4, result2.getInt("total")); @@ -85,11 +97,15 @@ public void verify_default_operator_in_simple_query_string() throws IOException @Test public void verify_default_operator_in_multi_match() throws IOException { - String query1 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE multi_match([Title], 'beer taste', operator='OR')"; + String query1 = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE multi_match([Title], 'beer taste', operator='OR')"; var result1 = new JSONObject(executeQuery(query1, "jdbc")); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE multi_match([Title], 'beer taste', operator='AND')"; + String query2 = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE multi_match([Title], 'beer taste', operator='AND')"; var result2 = new JSONObject(executeQuery(query2, "jdbc")); assertEquals(16, result1.getInt("total")); assertEquals(4, result2.getInt("total")); @@ -97,11 +113,11 @@ public void verify_default_operator_in_multi_match() throws IOException { @Test public void verify_operator_in_match() throws IOException { - String query1 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE match(Title, 'beer taste', operator='OR')"; + String query1 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE match(Title, 'beer taste', operator='OR')"; var result1 = new JSONObject(executeQuery(query1, "jdbc")); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE match(Title, 'beer taste', operator='AND')"; + String query2 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE match(Title, 'beer taste', operator='AND')"; var 
result2 = new JSONObject(executeQuery(query2, "jdbc")); assertEquals(16, result1.getInt("total")); assertEquals(4, result2.getInt("total")); diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/SQLCorrectnessIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/SQLCorrectnessIT.java index 30f23547ec..6056a1c416 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/SQLCorrectnessIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/SQLCorrectnessIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import com.google.common.io.Resources; @@ -14,14 +13,12 @@ import java.util.function.Function; import org.junit.Test; -/** - * SQL integration test automated by comparison test framework. - */ +/** SQL integration test automated by comparison test framework. */ public class SQLCorrectnessIT extends CorrectnessTestBase { private static final String ROOT_DIR = "correctness/"; - private static final String[] EXPR_TEST_DIR = { "expressions" }; - private static final String[] QUERY_TEST_DIR = { "queries", "bugfixes" }; + private static final String[] EXPR_TEST_DIR = {"expressions"}; + private static final String[] QUERY_TEST_DIR = {"queries", "bugfixes"}; @Override protected void init() throws Exception { @@ -35,32 +32,30 @@ public void runAllTests() throws Exception { } /** - * Verify queries in files in directories with a converter to preprocess query. - * For example, for expressions it is converted to a SELECT clause before testing. + * Verify queries in files in directories with a converter to preprocess query. For example, for + * expressions it is converted to a SELECT clause before testing. 
*/ @SuppressWarnings("UnstableApiUsage") private void verifyQueries(String[] dirs, Function converter) throws Exception { for (String dir : dirs) { Path dirPath = Paths.get(Resources.getResource(ROOT_DIR + dir).toURI()); Files.walk(dirPath) - .filter(Files::isRegularFile) - .forEach(file -> verifyQueries(file, converter)); + .filter(Files::isRegularFile) + .forEach(file -> verifyQueries(file, converter)); } } - /** - * Comment start with # - */ + /** Comment start with # */ private void verifyQueries(Path file, Function converter) { try { - String[] queries = Files.lines(file) - .filter(line -> !line.startsWith("#")) - .map(converter) - .toArray(String[]::new); + String[] queries = + Files.lines(file) + .filter(line -> !line.startsWith("#")) + .map(converter) + .toArray(String[]::new); verify(queries); } catch (IOException e) { throw new IllegalStateException("Failed to read file: " + file, e); } } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/ScoreQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/ScoreQueryIT.java index 03df7d0e29..783fa2db2c 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/ScoreQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/ScoreQueryIT.java @@ -5,21 +5,20 @@ package org.opensearch.sql.sql; -import org.json.JSONObject; -import org.junit.Assert; -import org.junit.Test; -import org.opensearch.sql.legacy.SQLIntegTestCase; -import org.opensearch.sql.legacy.TestsConstants; - -import java.io.IOException; -import java.util.Locale; - import static org.hamcrest.Matchers.containsString; import static org.opensearch.sql.util.MatcherUtils.rows; import static org.opensearch.sql.util.MatcherUtils.schema; import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; import static org.opensearch.sql.util.MatcherUtils.verifySchema; +import java.io.IOException; +import java.util.Locale; +import org.json.JSONObject; +import org.junit.Assert; +import org.junit.Test; +import 
org.opensearch.sql.legacy.SQLIntegTestCase; +import org.opensearch.sql.legacy.TestsConstants; + public class ScoreQueryIT extends SQLIntegTestCase { @Override protected void init() throws Exception { @@ -27,75 +26,33 @@ protected void init() throws Exception { } /** - * "query" : { - * "from": 0, - * "size": 3, - * "timeout": "1m", - * "query": { - * "bool": { - * "should": [ - * { - * "match": { - * "address": { - * "query": "Lane", - * "operator": "OR", - * "prefix_length": 0, - * "max_expansions": 50, - * "fuzzy_transpositions": true, - * "lenient": false, - * "zero_terms_query": "NONE", - * "auto_generate_synonyms_phrase_query": true, - * "boost": 100.0 - * } - * } - * }, - * { - * "match": { - * "address": { - * "query": "Street", - * "operator": "OR", - * "prefix_length": 0, - * "max_expansions": 50, - * "fuzzy_transpositions": true, - * "lenient": false, - * "zero_terms_query": "NONE", - * "auto_generate_synonyms_phrase_query": true, - * "boost": 0.5 - * } - * } - * } - * ], - * "adjust_pure_negative": true, - * "boost": 1.0 - * } - * }, - * "_source": { - * "includes": [ - * "address" - * ], - * "excludes": [] - * }, - * "sort": [ - * { - * "_score": { - * "order": "desc" - * } - * } - * ], - * "track_scores": true - * } + * "query" : { "from": 0, "size": 3, "timeout": "1m", "query": { "bool": { "should": [ { "match": + * { "address": { "query": "Lane", "operator": "OR", "prefix_length": 0, "max_expansions": 50, + * "fuzzy_transpositions": true, "lenient": false, "zero_terms_query": "NONE", + * "auto_generate_synonyms_phrase_query": true, "boost": 100.0 } } }, { "match": { "address": { + * "query": "Street", "operator": "OR", "prefix_length": 0, "max_expansions": 50, + * "fuzzy_transpositions": true, "lenient": false, "zero_terms_query": "NONE", + * "auto_generate_synonyms_phrase_query": true, "boost": 0.5 } } } ], "adjust_pure_negative": + * true, "boost": 1.0 } }, "_source": { "includes": [ "address" ], "excludes": [] }, "sort": [ { + * "_score": { 
"order": "desc" } } ], "track_scores": true } + * * @throws IOException */ @Test public void scoreQueryExplainTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "select address from %s " + - "where score(matchQuery(address, 'Douglass'), 100) " + - "or score(matchQuery(address, 'Hall'), 0.5) order by _score desc limit 2", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, containsString("\\\"match\\\":{\\\"address\\\":{\\\"query\\\":\\\"Douglass\\\"")); + final String result = + explainQuery( + String.format( + Locale.ROOT, + "select address from %s " + + "where score(matchQuery(address, 'Douglass'), 100) " + + "or score(matchQuery(address, 'Hall'), 0.5) order by _score desc limit 2", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result, containsString("\\\"match\\\":{\\\"address\\\":{\\\"query\\\":\\\"Douglass\\\"")); Assert.assertThat(result, containsString("\\\"boost\\\":100.0")); - Assert.assertThat(result, containsString("\\\"match\\\":{\\\"address\\\":{\\\"query\\\":\\\"Hall\\\"")); + Assert.assertThat( + result, containsString("\\\"match\\\":{\\\"address\\\":{\\\"query\\\":\\\"Hall\\\"")); Assert.assertThat(result, containsString("\\\"boost\\\":0.5")); Assert.assertThat(result, containsString("\\\"sort\\\":[{\\\"_score\\\"")); Assert.assertThat(result, containsString("\\\"track_scores\\\":true")); @@ -103,26 +60,32 @@ public void scoreQueryExplainTest() throws IOException { @Test public void scoreQueryTest() throws IOException { - final JSONObject result = new JSONObject(executeQuery(String.format(Locale.ROOT, - "select address, _score from %s " + - "where score(matchQuery(address, 'Douglass'), 100) " + - "or score(matchQuery(address, 'Hall'), 0.5) order by _score desc limit 2", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc")); - verifySchema(result, - schema("address", null, "text"), - schema("_score", null, "float")); - verifyDataRows(result, - rows("154 Douglass Street", 650.1515), - 
rows("565 Hall Street", 3.2507575)); + final JSONObject result = + new JSONObject( + executeQuery( + String.format( + Locale.ROOT, + "select address, _score from %s " + + "where score(matchQuery(address, 'Douglass'), 100) " + + "or score(matchQuery(address, 'Hall'), 0.5) order by _score desc limit 2", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc")); + verifySchema(result, schema("address", null, "text"), schema("_score", null, "float")); + verifyDataRows( + result, rows("154 Douglass Street", 650.1515), rows("565 Hall Street", 3.2507575)); } @Test public void scoreQueryDefaultBoostExplainTest() throws IOException { - final String result = explainQuery(String.format(Locale.ROOT, - "select address from %s " + - "where score(matchQuery(address, 'Lane')) order by _score desc limit 2", - TestsConstants.TEST_INDEX_ACCOUNT)); - Assert.assertThat(result, containsString("\\\"match\\\":{\\\"address\\\":{\\\"query\\\":\\\"Lane\\\"")); + final String result = + explainQuery( + String.format( + Locale.ROOT, + "select address from %s " + + "where score(matchQuery(address, 'Lane')) order by _score desc limit 2", + TestsConstants.TEST_INDEX_ACCOUNT)); + Assert.assertThat( + result, containsString("\\\"match\\\":{\\\"address\\\":{\\\"query\\\":\\\"Lane\\\"")); Assert.assertThat(result, containsString("\\\"boost\\\":1.0")); Assert.assertThat(result, containsString("\\\"sort\\\":[{\\\"_score\\\"")); Assert.assertThat(result, containsString("\\\"track_scores\\\":true")); @@ -130,13 +93,16 @@ public void scoreQueryDefaultBoostExplainTest() throws IOException { @Test public void scoreQueryDefaultBoostQueryTest() throws IOException { - final JSONObject result = new JSONObject(executeQuery(String.format(Locale.ROOT, - "select address, _score from %s " + - "where score(matchQuery(address, 'Powell')) order by _score desc limit 2", - TestsConstants.TEST_INDEX_ACCOUNT), "jdbc")); - verifySchema(result, - schema("address", null, "text"), - schema("_score", null, "float")); + final JSONObject 
result = + new JSONObject( + executeQuery( + String.format( + Locale.ROOT, + "select address, _score from %s " + + "where score(matchQuery(address, 'Powell')) order by _score desc limit 2", + TestsConstants.TEST_INDEX_ACCOUNT), + "jdbc")); + verifySchema(result, schema("address", null, "text"), schema("_score", null, "float")); verifyDataRows(result, rows("305 Powell Street", 6.501515)); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/SimpleQueryStringIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/SimpleQueryStringIT.java index 9a0343bcfa..54d2fb7e03 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/SimpleQueryStringIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/SimpleQueryStringIT.java @@ -5,13 +5,10 @@ package org.opensearch.sql.sql; -import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BANK_CSV_SANITIZE; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_BEER; import static org.opensearch.sql.protocol.response.format.JsonResponseFormatter.CONTENT_TYPE; import java.io.IOException; -import java.util.Locale; - import org.json.JSONObject; import org.junit.Test; import org.opensearch.client.Request; @@ -34,43 +31,52 @@ public void init() throws IOException { @Test public void test_mandatory_params() throws IOException { - String query = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE simple_query_string([\\\"Tags\\\" ^ 1.5, Title, `Body` 4.2], 'taste')"; + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE simple_query_string([\\\"Tags\\\" ^ 1.5, Title, `Body` 4.2], 'taste')"; var result = new JSONObject(executeQuery(query, "jdbc")); assertEquals(16, result.getInt("total")); } @Test public void test_all_params() throws IOException { - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE simple_query_string(['Body', Tags, Title], 'taste beer', default_operator='or'," - + "analyzer=english, analyze_wildcard = false, quote_field_suffix = '.exact'," - + 
"auto_generate_synonyms_phrase_query=true, boost = 0.77, flags='PREFIX'," - + "fuzzy_transpositions = false, lenient = true, fuzzy_max_expansions = 25," - + "minimum_should_match = '2<-25% 9<-3', fuzzy_prefix_length = 7);"; + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE simple_query_string(['Body', Tags, Title], 'taste beer'," + + " default_operator='or',analyzer=english, analyze_wildcard = false," + + " quote_field_suffix = '.exact',auto_generate_synonyms_phrase_query=true, boost =" + + " 0.77, flags='PREFIX',fuzzy_transpositions = false, lenient = true," + + " fuzzy_max_expansions = 25,minimum_should_match = '2<-25% 9<-3', fuzzy_prefix_length" + + " = 7);"; var result = new JSONObject(executeQuery(query, "jdbc")); assertEquals(49, result.getInt("total")); } @Test public void verify_wildcard_test() throws IOException { - String query1 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE simple_query_string(['Tags'], 'taste')"; + String query1 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE simple_query_string(['Tags'], 'taste')"; var result1 = new JSONObject(executeQuery(query1, "jdbc")); - String query2 = "SELECT Id FROM " - + TEST_INDEX_BEER + " WHERE simple_query_string(['T*'], 'taste')"; + String query2 = + "SELECT Id FROM " + TEST_INDEX_BEER + " WHERE simple_query_string(['T*'], 'taste')"; var result2 = new JSONObject(executeQuery(query2, "jdbc")); assertNotEquals(result2.getInt("total"), result1.getInt("total")); - String query = "SELECT Id FROM " + TEST_INDEX_BEER - + " WHERE simple_query_string(['*Date'], '2014-01-22');"; + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + + " WHERE simple_query_string(['*Date'], '2014-01-22');"; var result = new JSONObject(executeQuery(query, "jdbc")); assertEquals(10, result.getInt("total")); } @Test public void contentHeaderTest() throws IOException { - String query = "SELECT Id FROM " + TEST_INDEX_BEER + String query = + "SELECT Id FROM " + + TEST_INDEX_BEER + " WHERE 
simple_query_string([\\\"Tags\\\" ^ 1.5, Title, 'Body' 4.2], 'taste')"; String requestBody = makeRequest(query); diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/StandalonePaginationIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/StandalonePaginationIT.java index aad39c4074..e884734c96 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/StandalonePaginationIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/StandalonePaginationIT.java @@ -33,13 +33,13 @@ import org.opensearch.sql.datasource.DataSourceService; import org.opensearch.sql.datasources.service.DataSourceServiceImpl; import org.opensearch.sql.executor.ExecutionEngine; -import org.opensearch.sql.executor.pagination.PlanSerializer; import org.opensearch.sql.executor.QueryService; +import org.opensearch.sql.executor.pagination.Cursor; +import org.opensearch.sql.executor.pagination.PlanSerializer; import org.opensearch.sql.expression.DSL; import org.opensearch.sql.legacy.SQLIntegTestCase; import org.opensearch.sql.opensearch.client.OpenSearchClient; import org.opensearch.sql.opensearch.client.OpenSearchRestClient; -import org.opensearch.sql.executor.pagination.Cursor; import org.opensearch.sql.opensearch.storage.OpenSearchDataSourceFactory; import org.opensearch.sql.opensearch.storage.OpenSearchIndex; import org.opensearch.sql.planner.PlanContext; @@ -66,17 +66,19 @@ public class StandalonePaginationIT extends SQLIntegTestCase { public void init() { RestHighLevelClient restClient = new InternalRestHighLevelClient(client()); client = new OpenSearchRestClient(restClient); - DataSourceService dataSourceService = new DataSourceServiceImpl( - new ImmutableSet.Builder() - .add(new OpenSearchDataSourceFactory(client, defaultSettings())) - .build(), - getDataSourceMetadataStorage(), - getDataSourceUserRoleHelper() - ); + DataSourceService dataSourceService = + new DataSourceServiceImpl( + new ImmutableSet.Builder() + .add(new OpenSearchDataSourceFactory(client, 
defaultSettings())) + .build(), + getDataSourceMetadataStorage(), + getDataSourceUserRoleHelper()); dataSourceService.createDataSource(defaultOpenSearchDataSourceMetadata()); ModulesBuilder modules = new ModulesBuilder(); - modules.add(new StandaloneModule(new InternalRestHighLevelClient(client()), defaultSettings(), dataSourceService)); + modules.add( + new StandaloneModule( + new InternalRestHighLevelClient(client()), defaultSettings(), dataSourceService)); Injector injector = modules.createInjector(); queryService = injector.getInstance(QueryService.class); @@ -85,10 +87,9 @@ public void init() { @Test public void test_pagination_whitebox() throws IOException { - class TestResponder - implements ResponseListener { - @Getter - Cursor cursor = Cursor.None; + class TestResponder implements ResponseListener { + @Getter Cursor cursor = Cursor.None; + @Override public void onResponse(ExecutionEngine.QueryResponse response) { cursor = response.getCursor(); @@ -113,13 +114,16 @@ public void onFailure(Exception e) { // act 1, asserts in firstResponder var t = new OpenSearchIndex(client, defaultSettings(), "test"); - LogicalPlan p = new LogicalPaginate(1, List.of( - new LogicalProject( - new LogicalRelation("test", t), List.of( - DSL.named("name", DSL.ref("name", ExprCoreType.STRING)), - DSL.named("age", DSL.ref("age", ExprCoreType.LONG))), - List.of() - ))); + LogicalPlan p = + new LogicalPaginate( + 1, + List.of( + new LogicalProject( + new LogicalRelation("test", t), + List.of( + DSL.named("name", DSL.ref("name", ExprCoreType.STRING)), + DSL.named("age", DSL.ref("age", ExprCoreType.LONG))), + List.of()))); var firstResponder = new TestResponder(); queryService.executePlan(p, PlanContext.emptyPlanContext(), firstResponder); @@ -139,24 +143,30 @@ public void test_explain_not_supported() { // Request should be rejected before index names are resolved request.setJsonEntity("{ \"query\": \"select * from something\", \"fetch_size\": 10 }"); var exception = 
assertThrows(ResponseException.class, () -> client().performRequest(request)); - var response = new JSONObject(new String(exception.getResponse().getEntity().getContent().readAllBytes())); - assertEquals("`explain` feature for paginated requests is not implemented yet.", + var response = + new JSONObject(new String(exception.getResponse().getEntity().getContent().readAllBytes())); + assertEquals( + "`explain` feature for paginated requests is not implemented yet.", response.getJSONObject("error").getString("details")); // Request should be rejected before cursor parsed request.setJsonEntity("{ \"cursor\" : \"n:0000\" }"); exception = assertThrows(ResponseException.class, () -> client().performRequest(request)); - response = new JSONObject(new String(exception.getResponse().getEntity().getContent().readAllBytes())); - assertEquals("Explain of a paged query continuation is not supported. Use `explain` for the initial query request.", + response = + new JSONObject(new String(exception.getResponse().getEntity().getContent().readAllBytes())); + assertEquals( + "Explain of a paged query continuation is not supported. 
Use `explain` for the initial" + + " query request.", response.getJSONObject("error").getString("details")); } private Settings defaultSettings() { return new Settings() { - private final Map defaultSettings = new ImmutableMap.Builder() - .put(Key.QUERY_SIZE_LIMIT, 200) - .put(Key.SQL_CURSOR_KEEP_ALIVE, TimeValue.timeValueMinutes(1)) - .build(); + private final Map defaultSettings = + new ImmutableMap.Builder() + .put(Key.QUERY_SIZE_LIMIT, 200) + .put(Key.SQL_CURSOR_KEEP_ALIVE, TimeValue.timeValueMinutes(1)) + .build(); @Override public T getSettingValue(Key key) { diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/SystemFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/SystemFunctionIT.java index 584cdd05dd..4b39e2925c 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/SystemFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/SystemFunctionIT.java @@ -24,37 +24,47 @@ protected void init() throws Exception { @Test public void typeof_sql_types() { - JSONObject response = executeJdbcRequest("SELECT typeof('pewpew'), typeof(NULL), typeof(1.0)," - + "typeof(12345), typeof(1234567891011), typeof(INTERVAL 2 DAY);"); - verifyDataRows(response, - rows("KEYWORD", "UNDEFINED", "DOUBLE", "INTEGER", "LONG", "INTERVAL")); - - response = executeJdbcRequest("SELECT" - + " typeof(CAST('1961-04-12 09:07:00' AS TIMESTAMP))," - + " typeof(CAST('09:07:00' AS TIME))," - + " typeof(CAST('1961-04-12' AS DATE))," - + " typeof(DATETIME('1961-04-12 09:07:00'))"); - verifyDataRows(response, - rows("TIMESTAMP", "TIME", "DATE", "DATETIME")); + JSONObject response = + executeJdbcRequest( + "SELECT typeof('pewpew'), typeof(NULL), typeof(1.0)," + + "typeof(12345), typeof(1234567891011), typeof(INTERVAL 2 DAY);"); + verifyDataRows(response, rows("KEYWORD", "UNDEFINED", "DOUBLE", "INTEGER", "LONG", "INTERVAL")); + + response = + executeJdbcRequest( + "SELECT" + + " typeof(CAST('1961-04-12 09:07:00' AS TIMESTAMP))," + + " typeof(CAST('09:07:00' 
AS TIME))," + + " typeof(CAST('1961-04-12' AS DATE))," + + " typeof(DATETIME('1961-04-12 09:07:00'))"); + verifyDataRows(response, rows("TIMESTAMP", "TIME", "DATE", "DATETIME")); } @Test public void typeof_opensearch_types() { - JSONObject response = executeJdbcRequest(String.format("SELECT typeof(double_number)," - + "typeof(long_number), typeof(integer_number), typeof(byte_number), typeof(short_number)," - + "typeof(float_number), typeof(half_float_number), typeof(scaled_float_number)" - + " from %s;", TEST_INDEX_DATATYPE_NUMERIC)); - verifyDataRows(response, - rows("DOUBLE", "LONG", "INTEGER", "BYTE", "SHORT", "FLOAT", "FLOAT", "DOUBLE")); - - response = executeJdbcRequest(String.format("SELECT typeof(text_value)," - + "typeof(date_value), typeof(boolean_value), typeof(object_value), typeof(keyword_value)," - + "typeof(ip_value), typeof(binary_value), typeof(geo_point_value)" - // TODO activate this test once `ARRAY` type supported, see ExpressionAnalyzer::isTypeNotSupported - //+ ", typeof(nested_value)" - + " from %s;", TEST_INDEX_DATATYPE_NONNUMERIC)); - verifyDataRows(response, - rows("TEXT", "TIMESTAMP", "BOOLEAN", "OBJECT", "KEYWORD", - "IP", "BINARY", "GEO_POINT")); + JSONObject response = + executeJdbcRequest( + String.format( + "SELECT typeof(double_number),typeof(long_number), typeof(integer_number)," + + " typeof(byte_number), typeof(short_number),typeof(float_number)," + + " typeof(half_float_number), typeof(scaled_float_number) from %s;", + TEST_INDEX_DATATYPE_NUMERIC)); + verifyDataRows( + response, rows("DOUBLE", "LONG", "INTEGER", "BYTE", "SHORT", "FLOAT", "FLOAT", "DOUBLE")); + + response = + executeJdbcRequest( + String.format( + "SELECT typeof(text_value),typeof(date_value), typeof(boolean_value)," + + " typeof(object_value), typeof(keyword_value),typeof(ip_value)," + + " typeof(binary_value), typeof(geo_point_value)" + // TODO activate this test once `ARRAY` type supported, see + // ExpressionAnalyzer::isTypeNotSupported + // + ", 
typeof(nested_value)" + + " from %s;", + TEST_INDEX_DATATYPE_NONNUMERIC)); + verifyDataRows( + response, + rows("TEXT", "TIMESTAMP", "BOOLEAN", "OBJECT", "KEYWORD", "IP", "BINARY", "GEO_POINT")); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/TextFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/TextFunctionIT.java index 94677354e4..314132fed0 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/TextFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/TextFunctionIT.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.plugin.RestSqlAction.QUERY_API_ENDPOINT; @@ -43,8 +42,7 @@ void verifyQuery(String query, String type, Integer output) throws IOException { void verifyQueryWithNullOutput(String query, String type) throws IOException { JSONObject result = executeQuery("select 'test null'," + query); - verifySchema(result, schema(query, null, type), - schema("'test null'", null, type)); + verifySchema(result, schema(query, null, type), schema("'test null'", null, type)); verifyDataRows(result, rows("test null", null)); } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/WildcardQueryIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/WildcardQueryIT.java index ee636ed5ce..c6e43010d9 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/WildcardQueryIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/WildcardQueryIT.java @@ -3,19 +3,17 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_WILDCARD; +import static org.opensearch.sql.util.MatcherUtils.rows; +import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; import java.io.IOException; import org.json.JSONObject; import org.junit.Test; import org.opensearch.sql.legacy.SQLIntegTestCase; -import static org.opensearch.sql.util.MatcherUtils.rows; 
-import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; - public class WildcardQueryIT extends SQLIntegTestCase { @Override protected void init() throws Exception { @@ -26,11 +24,17 @@ protected void init() throws Exception { public void test_wildcard_query_asterisk_function() throws IOException { String expected = "test wildcard"; - String query1 = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, 't*') LIMIT 1"; + String query1 = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, 't*') LIMIT 1"; JSONObject result1 = executeJdbcRequest(query1); verifyDataRows(result1, rows(expected)); - String query2 = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcardquery(KeywordBody, 't*') LIMIT 1"; + String query2 = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcardquery(KeywordBody, 't*') LIMIT 1"; JSONObject result2 = executeJdbcRequest(query2); verifyDataRows(result2, rows(expected)); } @@ -39,11 +43,17 @@ public void test_wildcard_query_asterisk_function() throws IOException { public void test_wildcard_query_question_mark_function() throws IOException { String expected = "test wildcard"; - String query1 = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, 'test wild??rd')"; + String query1 = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, 'test wild??rd')"; JSONObject result1 = executeJdbcRequest(query1); verifyDataRows(result1, rows(expected)); - String query2 = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcardquery(KeywordBody, 'test wild??rd')"; + String query2 = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcardquery(KeywordBody, 'test wild??rd')"; JSONObject result2 = executeJdbcRequest(query2); verifyDataRows(result2, rows(expected)); } @@ -51,11 +61,17 @@ public void test_wildcard_query_question_mark_function() throws 
IOException { // SQL uses ? as a wildcard which is converted to * in WildcardQuery.java @Test public void test_wildcard_query_sql_wildcard_percent_conversion() throws IOException { - String query1 = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, 'test%')"; + String query1 = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, 'test%')"; JSONObject result1 = executeJdbcRequest(query1); assertEquals(8, result1.getInt("total")); - String query2 = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, 'test*')"; + String query2 = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, 'test*')"; JSONObject result2 = executeJdbcRequest(query2); assertEquals(result1.getInt("total"), result2.getInt("total")); } @@ -63,27 +79,41 @@ public void test_wildcard_query_sql_wildcard_percent_conversion() throws IOExcep // SQL uses _ as a wildcard which is converted to ? 
in WildcardQuery.java @Test public void test_wildcard_query_sql_wildcard_underscore_conversion() throws IOException { - String query1 = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, 'test wild_ard*')"; + String query1 = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, 'test wild_ard*')"; JSONObject result1 = executeJdbcRequest(query1); assertEquals(7, result1.getInt("total")); - String query2 = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, 'test wild?ard*')"; + String query2 = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, 'test wild?ard*')"; JSONObject result2 = executeJdbcRequest(query2); assertEquals(result1.getInt("total"), result2.getInt("total")); } @Test public void test_escaping_wildcard_percent_in_the_beginning_of_text() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, '\\\\%*')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, '\\\\%*')"; JSONObject result = executeJdbcRequest(query); verifyDataRows(result, rows("%test wildcard in the beginning of the text")); } @Test public void test_escaping_wildcard_percent_in_text() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, '*\\\\%%')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, '*\\\\%%')"; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, rows("test wildcard in % the middle of the text"), + verifyDataRows( + result, + rows("test wildcard in % the middle of the text"), rows("test wildcard %% beside each other"), rows("test wildcard in the end of the text%"), rows("%test wildcard in the beginning of the text")); @@ -91,30 +121,44 @@ public void 
test_escaping_wildcard_percent_in_text() throws IOException { @Test public void test_escaping_wildcard_percent_in_the_end_of_text() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, '*\\\\%')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, '*\\\\%')"; JSONObject result = executeJdbcRequest(query); verifyDataRows(result, rows("test wildcard in the end of the text%")); } @Test public void test_double_escaped_wildcard_percent() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, '*\\\\%\\\\%*')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, '*\\\\%\\\\%*')"; JSONObject result = executeJdbcRequest(query); verifyDataRows(result, rows("test wildcard %% beside each other")); } @Test public void test_escaping_wildcard_underscore_in_the_beginning_of_text() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, '\\\\_*')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, '\\\\_*')"; JSONObject result = executeJdbcRequest(query); verifyDataRows(result, rows("_test wildcard in the beginning of the text")); } @Test public void test_escaping_wildcard_underscore_in_text() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, '*\\\\_*')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, '*\\\\_*')"; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, rows("test wildcard in _ the middle of the text"), + verifyDataRows( + result, + rows("test wildcard in _ the middle of the text"), rows("test wildcard __ beside each other"), 
rows("test wildcard in the end of the text_"), rows("_test wildcard in the beginning of the text"), @@ -123,60 +167,77 @@ public void test_escaping_wildcard_underscore_in_text() throws IOException { @Test public void test_escaping_wildcard_underscore_in_the_end_of_text() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, '*\\\\_')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, '*\\\\_')"; JSONObject result = executeJdbcRequest(query); - verifyDataRows(result, - rows("test wildcard in the end of the text_"), - rows("test backslash wildcard \\_")); + verifyDataRows( + result, rows("test wildcard in the end of the text_"), rows("test backslash wildcard \\_")); } @Test public void test_double_escaped_wildcard_underscore() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, '*\\\\_\\\\_*')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, '*\\\\_\\\\_*')"; JSONObject result = executeJdbcRequest(query); verifyDataRows(result, rows("test wildcard __ beside each other")); } @Test public void test_backslash_wildcard() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(KeywordBody, '*\\\\\\\\\\\\_')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(KeywordBody, '*\\\\\\\\\\\\_')"; JSONObject result = executeJdbcRequest(query); verifyDataRows(result, rows("test backslash wildcard \\_")); } @Test public void all_params_test() throws IOException { - String query = "SELECT KeywordBody FROM " + TEST_INDEX_WILDCARD - + " WHERE wildcard_query(KeywordBody, 'test*', boost = 0.9," - + " case_insensitive=true, rewrite='constant_score')"; + String query = + "SELECT KeywordBody FROM " + + TEST_INDEX_WILDCARD + 
+ " WHERE wildcard_query(KeywordBody, 'test*', boost = 0.9," + + " case_insensitive=true, rewrite='constant_score')"; JSONObject result = executeJdbcRequest(query); assertEquals(8, result.getInt("total")); } @Test public void test_wildcard_query_on_text_field_with_one_word() throws IOException { - String query = "SELECT * FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(TextBody, 'test*')"; + String query = + "SELECT * FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(TextBody, 'test*')"; JSONObject result = executeJdbcRequest(query); assertEquals(9, result.getInt("total")); } @Test public void test_wildcard_query_on_text_keyword_field_with_one_word() throws IOException { - String query = "SELECT * FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(TextKeywordBody, 'test*')"; + String query = + "SELECT * FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(TextKeywordBody, 'test*')"; JSONObject result = executeJdbcRequest(query); assertEquals(9, result.getInt("total")); } @Test public void test_wildcard_query_on_text_field_with_greater_than_one_word() throws IOException { - String query = "SELECT * FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(TextBody, 'test wild*')"; + String query = + "SELECT * FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(TextBody, 'test wild*')"; JSONObject result = executeJdbcRequest(query); assertEquals(0, result.getInt("total")); } @Test - public void test_wildcard_query_on_text_keyword_field_with_greater_than_one_word() throws IOException { - String query = "SELECT * FROM " + TEST_INDEX_WILDCARD + " WHERE wildcard_query(TextKeywordBody, 'test wild*')"; + public void test_wildcard_query_on_text_keyword_field_with_greater_than_one_word() + throws IOException { + String query = + "SELECT * FROM " + + TEST_INDEX_WILDCARD + + " WHERE wildcard_query(TextKeywordBody, 'test wild*')"; JSONObject result = executeJdbcRequest(query); assertEquals(0, result.getInt("total")); } diff --git 
a/integ-test/src/test/java/org/opensearch/sql/sql/WindowFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/WindowFunctionIT.java index ac042b4a47..86257e6a22 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/WindowFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/WindowFunctionIT.java @@ -3,14 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import static org.opensearch.sql.util.MatcherUtils.rows; import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; import static org.opensearch.sql.util.MatcherUtils.verifyDataRowsInOrder; - import org.json.JSONObject; import org.junit.Test; import org.opensearch.sql.legacy.SQLIntegTestCase; @@ -26,11 +24,16 @@ protected void init() throws Exception { @Test public void testOrderByNullFirst() { - JSONObject response = new JSONObject( - executeQuery("SELECT age, ROW_NUMBER() OVER(ORDER BY age DESC NULLS FIRST) " - + "FROM " + TestsConstants.TEST_INDEX_BANK_WITH_NULL_VALUES, "jdbc")); + JSONObject response = + new JSONObject( + executeQuery( + "SELECT age, ROW_NUMBER() OVER(ORDER BY age DESC NULLS FIRST) " + + "FROM " + + TestsConstants.TEST_INDEX_BANK_WITH_NULL_VALUES, + "jdbc")); - verifyDataRows(response, + verifyDataRows( + response, rows(null, 1), rows(36, 2), rows(36, 3), @@ -42,11 +45,16 @@ public void testOrderByNullFirst() { @Test public void testOrderByNullLast() { - JSONObject response = new JSONObject( - executeQuery("SELECT age, ROW_NUMBER() OVER(ORDER BY age NULLS LAST) " - + "FROM " + TestsConstants.TEST_INDEX_BANK_WITH_NULL_VALUES, "jdbc")); + JSONObject response = + new JSONObject( + executeQuery( + "SELECT age, ROW_NUMBER() OVER(ORDER BY age NULLS LAST) " + + "FROM " + + TestsConstants.TEST_INDEX_BANK_WITH_NULL_VALUES, + "jdbc")); - verifyDataRows(response, + verifyDataRows( + response, rows(28, 1), rows(32, 2), rows(33, 3), @@ -58,10 +66,15 @@ public void testOrderByNullLast() { @Test public void testDistinctCountOverNull() 
{ - JSONObject response = new JSONObject(executeQuery( - "SELECT lastname, COUNT(DISTINCT gender) OVER() " - + "FROM " + TestsConstants.TEST_INDEX_BANK, "jdbc")); - verifyDataRows(response, + JSONObject response = + new JSONObject( + executeQuery( + "SELECT lastname, COUNT(DISTINCT gender) OVER() " + + "FROM " + + TestsConstants.TEST_INDEX_BANK, + "jdbc")); + verifyDataRows( + response, rows("Duke Willmington", 2), rows("Bond", 2), rows("Bates", 2), @@ -73,10 +86,15 @@ public void testDistinctCountOverNull() { @Test public void testDistinctCountOver() { - JSONObject response = new JSONObject(executeQuery( - "SELECT lastname, COUNT(DISTINCT gender) OVER(ORDER BY lastname) " - + "FROM " + TestsConstants.TEST_INDEX_BANK, "jdbc")); - verifyDataRowsInOrder(response, + JSONObject response = + new JSONObject( + executeQuery( + "SELECT lastname, COUNT(DISTINCT gender) OVER(ORDER BY lastname) " + + "FROM " + + TestsConstants.TEST_INDEX_BANK, + "jdbc")); + verifyDataRowsInOrder( + response, rows("Adams", 1), rows("Ayala", 2), rows("Bates", 2), @@ -88,10 +106,15 @@ public void testDistinctCountOver() { @Test public void testDistinctCountPartition() { - JSONObject response = new JSONObject(executeQuery( - "SELECT lastname, COUNT(DISTINCT gender) OVER(PARTITION BY gender ORDER BY lastname) " - + "FROM " + TestsConstants.TEST_INDEX_BANK, "jdbc")); - verifyDataRowsInOrder(response, + JSONObject response = + new JSONObject( + executeQuery( + "SELECT lastname, COUNT(DISTINCT gender) OVER(PARTITION BY gender ORDER BY" + + " lastname) FROM " + + TestsConstants.TEST_INDEX_BANK, + "jdbc")); + verifyDataRowsInOrder( + response, rows("Ayala", 1), rows("Bates", 1), rows("Mcpherson", 1), @@ -100,5 +123,4 @@ public void testDistinctCountPartition() { rows("Duke Willmington", 1), rows("Ratliff", 1)); } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/util/ExecuteOnCallerThreadQueryManager.java 
b/integ-test/src/test/java/org/opensearch/sql/util/ExecuteOnCallerThreadQueryManager.java index 0a42dc83e3..39437ffc5f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/util/ExecuteOnCallerThreadQueryManager.java +++ b/integ-test/src/test/java/org/opensearch/sql/util/ExecuteOnCallerThreadQueryManager.java @@ -12,7 +12,7 @@ /** * ONLY USED FOR TEST PURPOSE. * - * Execute {@link AbstractPlan} on caller thread. + *

Execute {@link AbstractPlan} on caller thread. */ public class ExecuteOnCallerThreadQueryManager implements QueryManager { @Override diff --git a/integ-test/src/test/java/org/opensearch/sql/util/InternalRestHighLevelClient.java b/integ-test/src/test/java/org/opensearch/sql/util/InternalRestHighLevelClient.java index 57726089ae..0897a508e3 100644 --- a/integ-test/src/test/java/org/opensearch/sql/util/InternalRestHighLevelClient.java +++ b/integ-test/src/test/java/org/opensearch/sql/util/InternalRestHighLevelClient.java @@ -9,9 +9,7 @@ import org.opensearch.client.RestClient; import org.opensearch.client.RestHighLevelClient; -/** - * Internal RestHighLevelClient only for testing purpose. - */ +/** Internal RestHighLevelClient only for testing purpose. */ public class InternalRestHighLevelClient extends RestHighLevelClient { public InternalRestHighLevelClient(RestClient restClient) { super(restClient, RestClient::close, Collections.emptyList()); diff --git a/integ-test/src/test/java/org/opensearch/sql/util/MatcherUtils.java b/integ-test/src/test/java/org/opensearch/sql/util/MatcherUtils.java index f5fbcf9666..d444218c66 100644 --- a/integ-test/src/test/java/org/opensearch/sql/util/MatcherUtils.java +++ b/integ-test/src/test/java/org/opensearch/sql/util/MatcherUtils.java @@ -3,9 +3,9 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.util; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; @@ -17,7 +17,6 @@ import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasItems; import static org.junit.Assert.assertEquals; -import static org.hamcrest.MatcherAssert.assertThat; import com.google.common.base.Strings; import com.google.gson.JsonParser; @@ -45,16 +44,15 @@ public class MatcherUtils { /** * Assert field value in object by a custom matcher and getter to access 
the field. * - * @param name description + * @param name description * @param subMatcher sub-matcher for field - * @param getter getter function to access the field - * @param type of outer object - * @param type of inner field + * @param getter getter function to access the field + * @param type of outer object + * @param type of inner field * @return matcher */ - public static FeatureMatcher featureValueOf(String name, - Matcher subMatcher, - Function getter) { + public static FeatureMatcher featureValueOf( + String name, Matcher subMatcher, Function getter) { return new FeatureMatcher(subMatcher, name, name) { @Override protected U featureValueOf(T actual) { @@ -68,8 +66,8 @@ public static Matcher hits(Matcher... hitMatchers) { if (hitMatchers.length == 0) { return featureValueOf("SearchHits", emptyArray(), SearchHits::getHits); } - return featureValueOf("SearchHits", arrayContainingInAnyOrder(hitMatchers), - SearchHits::getHits); + return featureValueOf( + "SearchHits", arrayContainingInAnyOrder(hitMatchers), SearchHits::getHits); } @SafeVarargs @@ -92,14 +90,17 @@ public static Matcher> kv(String key, Object value) { } public static Matcher hitAny(String query, Matcher... matcher) { - return featureValueOf("SearchHits", hasItems(matcher), actual -> { - JSONArray array = (JSONArray) (actual.query(query)); - List results = new ArrayList<>(array.length()); - for (Object element : array) { - results.add((JSONObject) element); - } - return results; - }); + return featureValueOf( + "SearchHits", + hasItems(matcher), + actual -> { + JSONArray array = (JSONArray) (actual.query(query)); + List results = new ArrayList<>(array.length()); + for (Object element : array) { + results.add((JSONObject) element); + } + return results; + }); } public static Matcher hitAny(Matcher... matcher) { @@ -107,14 +108,17 @@ public static Matcher hitAny(Matcher... matcher) { } public static Matcher hitAll(Matcher... 
matcher) { - return featureValueOf("SearchHits", containsInAnyOrder(matcher), actual -> { - JSONArray array = (JSONArray) (actual.query("/hits/hits")); - List results = new ArrayList<>(array.length()); - for (Object element : array) { - results.add((JSONObject) element); - } - return results; - }); + return featureValueOf( + "SearchHits", + containsInAnyOrder(matcher), + actual -> { + JSONArray array = (JSONArray) (actual.query("/hits/hits")); + List results = new ArrayList<>(array.length()); + for (Object element : array) { + results.add((JSONObject) element); + } + return results; + }); } public static Matcher kvString(String key, Matcher matcher) { @@ -122,7 +126,8 @@ public static Matcher kvString(String key, Matcher matcher) } public static Matcher kvDouble(String key, Matcher matcher) { - return featureValueOf("Json Match", matcher, actual -> ((BigDecimal) actual.query(key)).doubleValue()); + return featureValueOf( + "Json Match", matcher, actual -> ((BigDecimal) actual.query(key)).doubleValue()); } public static Matcher kvInt(String key, Matcher matcher) { @@ -196,19 +201,18 @@ public static void verifyOrder(JSONArray array, Matcher... 
matchers) { assertThat(objects, containsInRelativeOrder(matchers)); } - public static TypeSafeMatcher schema(String expectedName, - String expectedType) { + public static TypeSafeMatcher schema(String expectedName, String expectedType) { return schema(expectedName, null, expectedType); } - public static TypeSafeMatcher schema(String expectedName, String expectedAlias, - String expectedType) { + public static TypeSafeMatcher schema( + String expectedName, String expectedAlias, String expectedType) { return new TypeSafeMatcher() { @Override public void describeTo(Description description) { description.appendText( - String - .format("(name=%s, alias=%s, type=%s)", expectedName, expectedAlias, expectedType)); + String.format( + "(name=%s, alias=%s, type=%s)", expectedName, expectedAlias, expectedType)); } @Override @@ -216,9 +220,9 @@ protected boolean matchesSafely(JSONObject jsonObject) { String actualName = (String) jsonObject.query("/name"); String actualAlias = (String) jsonObject.query("/alias"); String actualType = (String) jsonObject.query("/type"); - return expectedName.equals(actualName) && - (Strings.isNullOrEmpty(expectedAlias) || expectedAlias.equals(actualAlias)) && - expectedType.equals(actualType); + return expectedName.equals(actualName) + && (Strings.isNullOrEmpty(expectedAlias) || expectedAlias.equals(actualAlias)) + && expectedType.equals(actualType); } }; } @@ -288,10 +292,7 @@ public void describeTo(Description description) { }; } - - /** - * Tests if a string is equal to another string, ignore the case and whitespace. - */ + /** Tests if a string is equal to another string, ignore the case and whitespace. 
*/ public static class IsEqualIgnoreCaseAndWhiteSpace extends TypeSafeMatcher { private final String string; @@ -314,7 +315,8 @@ public void describeMismatchSafely(String item, Description mismatchDescription) @Override public void describeTo(Description description) { - description.appendText("a string equal to ") + description + .appendText("a string equal to ") .appendValue(string) .appendText(" ignore case and white space"); } @@ -334,13 +336,11 @@ public static Matcher equalToIgnoreCaseAndWhiteSpace(String expectedStri /** * Compare two JSON string are equals. + * * @param expected expected JSON string. * @param actual actual JSON string. */ public static void assertJsonEquals(String expected, String actual) { - assertEquals( - JsonParser.parseString(expected), - JsonParser.parseString(actual) - ); + assertEquals(JsonParser.parseString(expected), JsonParser.parseString(actual)); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/util/StandaloneModule.java b/integ-test/src/test/java/org/opensearch/sql/util/StandaloneModule.java index c347ea5244..5d6f0b5a55 100644 --- a/integ-test/src/test/java/org/opensearch/sql/util/StandaloneModule.java +++ b/integ-test/src/test/java/org/opensearch/sql/util/StandaloneModule.java @@ -15,10 +15,10 @@ import org.opensearch.sql.common.setting.Settings; import org.opensearch.sql.datasource.DataSourceService; import org.opensearch.sql.executor.ExecutionEngine; -import org.opensearch.sql.executor.pagination.PlanSerializer; import org.opensearch.sql.executor.QueryManager; import org.opensearch.sql.executor.QueryService; import org.opensearch.sql.executor.execution.QueryPlanFactory; +import org.opensearch.sql.executor.pagination.PlanSerializer; import org.opensearch.sql.expression.function.BuiltinFunctionRepository; import org.opensearch.sql.monitor.AlwaysHealthyMonitor; import org.opensearch.sql.monitor.ResourceMonitor; @@ -37,8 +37,8 @@ import org.opensearch.sql.storage.StorageEngine; /** - * A utility class which 
registers SQL engine singletons as `OpenSearchPluginModule` does. - * It is needed to get access to those instances in test and validate their behavior. + * A utility class which registers SQL engine singletons as `OpenSearchPluginModule` does. It is + * needed to get access to those instances in test and validate their behavior. */ @RequiredArgsConstructor public class StandaloneModule extends AbstractModule { @@ -53,8 +53,7 @@ public class StandaloneModule extends AbstractModule { BuiltinFunctionRepository.getInstance(); @Override - protected void configure() { - } + protected void configure() {} @Provides public OpenSearchClient openSearchClient() { @@ -67,8 +66,8 @@ public StorageEngine storageEngine(OpenSearchClient client) { } @Provides - public ExecutionEngine executionEngine(OpenSearchClient client, ExecutionProtector protector, - PlanSerializer planSerializer) { + public ExecutionEngine executionEngine( + OpenSearchClient client, ExecutionProtector protector, PlanSerializer planSerializer) { return new OpenSearchExecutionEngine(client, protector, planSerializer); } diff --git a/integ-test/src/test/java/org/opensearch/sql/util/TestUtils.java b/integ-test/src/test/java/org/opensearch/sql/util/TestUtils.java index 3281c172cb..a2f4021c1d 100644 --- a/integ-test/src/test/java/org/opensearch/sql/util/TestUtils.java +++ b/integ-test/src/test/java/org/opensearch/sql/util/TestUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.util; import static com.google.common.base.Strings.isNullOrEmpty; @@ -44,9 +43,9 @@ public class TestUtils { /** * Create test index by REST client. 
* - * @param client client connection + * @param client client connection * @param indexName test index name - * @param mapping test index mapping or null if no predefined mapping + * @param mapping test index mapping or null if no predefined mapping */ public static void createIndexByRestClient(RestClient client, String indexName, String mapping) { Request request = new Request("PUT", "/" + indexName); @@ -57,16 +56,15 @@ public static void createIndexByRestClient(RestClient client, String indexName, } /** - * https://github.com/elastic/elasticsearch/pull/49959 - * Deprecate creation of dot-prefixed index names except for hidden and system indices. - * Create hidden index by REST client. + * https://github.com/elastic/elasticsearch/pull/49959 Deprecate creation of dot-prefixed index + * names except for hidden and system indices. Create hidden index by REST client. * - * @param client client connection + * @param client client connection * @param indexName test index name - * @param mapping test index mapping or null if no predefined mapping + * @param mapping test index mapping or null if no predefined mapping */ - public static void createHiddenIndexByRestClient(RestClient client, String indexName, - String mapping) { + public static void createHiddenIndexByRestClient( + RestClient client, String indexName, String mapping) { Request request = new Request("PUT", "/" + indexName); JSONObject jsonObject = isNullOrEmpty(mapping) ? 
new JSONObject() : new JSONObject(mapping); jsonObject.put("settings", new JSONObject("{\"index\":{\"hidden\":true}}")); @@ -76,11 +74,10 @@ public static void createHiddenIndexByRestClient(RestClient client, String index } /** - * Check if index already exists by OpenSearch index exists API which returns: - * 200 - specified indices or aliases exist - * 404 - one or more indices specified or aliases do not exist + * Check if index already exists by OpenSearch index exists API which returns: 200 - specified + * indices or aliases exist 404 - one or more indices specified or aliases do not exist * - * @param client client connection + * @param client client connection * @param indexName index name * @return true for index exist */ @@ -96,13 +93,13 @@ public static boolean isIndexExist(RestClient client, String indexName) { /** * Load test data set by REST client. * - * @param client client connection - * @param indexName index name + * @param client client connection + * @param indexName index name * @param dataSetFilePath file path of test data set * @throws IOException */ - public static void loadDataByRestClient(RestClient client, String indexName, - String dataSetFilePath) throws IOException { + public static void loadDataByRestClient( + RestClient client, String indexName, String dataSetFilePath) throws IOException { Path path = Paths.get(getResourceFilePath(dataSetFilePath)); Request request = new Request("POST", "/" + indexName + "/_bulk?refresh=true"); request.setJsonEntity(new String(Files.readAllBytes(path))); @@ -112,7 +109,7 @@ public static void loadDataByRestClient(RestClient client, String indexName, /** * Perform a request by REST client. 
* - * @param client client connection + * @param client client connection * @param request request object */ public static Response performRequest(RestClient client, Request request) { @@ -129,566 +126,567 @@ public static Response performRequest(RestClient client, Request request) { } public static String getAccountIndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"gender\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " }," + - " \"address\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " }," + - " \"firstname\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true,\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }" + - " }" + - " }," + - " \"lastname\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true,\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }" + - " }" + - " }," + - " \"state\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true,\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }" + - " }" + - " }" + - " }" + - " }" + - "}"; + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"gender\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " }," + + " \"address\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " }," + + " \"firstname\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true,\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }" + + " }" + + " }," + + " \"lastname\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true,\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }" + + " }" + + " }," + + " \"state\": {\n" + + " \"type\": \"text\",\n" 
+ + " \"fielddata\": true,\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }" + + " }" + + " }" + + " }" + + " }" + + "}"; } public static String getPhraseIndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"phrase\": {\n" + - " \"type\": \"text\",\n" + - " \"store\": true\n" + - " }" + - " }" + - " }" + - "}"; + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"phrase\": {\n" + + " \"type\": \"text\",\n" + + " \"store\": true\n" + + " }" + + " }" + + " }" + + "}"; } public static String getDogIndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"dog_name\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " }" + - " }" + - " }" + - "}"; + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"dog_name\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " }" + + " }" + + " }" + + "}"; } public static String getDogs2IndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"dog_name\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " },\n" + - " \"holdersName\": {\n" + - " \"type\": \"keyword\"\n" + - " }" + - " }" + - " }" + - "}"; + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"dog_name\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"holdersName\": {\n" + + " \"type\": \"keyword\"\n" + + " }" + + " }" + + " }" + + "}"; } public static String getDogs3IndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"holdersName\": {\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"color\": {\n" + - " \"type\": \"text\"\n" + - " }" + - " }" + - " }" + - "}"; + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"holdersName\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"color\": {\n" + + " \"type\": \"text\"\n" + + " }" + + " }" + + " }" + + "}"; } public 
static String getPeople2IndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"firstname\": {\n" + - " \"type\": \"keyword\"\n" + - " }" + - " }" + - " }" + - "}"; + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"firstname\": {\n" + + " \"type\": \"keyword\"\n" + + " }" + + " }" + + " }" + + "}"; } public static String getGameOfThronesIndexMapping() { - return "{ \"mappings\": { " + - " \"properties\": {\n" + - " \"nickname\": {\n" + - " \"type\":\"text\", " + - " \"fielddata\":true" + - " },\n" + - " \"name\": {\n" + - " \"properties\": {\n" + - " \"firstname\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " },\n" + - " \"lastname\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " },\n" + - " \"ofHerName\": {\n" + - " \"type\": \"integer\"\n" + - " },\n" + - " \"ofHisName\": {\n" + - " \"type\": \"integer\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"house\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"gender\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " }\n" + - " }" + - "} } }"; + return "{ \"mappings\": { " + + " \"properties\": {\n" + + " \"nickname\": {\n" + + " \"type\":\"text\", " + + " \"fielddata\":true" + + " },\n" + + " \"name\": {\n" + + " \"properties\": {\n" + + " \"firstname\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"lastname\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"ofHerName\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + " \"ofHisName\": {\n" + + " \"type\": \"integer\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"house\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " }\n" + + " },\n" + + " 
\"gender\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " }\n" + + " }" + + "} } }"; } // System public static String getOdbcIndexMapping() { - return "{\n" + - "\t\"mappings\" :{\n" + - "\t\t\"properties\":{\n" + - "\t\t\t\"odbc_time\":{\n" + - "\t\t\t\t\"type\":\"date\",\n" + - "\t\t\t\t\"format\": \"'{ts' ''yyyy-MM-dd HH:mm:ss.SSS'''}'\"\n" + - "\t\t\t},\n" + - "\t\t\t\"docCount\":{\n" + - "\t\t\t\t\"type\":\"text\"\n" + - "\t\t\t}\n" + - "\t\t}\n" + - "\t}\n" + - "}"; + return "{\n" + + "\t\"mappings\" :{\n" + + "\t\t\"properties\":{\n" + + "\t\t\t\"odbc_time\":{\n" + + "\t\t\t\t\"type\":\"date\",\n" + + "\t\t\t\t\"format\": \"'{ts' ''yyyy-MM-dd HH:mm:ss.SSS'''}'\"\n" + + "\t\t\t},\n" + + "\t\t\t\"docCount\":{\n" + + "\t\t\t\t\"type\":\"text\"\n" + + "\t\t\t}\n" + + "\t\t}\n" + + "\t}\n" + + "}"; } public static String getLocationIndexMapping() { - return "{\n" + - "\t\"mappings\" :{\n" + - "\t\t\"properties\":{\n" + - "\t\t\t\"place\":{\n" + - "\t\t\t\t\"type\":\"geo_shape\"\n" + - //"\t\t\t\t\"tree\": \"quadtree\",\n" + // Field tree and precision are deprecated in OpenSearch - //"\t\t\t\t\"precision\": \"10km\"\n" + - "\t\t\t},\n" + - "\t\t\t\"center\":{\n" + - "\t\t\t\t\"type\":\"geo_point\"\n" + - "\t\t\t},\n" + - "\t\t\t\"description\":{\n" + - "\t\t\t\t\"type\":\"text\"\n" + - "\t\t\t}\n" + - "\t\t}\n" + - "\t}\n" + - "}"; + return "{\n" + + "\t\"mappings\" :{\n" + + "\t\t\"properties\":{\n" + + "\t\t\t\"place\":{\n" + + "\t\t\t\t\"type\":\"geo_shape\"\n" + + + // "\t\t\t\t\"tree\": \"quadtree\",\n" + // Field tree and precision are deprecated in + // OpenSearch + // "\t\t\t\t\"precision\": \"10km\"\n" + + "\t\t\t},\n" + + "\t\t\t\"center\":{\n" + + "\t\t\t\t\"type\":\"geo_point\"\n" + + "\t\t\t},\n" + + "\t\t\t\"description\":{\n" + + "\t\t\t\t\"type\":\"text\"\n" + + "\t\t\t}\n" + + "\t\t}\n" + + "\t}\n" + + "}"; } public static String getEmployeeNestedTypeIndexMapping() 
{ - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"comments\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"date\": {\n" + - " \"type\": \"date\"\n" + - " },\n" + - " \"likes\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"message\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " },\n" + - " \"id\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"name\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " },\n" + - " \"projects\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"address\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"city\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " },\n" + - " \"state\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " },\n" + - " \"name\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " },\n" + - " \"fielddata\": true\n" + - " },\n" + - " \"started_year\": {\n" + - " \"type\": \"long\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"title\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - "}\n"; + return "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"comments\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"date\": 
{\n" + + " \"type\": \"date\"\n" + + " },\n" + + " \"likes\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"message\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " },\n" + + " \"id\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"name\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " },\n" + + " \"projects\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"address\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"city\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " },\n" + + " \"state\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " },\n" + + " \"name\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " },\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"started_year\": {\n" + + " \"type\": \"long\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"title\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}\n"; } - public static String getNestedTypeIndexMapping() { - return "{ \"mappings\": {\n" + - " \"properties\": {\n" + - " \"message\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"info\": {\n" + - " \"type\": \"keyword\",\n" + - " \"index\": \"true\"\n" + - " },\n" + - " \"author\": {\n" + - " \"type\": 
\"keyword\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\" : 256\n" + - " }\n" + - " },\n" + - " \"index\": \"true\"\n" + - " },\n" + - " \"dayOfWeek\": {\n" + - " \"type\": \"long\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"comment\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"data\": {\n" + - " \"type\": \"keyword\",\n" + - " \"index\": \"true\"\n" + - " },\n" + - " \"likes\": {\n" + - " \"type\": \"long\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"myNum\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"someField\": {\n" + - " \"type\": \"keyword\",\n" + - " \"index\": \"true\"\n" + - " }\n" + - " }\n" + - " }\n" + - " }}"; + return "{ \"mappings\": {\n" + + " \"properties\": {\n" + + " \"message\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"info\": {\n" + + " \"type\": \"keyword\",\n" + + " \"index\": \"true\"\n" + + " },\n" + + " \"author\": {\n" + + " \"type\": \"keyword\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\" : 256\n" + + " }\n" + + " },\n" + + " \"index\": \"true\"\n" + + " },\n" + + " \"dayOfWeek\": {\n" + + " \"type\": \"long\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"comment\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"data\": {\n" + + " \"type\": \"keyword\",\n" + + " \"index\": \"true\"\n" + + " },\n" + + " \"likes\": {\n" + + " \"type\": \"long\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"myNum\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"someField\": {\n" + + " \"type\": \"keyword\",\n" + + " \"index\": \"true\"\n" + + " }\n" + + " }\n" + + " }\n" + + " }}"; } public static String getJoinTypeIndexMapping() { - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"join_field\": {\n" + - " \"type\": \"join\",\n" + - " \"relations\": {\n" + - " \"parentType\": \"childrenType\"\n" + - " }\n" + - " 
},\n" + - " \"parentTile\": {\n" + - " \"index\": \"true\",\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"dayOfWeek\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"author\": {\n" + - " \"index\": \"true\",\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"info\": {\n" + - " \"index\": \"true\",\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; + return "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"join_field\": {\n" + + " \"type\": \"join\",\n" + + " \"relations\": {\n" + + " \"parentType\": \"childrenType\"\n" + + " }\n" + + " },\n" + + " \"parentTile\": {\n" + + " \"index\": \"true\",\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"dayOfWeek\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"author\": {\n" + + " \"index\": \"true\",\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"info\": {\n" + + " \"index\": \"true\",\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; } public static String getBankIndexMapping() { - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"account_number\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"address\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"age\": {\n" + - " \"type\": \"integer\"\n" + - " },\n" + - " \"balance\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"birthdate\": {\n" + - " \"type\": \"date\"\n" + - " },\n" + - " \"city\": {\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"email\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"employer\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"firstname\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"gender\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " }," + - " \"lastname\": {\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"male\": {\n" + - " \"type\": \"boolean\"\n" + - " },\n" + - " \"state\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " 
\"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; + return "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"account_number\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"address\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"age\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + " \"balance\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"birthdate\": {\n" + + " \"type\": \"date\"\n" + + " },\n" + + " \"city\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"email\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"employer\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"firstname\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"gender\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " }," + + " \"lastname\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"male\": {\n" + + " \"type\": \"boolean\"\n" + + " },\n" + + " \"state\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; } public static String getBankWithNullValuesIndexMapping() { - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"account_number\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"address\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"age\": {\n" + - " \"type\": \"integer\"\n" + - " },\n" + - " \"balance\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"gender\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"firstname\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"lastname\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; + return "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"account_number\": {\n" + + " \"type\": \"long\"\n" + + " },\n" 
+ + " \"address\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"age\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + " \"balance\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"gender\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"firstname\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"lastname\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; } public static String getOrderIndexMapping() { - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"id\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"name\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; + return "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"id\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"name\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; } public static String getWeblogsIndexMapping() { - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"host\": {\n" + - " \"type\": \"ip\"\n" + - " },\n" + - " \"method\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"url\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"response\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"bytes\": {\n" + - " \"type\": \"text\"\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; + return "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"host\": {\n" + + " \"type\": \"ip\"\n" + + " },\n" + + " \"method\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"url\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"response\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"bytes\": {\n" + + " \"type\": \"text\"\n" + + " }\n" + 
+ " }\n" + + " }\n" + + "}"; } public static String getDateIndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"date_keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }" + - " }" + - " }" + - "}"; + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"date_keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }" + + " }" + + " }" + + "}"; } public static String getDateTimeIndexMapping() { - return "{" + - " \"mappings\": {" + - " \"properties\": {" + - " \"birthday\": {" + - " \"type\": \"date\"" + - " }" + - " }" + - " }" + - "}"; + return "{" + + " \"mappings\": {" + + " \"properties\": {" + + " \"birthday\": {" + + " \"type\": \"date\"" + + " }" + + " }" + + " }" + + "}"; } public static String getNestedSimpleIndexMapping() { - return "{" + - " \"mappings\": {" + - " \"properties\": {" + - " \"address\": {" + - " \"type\": \"nested\"," + - " \"properties\": {" + - " \"city\": {" + - " \"type\": \"text\"," + - " \"fields\": {" + - " \"keyword\": {" + - " \"type\": \"keyword\"," + - " \"ignore_above\": 256" + - " }" + - " }" + - " }," + - " \"state\": {" + - " \"type\": \"text\"," + - " \"fields\": {" + - " \"keyword\": {" + - " \"type\": \"keyword\"," + - " \"ignore_above\": 256" + - " }" + - " }" + - " }" + - " }" + - " }," + - " \"age\": {" + - " \"type\": \"long\"" + - " }," + - " \"id\": {" + - " \"type\": \"long\"" + - " }," + - " \"name\": {" + - " \"type\": \"text\"," + - " \"fields\": {" + - " \"keyword\": {" + - " \"type\": \"keyword\"," + - " \"ignore_above\": 256" + - " }" + - " }" + - " }" + - " }" + - " }" + - "}"; + return "{" + + " \"mappings\": {" + + " \"properties\": {" + + " \"address\": {" + + " \"type\": \"nested\"," + + " \"properties\": {" + + " \"city\": {" + + " \"type\": \"text\"," + + " \"fields\": {" + + " \"keyword\": {" + + " \"type\": \"keyword\"," + + " \"ignore_above\": 256" + + " }" + + " }" + + " }," + + " \"state\": {" + + " 
\"type\": \"text\"," + + " \"fields\": {" + + " \"keyword\": {" + + " \"type\": \"keyword\"," + + " \"ignore_above\": 256" + + " }" + + " }" + + " }" + + " }" + + " }," + + " \"age\": {" + + " \"type\": \"long\"" + + " }," + + " \"id\": {" + + " \"type\": \"long\"" + + " }," + + " \"name\": {" + + " \"type\": \"text\"," + + " \"fields\": {" + + " \"keyword\": {" + + " \"type\": \"keyword\"," + + " \"ignore_above\": 256" + + " }" + + " }" + + " }" + + " }" + + " }" + + "}"; } public static void loadBulk(Client client, String jsonPath, String defaultIndex) @@ -698,8 +696,8 @@ public static void loadBulk(Client client, String jsonPath, String defaultIndex) BulkRequest bulkRequest = new BulkRequest(); try (final InputStream stream = new FileInputStream(absJsonPath); - final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); - final BufferedReader br = new BufferedReader(streamReader)) { + final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); + final BufferedReader br = new BufferedReader(streamReader)) { while (true) { @@ -728,8 +726,11 @@ public static void loadBulk(Client client, String jsonPath, String defaultIndex) BulkResponse bulkResponse = client.bulk(bulkRequest).actionGet(); if (bulkResponse.hasFailures()) { - throw new Exception("Failed to load test data into index " + defaultIndex + ", " + - bulkResponse.buildFailureMessage()); + throw new Exception( + "Failed to load test data into index " + + defaultIndex + + ", " + + bulkResponse.buildFailureMessage()); } System.out.println(bulkResponse.getItems().length + " documents loaded."); // ensure the documents are searchable @@ -755,8 +756,8 @@ public static String getResponseBody(Response response, boolean retainNewLines) final StringBuilder sb = new StringBuilder(); try (final InputStream is = response.getEntity().getContent(); - final BufferedReader br = new BufferedReader( - new InputStreamReader(is, StandardCharsets.UTF_8))) { + final BufferedReader br = 
+ new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) { String line; while ((line = br.readLine()) != null) { @@ -769,15 +770,14 @@ public static String getResponseBody(Response response, boolean retainNewLines) return sb.toString(); } - public static String fileToString(final String filePathFromProjectRoot, - final boolean removeNewLines) - throws IOException { + public static String fileToString( + final String filePathFromProjectRoot, final boolean removeNewLines) throws IOException { final String absolutePath = getResourceFilePath(filePathFromProjectRoot); try (final InputStream stream = new FileInputStream(absolutePath); - final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); - final BufferedReader br = new BufferedReader(streamReader)) { + final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); + final BufferedReader br = new BufferedReader(streamReader)) { final StringBuilder stringBuilder = new StringBuilder(); String line = br.readLine(); @@ -831,36 +831,41 @@ public static List> getPermutations(final List items) { } final String currentItem = items.get(i); - result.addAll(getPermutations(smallerSet).stream().map(smallerSetPermutation -> { - final List permutation = new ArrayList<>(); - permutation.add(currentItem); - permutation.addAll(smallerSetPermutation); - return permutation; - }).collect(Collectors.toCollection(LinkedList::new))); + result.addAll( + getPermutations(smallerSet).stream() + .map( + smallerSetPermutation -> { + final List permutation = new ArrayList<>(); + permutation.add(currentItem); + permutation.addAll(smallerSetPermutation); + return permutation; + }) + .collect(Collectors.toCollection(LinkedList::new))); } return result; } public static void verifyIsV1Cursor(JSONObject response) { - var legacyCursorPrefixes = Arrays.stream(CursorType.values()) - .map(c -> c.getId() + ":").collect(Collectors.toList()); + var legacyCursorPrefixes = + 
Arrays.stream(CursorType.values()).map(c -> c.getId() + ":").collect(Collectors.toList()); verifyCursor(response, legacyCursorPrefixes, "v1"); } - public static void verifyIsV2Cursor(JSONObject response) { verifyCursor(response, List.of(CURSOR_PREFIX), "v2"); } - private static void verifyCursor(JSONObject response, List validCursorPrefix, String engineName) { - assertTrue("'cursor' property does not exist", response.has("cursor")); + private static void verifyCursor( + JSONObject response, List validCursorPrefix, String engineName) { + assertTrue("'cursor' property does not exist", response.has("cursor")); - var cursor = response.getString("cursor"); - assertFalse("'cursor' property is empty", cursor.isEmpty()); - assertTrue("The cursor '" + cursor.substring(0, 50) + "...' is not from " + engineName + " engine.", - validCursorPrefix.stream().anyMatch(cursor::startsWith)); - } + var cursor = response.getString("cursor"); + assertFalse("'cursor' property is empty", cursor.isEmpty()); + assertTrue( + "The cursor '" + cursor.substring(0, 50) + "...' 
is not from " + engineName + " engine.", + validCursorPrefix.stream().anyMatch(cursor::startsWith)); + } public static void verifyNoCursor(JSONObject response) { assertTrue(!response.has("cursor")); diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/OpenSearchLegacySqlAnalyzer.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/OpenSearchLegacySqlAnalyzer.java index b44e2bbb41..bb063f4df4 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/OpenSearchLegacySqlAnalyzer.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/OpenSearchLegacySqlAnalyzer.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr; import java.util.Optional; @@ -25,84 +24,77 @@ import org.opensearch.sql.legacy.antlr.visitor.EarlyExitAnalysisException; import org.opensearch.sql.legacy.esdomain.LocalClusterState; -/** - * Entry point for ANTLR generated parser to perform strict syntax and semantic analysis. - */ +/** Entry point for ANTLR generated parser to perform strict syntax and semantic analysis. */ public class OpenSearchLegacySqlAnalyzer { - private static final Logger LOG = LogManager.getLogger(); - - /** Original sql query */ - private final SqlAnalysisConfig config; - - public OpenSearchLegacySqlAnalyzer(SqlAnalysisConfig config) { - this.config = config; - } - - public Optional analyze(String sql, LocalClusterState clusterState) { - // Perform analysis for SELECT only for now because of extra code changes required for SHOW/DESCRIBE. 
- if (!isSelectStatement(sql) || !config.isAnalyzerEnabled()) { - return Optional.empty(); - } + private static final Logger LOG = LogManager.getLogger(); - try { - return Optional.of(analyzeSemantic( - analyzeSyntax(sql), - clusterState - )); - } catch (EarlyExitAnalysisException e) { - // Expected if configured so log on debug level to avoid always logging stack trace - LOG.debug("Analysis exits early and will skip remaining process", e); - return Optional.empty(); - } - } + /** Original sql query */ + private final SqlAnalysisConfig config; - /** - * Build lexer and parser to perform syntax analysis only. - * Runtime exception with clear message is thrown for any verification error. - * - * @return parse tree - */ - public ParseTree analyzeSyntax(String sql) { - OpenSearchLegacySqlParser parser = createParser(createLexer(sql)); - parser.addErrorListener(new SyntaxAnalysisErrorListener()); - return parser.root(); - } + public OpenSearchLegacySqlAnalyzer(SqlAnalysisConfig config) { + this.config = config; + } - /** - * Perform semantic analysis based on syntax analysis output - parse tree. - * - * @param tree parse tree - * @param clusterState cluster state required for index mapping query - */ - public Type analyzeSemantic(ParseTree tree, LocalClusterState clusterState) { - return tree.accept(new AntlrSqlParseTreeVisitor<>(createAnalyzer(clusterState))); + public Optional analyze(String sql, LocalClusterState clusterState) { + // Perform analysis for SELECT only for now because of extra code changes required for + // SHOW/DESCRIBE. 
+ if (!isSelectStatement(sql) || !config.isAnalyzerEnabled()) { + return Optional.empty(); } - /** Factory method for semantic analyzer to help assemble all required components together */ - private SemanticAnalyzer createAnalyzer(LocalClusterState clusterState) { - SemanticContext context = new SemanticContext(); - OpenSearchMappingLoader - mappingLoader = new OpenSearchMappingLoader(context, clusterState, config.getAnalysisThreshold()); - TypeChecker typeChecker = new TypeChecker(context, config.isFieldSuggestionEnabled()); - return new SemanticAnalyzer(mappingLoader, typeChecker); + try { + return Optional.of(analyzeSemantic(analyzeSyntax(sql), clusterState)); + } catch (EarlyExitAnalysisException e) { + // Expected if configured so log on debug level to avoid always logging stack trace + LOG.debug("Analysis exits early and will skip remaining process", e); + return Optional.empty(); } - - private OpenSearchLegacySqlParser createParser(Lexer lexer) { - return new OpenSearchLegacySqlParser( - new CommonTokenStream(lexer)); - } - - private OpenSearchLegacySqlLexer createLexer(String sql) { - return new OpenSearchLegacySqlLexer( - new CaseInsensitiveCharStream(sql)); - } - - private boolean isSelectStatement(String sql) { - sql = sql.replaceAll("\\R", " ").trim(); - int endOfFirstWord = sql.indexOf(' '); - String firstWord = sql.substring(0, endOfFirstWord > 0 ? endOfFirstWord : sql.length()); - return "SELECT".equalsIgnoreCase(firstWord); - } - + } + + /** + * Build lexer and parser to perform syntax analysis only. Runtime exception with clear message is + * thrown for any verification error. + * + * @return parse tree + */ + public ParseTree analyzeSyntax(String sql) { + OpenSearchLegacySqlParser parser = createParser(createLexer(sql)); + parser.addErrorListener(new SyntaxAnalysisErrorListener()); + return parser.root(); + } + + /** + * Perform semantic analysis based on syntax analysis output - parse tree. 
+ * + * @param tree parse tree + * @param clusterState cluster state required for index mapping query + */ + public Type analyzeSemantic(ParseTree tree, LocalClusterState clusterState) { + return tree.accept(new AntlrSqlParseTreeVisitor<>(createAnalyzer(clusterState))); + } + + /** Factory method for semantic analyzer to help assemble all required components together */ + private SemanticAnalyzer createAnalyzer(LocalClusterState clusterState) { + SemanticContext context = new SemanticContext(); + OpenSearchMappingLoader mappingLoader = + new OpenSearchMappingLoader(context, clusterState, config.getAnalysisThreshold()); + TypeChecker typeChecker = new TypeChecker(context, config.isFieldSuggestionEnabled()); + return new SemanticAnalyzer(mappingLoader, typeChecker); + } + + private OpenSearchLegacySqlParser createParser(Lexer lexer) { + return new OpenSearchLegacySqlParser(new CommonTokenStream(lexer)); + } + + private OpenSearchLegacySqlLexer createLexer(String sql) { + return new OpenSearchLegacySqlLexer(new CaseInsensitiveCharStream(sql)); + } + + private boolean isSelectStatement(String sql) { + sql = sql.replaceAll("\\R", " ").trim(); + int endOfFirstWord = sql.indexOf(' '); + String firstWord = sql.substring(0, endOfFirstWord > 0 ? 
endOfFirstWord : sql.length()); + return "SELECT".equalsIgnoreCase(firstWord); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/SimilarSymbols.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/SimilarSymbols.java index 0f87b9eb05..7410e56e49 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/SimilarSymbols.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/SimilarSymbols.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr; import java.util.Collection; @@ -13,50 +12,51 @@ import org.apache.lucene.search.spell.LevenshteinDistance; import org.apache.lucene.search.spell.StringDistance; -/** - * String similarity for finding most similar string. - */ +/** String similarity for finding most similar string. */ public class SimilarSymbols { - /** LevenshteinDistance instance is basically a math util which is supposed to be thread safe */ - private static final StringDistance ALGORITHM = new LevenshteinDistance(); - - /** Symbol candidate list from which to pick one as most similar symbol to a target */ - private final Collection candidates; - - public SimilarSymbols(Collection candidates) { - this.candidates = Collections.unmodifiableCollection(candidates); + /** LevenshteinDistance instance is basically a math util which is supposed to be thread safe */ + private static final StringDistance ALGORITHM = new LevenshteinDistance(); + + /** Symbol candidate list from which to pick one as most similar symbol to a target */ + private final Collection candidates; + + public SimilarSymbols(Collection candidates) { + this.candidates = Collections.unmodifiableCollection(candidates); + } + + /** + * Find most similar string in candidates by calculating similarity distance among target and + * candidate strings. 
+ * + * @param target string to match + * @return most similar string to the target + */ + public String mostSimilarTo(String target) { + Optional closest = + candidates.stream() + .map(candidate -> new SymbolDistance(candidate, target)) + .max(Comparator.comparing(SymbolDistance::similarity)); + if (closest.isPresent()) { + return closest.get().candidate; } - - /** - * Find most similar string in candidates by calculating similarity distance - * among target and candidate strings. - * - * @param target string to match - * @return most similar string to the target - */ - public String mostSimilarTo(String target) { - Optional closest = candidates.stream(). - map(candidate -> new SymbolDistance(candidate, target)). - max(Comparator.comparing(SymbolDistance::similarity)); - if (closest.isPresent()) { - return closest.get().candidate; - } - return target; + return target; + } + + /** + * Distance (similarity) between 2 symbols. This class is mainly for Java 8 stream comparator API + */ + private static class SymbolDistance { + private final String candidate; + private final String target; + + private SymbolDistance(String candidate, String target) { + this.candidate = candidate; + this.target = target; } - /** Distance (similarity) between 2 symbols. 
This class is mainly for Java 8 stream comparator API */ - private static class SymbolDistance { - private final String candidate; - private final String target; - - private SymbolDistance(String candidate, String target) { - this.candidate = candidate; - this.target = target; - } - - public float similarity() { - return ALGORITHM.getDistance(candidate, target); - } + public float similarity() { + return ALGORITHM.getDistance(candidate, target); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/SqlAnalysisConfig.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/SqlAnalysisConfig.java index 56c69755a6..703c7d6586 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/SqlAnalysisConfig.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/SqlAnalysisConfig.java @@ -3,49 +3,48 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr; -/** - * Configuration for SQL analysis. - */ +/** Configuration for SQL analysis. 
*/ public class SqlAnalysisConfig { - /** Is entire analyzer enabled to perform the analysis */ - private final boolean isAnalyzerEnabled; - - /** Is suggestion enabled for field name typo */ - private final boolean isFieldSuggestionEnabled; - - /** Skip entire analysis for index mapping larger than this threhold */ - private final int analysisThreshold; - - public SqlAnalysisConfig(boolean isAnalyzerEnabled, - boolean isFieldSuggestionEnabled, - int analysisThreshold) { - this.isAnalyzerEnabled = isAnalyzerEnabled; - this.isFieldSuggestionEnabled = isFieldSuggestionEnabled; - this.analysisThreshold = analysisThreshold; - } - - public boolean isAnalyzerEnabled() { - return isAnalyzerEnabled; - } - - public boolean isFieldSuggestionEnabled() { - return isFieldSuggestionEnabled; - } - - public int getAnalysisThreshold() { - return analysisThreshold; - } - - @Override - public String toString() { - return "SqlAnalysisConfig{" - + "isAnalyzerEnabled=" + isAnalyzerEnabled - + ", isFieldSuggestionEnabled=" + isFieldSuggestionEnabled - + ", analysisThreshold=" + analysisThreshold - + '}'; - } + /** Is entire analyzer enabled to perform the analysis */ + private final boolean isAnalyzerEnabled; + + /** Is suggestion enabled for field name typo */ + private final boolean isFieldSuggestionEnabled; + + /** Skip entire analysis for index mapping larger than this threhold */ + private final int analysisThreshold; + + public SqlAnalysisConfig( + boolean isAnalyzerEnabled, boolean isFieldSuggestionEnabled, int analysisThreshold) { + this.isAnalyzerEnabled = isAnalyzerEnabled; + this.isFieldSuggestionEnabled = isFieldSuggestionEnabled; + this.analysisThreshold = analysisThreshold; + } + + public boolean isAnalyzerEnabled() { + return isAnalyzerEnabled; + } + + public boolean isFieldSuggestionEnabled() { + return isFieldSuggestionEnabled; + } + + public int getAnalysisThreshold() { + return analysisThreshold; + } + + @Override + public String toString() { + return 
"SqlAnalysisConfig{" + + "isAnalyzerEnabled=" + + isAnalyzerEnabled + + ", isFieldSuggestionEnabled=" + + isFieldSuggestionEnabled + + ", analysisThreshold=" + + analysisThreshold + + '}'; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/SqlAnalysisException.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/SqlAnalysisException.java index 1856d568a2..b1d1204f21 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/SqlAnalysisException.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/SqlAnalysisException.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr; -/** - * SQL query analysis abstract exception. - */ +/** SQL query analysis abstract exception. */ public class SqlAnalysisException extends RuntimeException { - public SqlAnalysisException(String message) { - super(message); - } + public SqlAnalysisException(String message) { + super(message); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalysisException.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalysisException.java index 742642fb42..45c2dbc1dc 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalysisException.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalysisException.java @@ -3,18 +3,14 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; import org.opensearch.sql.legacy.antlr.SqlAnalysisException; -/** - * Exception for semantic analysis - */ +/** Exception for semantic analysis */ public class SemanticAnalysisException extends SqlAnalysisException { - public SemanticAnalysisException(String message) { - super(message); - } - + public SemanticAnalysisException(String message) { + super(message); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Environment.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Environment.java index 11d25c3ce8..4f8a62d898 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Environment.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Environment.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.scope; import java.util.HashMap; @@ -11,84 +10,85 @@ import java.util.Optional; import org.opensearch.sql.legacy.antlr.semantic.types.Type; -/** - * Environment for symbol and its attribute (type) in the current scope - */ +/** Environment for symbol and its attribute (type) in the current scope */ public class Environment { - private final Environment parent; - - private final SymbolTable symbolTable; + private final Environment parent; - public Environment(Environment parent) { - this.parent = parent; - this.symbolTable = new SymbolTable(); - } + private final SymbolTable symbolTable; - /** - * Define symbol with the type - * @param symbol symbol to define - * @param type type - */ - public void define(Symbol symbol, Type type) { - symbolTable.store(symbol, type); - } + public Environment(Environment parent) { + this.parent = parent; + this.symbolTable = new SymbolTable(); + } - /** - * Resolve symbol in the environment - * @param symbol symbol to look up - * @return type if exist - */ - public Optional resolve(Symbol symbol) { - Optional type = Optional.empty(); - for (Environment cur = this; cur != null; cur = cur.parent) { - type = cur.symbolTable.lookup(symbol); - if (type.isPresent()) { - break; - } - } - return type; - } + /** + * Define symbol with the type + * + * @param symbol symbol to define + * @param type type + */ + public void define(Symbol symbol, Type type) { + symbolTable.store(symbol, type); + } - /** - * Resolve symbol definitions by a prefix. 
- * @param prefix a prefix of symbol - * @return all symbols with types that starts with the prefix - */ - public Map resolveByPrefix(Symbol prefix) { - Map typeByName = new HashMap<>(); - for (Environment cur = this; cur != null; cur = cur.parent) { - typeByName.putAll(cur.symbolTable.lookupByPrefix(prefix)); - } - return typeByName; + /** + * Resolve symbol in the environment + * + * @param symbol symbol to look up + * @return type if exist + */ + public Optional resolve(Symbol symbol) { + Optional type = Optional.empty(); + for (Environment cur = this; cur != null; cur = cur.parent) { + type = cur.symbolTable.lookup(symbol); + if (type.isPresent()) { + break; + } } + return type; + } - /** - * Resolve all symbols in the namespace. - * @param namespace a namespace - * @return all symbols in the namespace - */ - public Map resolveAll(Namespace namespace) { - Map result = new HashMap<>(); - for (Environment cur = this; cur != null; cur = cur.parent) { - // putIfAbsent ensures inner most definition will be used (shadow outers) - cur.symbolTable.lookupAll(namespace).forEach(result::putIfAbsent); - } - return result; + /** + * Resolve symbol definitions by a prefix. + * + * @param prefix a prefix of symbol + * @return all symbols with types that starts with the prefix + */ + public Map resolveByPrefix(Symbol prefix) { + Map typeByName = new HashMap<>(); + for (Environment cur = this; cur != null; cur = cur.parent) { + typeByName.putAll(cur.symbolTable.lookupByPrefix(prefix)); } + return typeByName; + } - /** Current environment is root and no any symbol defined */ - public boolean isEmpty(Namespace namespace) { - for (Environment cur = this; cur != null; cur = cur.parent) { - if (!cur.symbolTable.isEmpty(namespace)) { - return false; - } - } - return true; + /** + * Resolve all symbols in the namespace. 
+ * + * @param namespace a namespace + * @return all symbols in the namespace + */ + public Map resolveAll(Namespace namespace) { + Map result = new HashMap<>(); + for (Environment cur = this; cur != null; cur = cur.parent) { + // putIfAbsent ensures inner most definition will be used (shadow outers) + cur.symbolTable.lookupAll(namespace).forEach(result::putIfAbsent); } + return result; + } - public Environment getParent() { - return parent; + /** Current environment is root and no any symbol defined */ + public boolean isEmpty(Namespace namespace) { + for (Environment cur = this; cur != null; cur = cur.parent) { + if (!cur.symbolTable.isEmpty(namespace)) { + return false; + } } + return true; + } + public Environment getParent() { + return parent; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Namespace.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Namespace.java index b591de5783..c500809a70 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Namespace.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Namespace.java @@ -3,27 +3,22 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.scope; -/** - * Namespace of symbol to avoid naming conflict - */ +/** Namespace of symbol to avoid naming conflict */ public enum Namespace { + FIELD_NAME("Field"), + FUNCTION_NAME("Function"), + OPERATOR_NAME("Operator"); - FIELD_NAME("Field"), - FUNCTION_NAME("Function"), - OPERATOR_NAME("Operator"); - - private final String name; - - Namespace(String name) { - this.name = name; - } + private final String name; - @Override - public String toString() { - return name; - } + Namespace(String name) { + this.name = name; + } + @Override + public String toString() { + return name; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/SemanticContext.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/SemanticContext.java index 968aff0df2..73fa5d1655 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/SemanticContext.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/SemanticContext.java @@ -3,46 +3,44 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.scope; import java.util.Objects; /** - * Semantic context responsible for environment chain (stack) management and everything required for analysis. - * This context should be shared by different stages in future, particularly - * from semantic analysis to logical planning to physical planning. + * Semantic context responsible for environment chain (stack) management and everything required for + * analysis. This context should be shared by different stages in future, particularly from semantic + * analysis to logical planning to physical planning. */ public class SemanticContext { - /** Environment stack for symbol scope management */ - private Environment environment = new Environment(null); - - /** - * Push a new environment - */ - public void push() { - environment = new Environment(environment); - } - - /** - * Return current environment - * @return current environment - */ - public Environment peek() { - return environment; - } - - /** - * Pop up current environment from environment chain - * @return current environment (before pop) - */ - public Environment pop() { - Objects.requireNonNull(environment, "Fail to pop context due to no environment present"); - - Environment curEnv = environment; - environment = curEnv.getParent(); - return curEnv; - } - + /** Environment stack for symbol scope management */ + private Environment environment = new Environment(null); + + /** Push a new environment */ + public void push() { + environment = new Environment(environment); + } + + /** + * Return current environment + * + * @return current 
environment + */ + public Environment peek() { + return environment; + } + + /** + * Pop up current environment from environment chain + * + * @return current environment (before pop) + */ + public Environment pop() { + Objects.requireNonNull(environment, "Fail to pop context due to no environment present"); + + Environment curEnv = environment; + environment = curEnv.getParent(); + return curEnv; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Symbol.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Symbol.java index e9b6892e68..837baf1c00 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Symbol.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/Symbol.java @@ -3,34 +3,30 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.scope; -/** - * Symbol in the scope - */ +/** Symbol in the scope */ public class Symbol { - private final Namespace namespace; - - private final String name; + private final Namespace namespace; - public Symbol(Namespace namespace, String name) { - this.namespace = namespace; - this.name = name; - } + private final String name; - public Namespace getNamespace() { - return namespace; - } + public Symbol(Namespace namespace, String name) { + this.namespace = namespace; + this.name = name; + } - public String getName() { - return name; - } + public Namespace getNamespace() { + return namespace; + } - @Override - public String toString() { - return namespace + " [" + name + "]"; - } + public String getName() { + return name; + } + @Override + public String toString() { + return namespace + " [" + name + "]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/SymbolTable.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/SymbolTable.java index a8f0174c25..ee9f4545a6 100644 --- 
a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/SymbolTable.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/SymbolTable.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.scope; import static java.util.Collections.emptyMap; @@ -17,79 +16,79 @@ import java.util.stream.Collectors; import org.opensearch.sql.legacy.antlr.semantic.types.Type; -/** - * Symbol table for symbol definition and resolution. - */ +/** Symbol table for symbol definition and resolution. */ public class SymbolTable { - /** - * Two-dimension hash table to manage symbols with type in different namespace - */ - private Map> tableByNamespace = new EnumMap<>(Namespace.class); + /** Two-dimension hash table to manage symbols with type in different namespace */ + private Map> tableByNamespace = + new EnumMap<>(Namespace.class); - /** - * Store symbol with the type. Create new map for namespace for the first time. - * @param symbol symbol to define - * @param type symbol type - */ - public void store(Symbol symbol, Type type) { - tableByNamespace.computeIfAbsent( - symbol.getNamespace(), - ns -> new TreeMap<>() - ).computeIfAbsent( - symbol.getName(), - symbolName -> new TypeSupplier(symbolName, type) - ).add(type); - } + /** + * Store symbol with the type. Create new map for namespace for the first time. + * + * @param symbol symbol to define + * @param type symbol type + */ + public void store(Symbol symbol, Type type) { + tableByNamespace + .computeIfAbsent(symbol.getNamespace(), ns -> new TreeMap<>()) + .computeIfAbsent(symbol.getName(), symbolName -> new TypeSupplier(symbolName, type)) + .add(type); + } - /** - * Look up symbol in the namespace map. 
- * @param symbol symbol to look up - * @return symbol type which is optional - */ - public Optional lookup(Symbol symbol) { - Map table = tableByNamespace.get(symbol.getNamespace()); - TypeSupplier typeSupplier = null; - if (table != null) { - typeSupplier = table.get(symbol.getName()); - } - return Optional.ofNullable(typeSupplier).map(TypeSupplier::get); + /** + * Look up symbol in the namespace map. + * + * @param symbol symbol to look up + * @return symbol type which is optional + */ + public Optional lookup(Symbol symbol) { + Map table = tableByNamespace.get(symbol.getNamespace()); + TypeSupplier typeSupplier = null; + if (table != null) { + typeSupplier = table.get(symbol.getName()); } + return Optional.ofNullable(typeSupplier).map(TypeSupplier::get); + } - /** - * Look up symbols by a prefix. - * @param prefix a symbol prefix - * @return symbols starting with the prefix - */ - public Map lookupByPrefix(Symbol prefix) { - NavigableMap table = tableByNamespace.get(prefix.getNamespace()); - if (table != null) { - return table.subMap(prefix.getName(), prefix.getName() + Character.MAX_VALUE) - .entrySet().stream() - .filter(entry -> null != entry.getValue().get()) - .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().get())); - } - return emptyMap(); + /** + * Look up symbols by a prefix. + * + * @param prefix a symbol prefix + * @return symbols starting with the prefix + */ + public Map lookupByPrefix(Symbol prefix) { + NavigableMap table = tableByNamespace.get(prefix.getNamespace()); + if (table != null) { + return table + .subMap(prefix.getName(), prefix.getName() + Character.MAX_VALUE) + .entrySet() + .stream() + .filter(entry -> null != entry.getValue().get()) + .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().get())); } + return emptyMap(); + } - /** - * Look up all symbols in the namespace. 
- * @param namespace a namespace - * @return all symbols in the namespace map - */ - public Map lookupAll(Namespace namespace) { - return tableByNamespace.getOrDefault(namespace, emptyNavigableMap()) - .entrySet().stream() - .filter(entry -> null != entry.getValue().get()) - .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().get())); - } + /** + * Look up all symbols in the namespace. + * + * @param namespace a namespace + * @return all symbols in the namespace map + */ + public Map lookupAll(Namespace namespace) { + return tableByNamespace.getOrDefault(namespace, emptyNavigableMap()).entrySet().stream() + .filter(entry -> null != entry.getValue().get()) + .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().get())); + } - /** - * Check if namespace map in empty (none definition) - * @param namespace a namespace - * @return true for empty - */ - public boolean isEmpty(Namespace namespace) { - return tableByNamespace.getOrDefault(namespace, emptyNavigableMap()).isEmpty(); - } + /** + * Check if namespace map in empty (none definition) + * + * @param namespace a namespace + * @return true for empty + */ + public boolean isEmpty(Namespace namespace) { + return tableByNamespace.getOrDefault(namespace, emptyNavigableMap()).isEmpty(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/TypeSupplier.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/TypeSupplier.java index 355ae70249..7c2410cf76 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/TypeSupplier.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/scope/TypeSupplier.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.scope; import java.util.HashSet; @@ -13,39 +12,38 @@ import org.opensearch.sql.legacy.antlr.semantic.types.Type; /** - * The TypeSupplier is construct by the symbolName and symbolType. 
- * The TypeSupplier implement the {@link Supplier} interface to provide the {@link Type}. - * The TypeSupplier maintain types to track different {@link Type} definition for the same symbolName. + * The TypeSupplier is construct by the symbolName and symbolType. The TypeSupplier implement the + * {@link Supplier} interface to provide the {@link Type}. The TypeSupplier maintain types to + * track different {@link Type} definition for the same symbolName. */ public class TypeSupplier implements Supplier { - private final String symbolName; - private final Type symbolType; - private final Set types; + private final String symbolName; + private final Type symbolType; + private final Set types; - public TypeSupplier(String symbolName, Type symbolType) { - this.symbolName = symbolName; - this.symbolType = symbolType; - this.types = new HashSet<>(); - this.types.add(symbolType); - } + public TypeSupplier(String symbolName, Type symbolType) { + this.symbolName = symbolName; + this.symbolType = symbolType; + this.types = new HashSet<>(); + this.types.add(symbolType); + } - public TypeSupplier add(Type type) { - types.add(type); - return this; - } + public TypeSupplier add(Type type) { + types.add(type); + return this; + } - /** - * Get the {@link Type} - * Throw {@link SemanticAnalysisException} if conflict found. - * Currently, if the two types not equal, they are treated as conflicting. - */ - @Override - public Type get() { - if (types.size() > 1) { - throw new SemanticAnalysisException( - String.format("Field [%s] have conflict type [%s]", symbolName, types)); - } else { - return symbolType; - } + /** + * Get the {@link Type} Throw {@link SemanticAnalysisException} if conflict found. Currently, if + * the two types not equal, they are treated as conflicting. 
+ */ + @Override + public Type get() { + if (types.size() > 1) { + throw new SemanticAnalysisException( + String.format("Field [%s] have conflict type [%s]", symbolName, types)); + } else { + return symbolType; } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/Type.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/Type.java index 0491c4e568..539e3478d2 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/Type.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/Type.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.TYPE_ERROR; @@ -15,66 +14,64 @@ import org.opensearch.sql.legacy.antlr.visitor.Reducible; import org.opensearch.sql.legacy.utils.StringUtils; -/** - * Type interface which represents any type of symbol in the SQL. - */ +/** Type interface which represents any type of symbol in the SQL. */ public interface Type extends Reducible { - /** - * Hide generic type ugliness and error check here in one place. - */ - @SuppressWarnings("unchecked") - @Override - default T reduce(List others) { - List actualArgTypes = (List) others; - Type result = construct(actualArgTypes); - if (result != TYPE_ERROR) { - return (T) result; - } - - // Generate error message by current type name, argument types and usage of current type - // For example, 'Function [LOG] cannot work with [TEXT, INTEGER]. Usage: LOG(NUMBER) -> NUMBER - String actualArgTypesStr; - if (actualArgTypes.isEmpty()) { - actualArgTypesStr = ""; - } else { - actualArgTypesStr = actualArgTypes.stream(). - map(Type::usage). - collect(Collectors.joining(", ")); - } + /** Hide generic type ugliness and error check here in one place. 
*/ + @SuppressWarnings("unchecked") + @Override + default T reduce(List others) { + List actualArgTypes = (List) others; + Type result = construct(actualArgTypes); + if (result != TYPE_ERROR) { + return (T) result; + } - throw new SemanticAnalysisException( - StringUtils.format("%s cannot work with [%s]. Usage: %s", - this, actualArgTypesStr, usage())); + // Generate error message by current type name, argument types and usage of current type + // For example, 'Function [LOG] cannot work with [TEXT, INTEGER]. Usage: LOG(NUMBER) -> NUMBER + String actualArgTypesStr; + if (actualArgTypes.isEmpty()) { + actualArgTypesStr = ""; + } else { + actualArgTypesStr = + actualArgTypes.stream().map(Type::usage).collect(Collectors.joining(", ")); } - /** - * Type descriptive name - * @return name - */ - String getName(); + throw new SemanticAnalysisException( + StringUtils.format( + "%s cannot work with [%s]. Usage: %s", this, actualArgTypesStr, usage())); + } - /** - * Check if current type is compatible with other of same type. - * @param other other type - * @return true if compatible - */ - default boolean isCompatible(Type other) { - return other == UNKNOWN || this == other; - } + /** + * Type descriptive name + * + * @return name + */ + String getName(); + + /** + * Check if current type is compatible with other of same type. + * + * @param other other type + * @return true if compatible + */ + default boolean isCompatible(Type other) { + return other == UNKNOWN || this == other; + } - /** - * Construct a new type by applying current constructor on other types. - * Constructor is a generic conception that could be function, operator, join etc. - * - * @param others other types - * @return a new type as result - */ - Type construct(List others); + /** + * Construct a new type by applying current constructor on other types. Constructor is a generic + * conception that could be function, operator, join etc. 
+ * + * @param others other types + * @return a new type as result + */ + Type construct(List others); - /** - * Return typical usage of current type - * @return usage string - */ - String usage(); + /** + * Return typical usage of current type + * + * @return usage string + */ + String usage(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/TypeExpression.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/TypeExpression.java index eacca7b00d..5a9d4d7410 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/TypeExpression.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/TypeExpression.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.TYPE_ERROR; @@ -17,105 +16,107 @@ import org.opensearch.sql.legacy.utils.StringUtils; /** - * Type expression representing specification(s) of constructor such as function, operator etc. - * Type expression has to be an interface with default methods because most subclass needs to be Enum. + * Type expression representing specification(s) of constructor such as function, operator etc. Type + * expression has to be an interface with default methods because most subclass needs to be Enum. */ public interface TypeExpression extends Type { - @Override - default Type construct(List actualArgs) { - TypeExpressionSpec[] specifications = specifications(); - if (specifications.length == 0) { - // Empty spec means type check for this type expression is not implemented yet. - // Return this to be compatible with everything. - return UNKNOWN; - } + @Override + default Type construct(List actualArgs) { + TypeExpressionSpec[] specifications = specifications(); + if (specifications.length == 0) { + // Empty spec means type check for this type expression is not implemented yet. 
+ // Return this to be compatible with everything. + return UNKNOWN; + } - // Create a temp specification for compatibility check. - TypeExpressionSpec actualSpec = new TypeExpressionSpec(); - actualSpec.argTypes = actualArgs.toArray(new Type[0]); - - // Perform compatibility check between actual spec (argument types) and expected. - // If found any compatible spec, it means actual spec is legal and thus apply to get result type. - // Ex. Actual=[INTEGER], Specs=[NUMBER->NUMBER], [STRING->NUMBER]. So first spec matches and return NUMBER. - for (TypeExpressionSpec spec : specifications) { - if (spec.isCompatible(actualSpec)) { - return spec.constructFunc.apply(actualArgs.toArray(new Type[0])); - } - } - return TYPE_ERROR; + // Create a temp specification for compatibility check. + TypeExpressionSpec actualSpec = new TypeExpressionSpec(); + actualSpec.argTypes = actualArgs.toArray(new Type[0]); + + // Perform compatibility check between actual spec (argument types) and expected. + // If found any compatible spec, it means actual spec is legal and thus apply to get result + // type. + // Ex. Actual=[INTEGER], Specs=[NUMBER->NUMBER], [STRING->NUMBER]. So first spec matches and + // return NUMBER. + for (TypeExpressionSpec spec : specifications) { + if (spec.isCompatible(actualSpec)) { + return spec.constructFunc.apply(actualArgs.toArray(new Type[0])); + } + } + return TYPE_ERROR; + } + + @Override + default String usage() { + return Arrays.stream(specifications()) + .map(spec -> getName() + spec) + .collect(Collectors.joining(" or ")); + } + + /** + * Each type expression may be overloaded and include multiple specifications. 
+ * + * @return all valid specifications or empty which means not implemented yet + */ + TypeExpressionSpec[] specifications(); + + /** + * A specification is combination of a construct function and arg types for a type expression + * (represent a constructor) + */ + class TypeExpressionSpec { + Type[] argTypes; + Function constructFunc; + + public TypeExpressionSpec map(Type... args) { + this.argTypes = args; + return this; } - @Override - default String usage() { - return Arrays.stream(specifications()). - map(spec -> getName() + spec). - collect(Collectors.joining(" or ")); + public TypeExpressionSpec to(Function constructFunc) { + // Required for generic type to replace placeholder ex.T with actual position in argument + // list. + // So construct function of generic type can return binding type finally. + this.constructFunc = Generic.specialize(constructFunc, argTypes); + return this; } - /** - * Each type expression may be overloaded and include multiple specifications. - * @return all valid specifications or empty which means not implemented yet - */ - TypeExpressionSpec[] specifications(); - - /** - * A specification is combination of a construct function and arg types - * for a type expression (represent a constructor) - */ - class TypeExpressionSpec { - Type[] argTypes; - Function constructFunc; - - public TypeExpressionSpec map(Type... args) { - this.argTypes = args; - return this; - } + /** Return a base type no matter what's the arg types Mostly this is used for empty arg types */ + public TypeExpressionSpec to(Type returnType) { + this.constructFunc = x -> returnType; + return this; + } - public TypeExpressionSpec to(Function constructFunc) { - // Required for generic type to replace placeholder ex.T with actual position in argument list. - // So construct function of generic type can return binding type finally. 
- this.constructFunc = Generic.specialize(constructFunc, argTypes); - return this; - } + public boolean isCompatible(TypeExpressionSpec otherSpec) { + Type[] expectArgTypes = this.argTypes; + Type[] actualArgTypes = otherSpec.argTypes; - /** Return a base type no matter what's the arg types - Mostly this is used for empty arg types */ - public TypeExpressionSpec to(Type returnType) { - this.constructFunc = x -> returnType; - return this; - } + // Check if arg numbers exactly match + if (expectArgTypes.length != actualArgTypes.length) { + return false; + } - public boolean isCompatible(TypeExpressionSpec otherSpec) { - Type[] expectArgTypes = this.argTypes; - Type[] actualArgTypes = otherSpec.argTypes; - - // Check if arg numbers exactly match - if (expectArgTypes.length != actualArgTypes.length) { - return false; - } - - // Check if all arg types are compatible - for (int i = 0; i < expectArgTypes.length; i++) { - if (!expectArgTypes[i].isCompatible(actualArgTypes[i])) { - return false; - } - } - return true; + // Check if all arg types are compatible + for (int i = 0; i < expectArgTypes.length; i++) { + if (!expectArgTypes[i].isCompatible(actualArgTypes[i])) { + return false; } + } + return true; + } - @Override - public String toString() { - String argTypesStr = Arrays.stream(argTypes). - map(Type::usage). - collect(Collectors.joining(", ")); + @Override + public String toString() { + String argTypesStr = + Arrays.stream(argTypes).map(Type::usage).collect(Collectors.joining(", ")); - // Only show generic type name in return value for clarity - Type returnType = constructFunc.apply(argTypes); - String returnTypeStr = (returnType instanceof Generic) ? returnType.getName() : returnType.usage(); + // Only show generic type name in return value for clarity + Type returnType = constructFunc.apply(argTypes); + String returnTypeStr = + (returnType instanceof Generic) ? 
returnType.getName() : returnType.usage(); - return StringUtils.format("(%s) -> %s", argTypesStr, returnTypeStr); - } + return StringUtils.format("(%s) -> %s", argTypesStr, returnTypeStr); } - + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/base/BaseType.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/base/BaseType.java index 280b7b4c76..37e0c4d4b3 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/base/BaseType.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/base/BaseType.java @@ -3,24 +3,21 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types.base; import java.util.List; import org.opensearch.sql.legacy.antlr.semantic.types.Type; -/** - * Base type interface - */ +/** Base type interface */ public interface BaseType extends Type { - @Override - default Type construct(List others) { - return this; - } + @Override + default Type construct(List others) { + return this; + } - @Override - default String usage() { - return getName(); - } + @Override + default String usage() { + return getName(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/base/OpenSearchDataType.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/base/OpenSearchDataType.java index eab40c2dc7..00ef4afdf1 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/base/OpenSearchDataType.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/base/OpenSearchDataType.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types.base; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchIndex.IndexType.NESTED_FIELD; @@ -13,105 +12,102 @@ import java.util.Map; import org.opensearch.sql.legacy.antlr.semantic.types.Type; -/** - * Base type hierarchy 
based on OpenSearch data type - */ +/** Base type hierarchy based on OpenSearch data type */ public enum OpenSearchDataType implements BaseType { - - TYPE_ERROR, - UNKNOWN, - - SHORT, LONG, - INTEGER(SHORT, LONG), - FLOAT(INTEGER), - DOUBLE(FLOAT), - NUMBER(DOUBLE), - - KEYWORD, - TEXT(KEYWORD), - STRING(TEXT), - - DATE_NANOS, - DATE(DATE_NANOS, STRING), - - BOOLEAN, - - OBJECT, NESTED, - COMPLEX(OBJECT, NESTED), - - GEO_POINT, - - OPENSEARCH_TYPE( - NUMBER, - //STRING, move to under DATE because DATE is compatible - DATE, - BOOLEAN, - COMPLEX, - GEO_POINT - ); - - - /** - * Java Enum's valueOf() may thrown "enum constant not found" exception. - * And Java doesn't provide a contains method. - * So this static map is necessary for check and efficiency. - */ - private static final Map ALL_BASE_TYPES; - static { - ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); - for (OpenSearchDataType type : OpenSearchDataType.values()) { - builder.put(type.name(), type); - } - ALL_BASE_TYPES = builder.build(); + TYPE_ERROR, + UNKNOWN, + + SHORT, + LONG, + INTEGER(SHORT, LONG), + FLOAT(INTEGER), + DOUBLE(FLOAT), + NUMBER(DOUBLE), + + KEYWORD, + TEXT(KEYWORD), + STRING(TEXT), + + DATE_NANOS, + DATE(DATE_NANOS, STRING), + + BOOLEAN, + + OBJECT, + NESTED, + COMPLEX(OBJECT, NESTED), + + GEO_POINT, + + OPENSEARCH_TYPE( + NUMBER, + // STRING, move to under DATE because DATE is compatible + DATE, + BOOLEAN, + COMPLEX, + GEO_POINT); + + /** + * Java Enum's valueOf() may thrown "enum constant not found" exception. And Java doesn't provide + * a contains method. So this static map is necessary for check and efficiency. 
+ */ + private static final Map ALL_BASE_TYPES; + + static { + ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); + for (OpenSearchDataType type : OpenSearchDataType.values()) { + builder.put(type.name(), type); } + ALL_BASE_TYPES = builder.build(); + } - public static OpenSearchDataType typeOf(String str) { - return ALL_BASE_TYPES.getOrDefault(toUpper(str), UNKNOWN); - } + public static OpenSearchDataType typeOf(String str) { + return ALL_BASE_TYPES.getOrDefault(toUpper(str), UNKNOWN); + } - /** Parent of current base type */ - private OpenSearchDataType parent; + /** Parent of current base type */ + private OpenSearchDataType parent; - OpenSearchDataType(OpenSearchDataType... compatibleTypes) { - for (OpenSearchDataType subType : compatibleTypes) { - subType.parent = this; - } + OpenSearchDataType(OpenSearchDataType... compatibleTypes) { + for (OpenSearchDataType subType : compatibleTypes) { + subType.parent = this; } - - @Override - public String getName() { - return name(); + } + + @Override + public String getName() { + return name(); + } + + /** + * For base type, compatibility means this (current type) is ancestor of other in the base type + * hierarchy. + */ + @Override + public boolean isCompatible(Type other) { + // Skip compatibility check if type is unknown + if (this == UNKNOWN || other == UNKNOWN) { + return true; } - /** - * For base type, compatibility means this (current type) is ancestor of other - * in the base type hierarchy. 
- */ - @Override - public boolean isCompatible(Type other) { - // Skip compatibility check if type is unknown - if (this == UNKNOWN || other == UNKNOWN) { - return true; - } - - if (!(other instanceof OpenSearchDataType)) { - // Nested data type is compatible with nested index type for type expression use - if (other instanceof OpenSearchIndex && ((OpenSearchIndex) other).type() == NESTED_FIELD) { - return isCompatible(NESTED); - } - return false; - } - - // One way compatibility: parent base type is compatible with children - OpenSearchDataType cur = (OpenSearchDataType) other; - while (cur != null && cur != this) { - cur = cur.parent; - } - return cur != null; + if (!(other instanceof OpenSearchDataType)) { + // Nested data type is compatible with nested index type for type expression use + if (other instanceof OpenSearchIndex && ((OpenSearchIndex) other).type() == NESTED_FIELD) { + return isCompatible(NESTED); + } + return false; } - @Override - public String toString() { - return "OpenSearch Data Type [" + getName() + "]"; + // One way compatibility: parent base type is compatible with children + OpenSearchDataType cur = (OpenSearchDataType) other; + while (cur != null && cur != this) { + cur = cur.parent; } + return cur != null; + } + + @Override + public String toString() { + return "OpenSearch Data Type [" + getName() + "]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/base/OpenSearchIndex.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/base/OpenSearchIndex.java index b3d971100b..2c790f15aa 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/base/OpenSearchIndex.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/base/OpenSearchIndex.java @@ -3,68 +3,66 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types.base; import java.util.Objects; import 
org.opensearch.sql.legacy.antlr.semantic.types.Type; -/** - * Index type is not Enum because essentially each index is a brand new type. - */ +/** Index type is not Enum because essentially each index is a brand new type. */ public class OpenSearchIndex implements BaseType { - public enum IndexType { - INDEX, NESTED_FIELD, INDEX_PATTERN - } + public enum IndexType { + INDEX, + NESTED_FIELD, + INDEX_PATTERN + } - private final String indexName; - private final IndexType indexType; + private final String indexName; + private final IndexType indexType; - public OpenSearchIndex(String indexName, IndexType indexType) { - this.indexName = indexName; - this.indexType = indexType; - } + public OpenSearchIndex(String indexName, IndexType indexType) { + this.indexName = indexName; + this.indexType = indexType; + } - public IndexType type() { - return indexType; - } + public IndexType type() { + return indexType; + } - @Override - public String getName() { - return indexName; - } + @Override + public String getName() { + return indexName; + } - @Override - public boolean isCompatible(Type other) { - return equals(other); - } + @Override + public boolean isCompatible(Type other) { + return equals(other); + } - @Override - public String usage() { - return indexType.name(); - } + @Override + public String usage() { + return indexType.name(); + } - @Override - public String toString() { - return indexType + " [" + indexName + "]"; - } + @Override + public String toString() { + return indexType + " [" + indexName + "]"; + } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - OpenSearchIndex index = (OpenSearchIndex) o; - return Objects.equals(indexName, index.indexName) - && indexType == index.indexType; + @Override + public boolean equals(Object o) { + if (this == o) { + return true; } - - @Override - public int hashCode() { - return Objects.hash(indexName, indexType); + if (o 
== null || getClass() != o.getClass()) { + return false; } + OpenSearchIndex index = (OpenSearchIndex) o; + return Objects.equals(indexName, index.indexName) && indexType == index.indexType; + } + + @Override + public int hashCode() { + return Objects.hash(indexName, indexType); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/function/AggregateFunction.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/function/AggregateFunction.java index 37e4091b0a..9cebf3dda6 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/function/AggregateFunction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/function/AggregateFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types.function; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.DOUBLE; @@ -15,41 +14,38 @@ import org.opensearch.sql.legacy.antlr.semantic.types.Type; import org.opensearch.sql.legacy.antlr.semantic.types.TypeExpression; -/** - * Aggregate function - */ +/** Aggregate function */ public enum AggregateFunction implements TypeExpression { - COUNT( - func().to(INTEGER), // COUNT(*) - func(OPENSEARCH_TYPE).to(INTEGER) - ), - MAX(func(T(NUMBER)).to(T)), - MIN(func(T(NUMBER)).to(T)), - AVG(func(T(NUMBER)).to(DOUBLE)), - SUM(func(T(NUMBER)).to(T)); - - private TypeExpressionSpec[] specifications; - - AggregateFunction(TypeExpressionSpec... specifications) { - this.specifications = specifications; - } - - @Override - public String getName() { - return name(); - } - - @Override - public TypeExpressionSpec[] specifications() { - return specifications; - } - - private static TypeExpressionSpec func(Type... 
argTypes) { - return new TypeExpressionSpec().map(argTypes); - } - - @Override - public String toString() { - return "Function [" + name() + "]"; - } + COUNT( + func().to(INTEGER), // COUNT(*) + func(OPENSEARCH_TYPE).to(INTEGER)), + MAX(func(T(NUMBER)).to(T)), + MIN(func(T(NUMBER)).to(T)), + AVG(func(T(NUMBER)).to(DOUBLE)), + SUM(func(T(NUMBER)).to(T)); + + private TypeExpressionSpec[] specifications; + + AggregateFunction(TypeExpressionSpec... specifications) { + this.specifications = specifications; + } + + @Override + public String getName() { + return name(); + } + + @Override + public TypeExpressionSpec[] specifications() { + return specifications; + } + + private static TypeExpressionSpec func(Type... argTypes) { + return new TypeExpressionSpec().map(argTypes); + } + + @Override + public String toString() { + return "Function [" + name() + "]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/function/OpenSearchScalarFunction.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/function/OpenSearchScalarFunction.java index 93e1950d50..435a5ca968 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/function/OpenSearchScalarFunction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/function/OpenSearchScalarFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types.function; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.BOOLEAN; @@ -16,87 +15,73 @@ import org.opensearch.sql.legacy.antlr.semantic.types.Type; import org.opensearch.sql.legacy.antlr.semantic.types.TypeExpression; -/** - * OpenSearch special scalar functions - */ +/** OpenSearch special scalar functions */ public enum OpenSearchScalarFunction implements TypeExpression { + DATE_HISTOGRAM(), // this is aggregate function + DAY_OF_MONTH(func(DATE).to(INTEGER)), + 
DAY_OF_YEAR(func(DATE).to(INTEGER)), + DAY_OF_WEEK(func(DATE).to(INTEGER)), + EXCLUDE(), // can only be used in SELECT? + EXTENDED_STATS(), // need confirm + FIELD(), // couldn't find test cases related + FILTER(), + GEO_BOUNDING_BOX(func(GEO_POINT, NUMBER, NUMBER, NUMBER, NUMBER).to(BOOLEAN)), + GEO_CELL(), // optional arg or overloaded spec is required. + GEO_DISTANCE(func(GEO_POINT, STRING, NUMBER, NUMBER).to(BOOLEAN)), + GEO_DISTANCE_RANGE(func(GEO_POINT, STRING, NUMBER, NUMBER).to(BOOLEAN)), + GEO_INTERSECTS(), // ? + GEO_POLYGON(), // varargs is required for 2nd arg + HISTOGRAM(), // same as date_histogram + HOUR_OF_DAY(func(DATE).to(INTEGER)), + INCLUDE(), // same as exclude + IN_TERMS(), // varargs + MATCHPHRASE(func(STRING, STRING).to(BOOLEAN), func(STRING).to(STRING)), // slop arg is optional + MATCH_PHRASE(MATCHPHRASE.specifications()), + MATCHQUERY(func(STRING, STRING).to(BOOLEAN), func(STRING).to(STRING)), + MATCH_QUERY(MATCHQUERY.specifications()), + MINUTE_OF_DAY(func(DATE).to(INTEGER)), // or long? + MINUTE_OF_HOUR(func(DATE).to(INTEGER)), + MONTH_OF_YEAR(func(DATE).to(INTEGER)), + MULTIMATCH(), // kw arguments + MULTI_MATCH(MULTIMATCH.specifications()), + NESTED(), // overloaded + PERCENTILES(), // ? + REGEXP_QUERY(), // ? + REVERSE_NESTED(), // need overloaded + QUERY(func(STRING).to(BOOLEAN)), + RANGE(), // aggregate function + SCORE(), // semantic problem? + SECOND_OF_MINUTE(func(DATE).to(INTEGER)), + STATS(), + TERM(), // semantic problem + TERMS(), // semantic problem + TOPHITS(), // only available in SELECT + WEEK_OF_YEAR(func(DATE).to(INTEGER)), + WILDCARDQUERY(func(STRING, STRING).to(BOOLEAN), func(STRING).to(STRING)), + WILDCARD_QUERY(WILDCARDQUERY.specifications()); - DATE_HISTOGRAM(), // this is aggregate function - DAY_OF_MONTH(func(DATE).to(INTEGER)), - DAY_OF_YEAR(func(DATE).to(INTEGER)), - DAY_OF_WEEK(func(DATE).to(INTEGER)), - EXCLUDE(), // can only be used in SELECT? 
- EXTENDED_STATS(), // need confirm - FIELD(), // couldn't find test cases related - FILTER(), - GEO_BOUNDING_BOX(func(GEO_POINT, NUMBER, NUMBER, NUMBER, NUMBER).to(BOOLEAN)), - GEO_CELL(), // optional arg or overloaded spec is required. - GEO_DISTANCE(func(GEO_POINT, STRING, NUMBER, NUMBER).to(BOOLEAN)), - GEO_DISTANCE_RANGE(func(GEO_POINT, STRING, NUMBER, NUMBER).to(BOOLEAN)), - GEO_INTERSECTS(), //? - GEO_POLYGON(), // varargs is required for 2nd arg - HISTOGRAM(), // same as date_histogram - HOUR_OF_DAY(func(DATE).to(INTEGER)), - INCLUDE(), // same as exclude - IN_TERMS(), // varargs - MATCHPHRASE( - func(STRING, STRING).to(BOOLEAN), - func(STRING).to(STRING) - ), //slop arg is optional - MATCH_PHRASE(MATCHPHRASE.specifications()), - MATCHQUERY( - func(STRING, STRING).to(BOOLEAN), - func(STRING).to(STRING) - ), - MATCH_QUERY(MATCHQUERY.specifications()), - MINUTE_OF_DAY(func(DATE).to(INTEGER)), // or long? - MINUTE_OF_HOUR(func(DATE).to(INTEGER)), - MONTH_OF_YEAR(func(DATE).to(INTEGER)), - MULTIMATCH(), // kw arguments - MULTI_MATCH(MULTIMATCH.specifications()), - NESTED(), // overloaded - PERCENTILES(), //? - REGEXP_QUERY(), //? - REVERSE_NESTED(), // need overloaded - QUERY(func(STRING).to(BOOLEAN)), - RANGE(), // aggregate function - SCORE(), // semantic problem? - SECOND_OF_MINUTE(func(DATE).to(INTEGER)), - STATS(), - TERM(), // semantic problem - TERMS(), // semantic problem - TOPHITS(), // only available in SELECT - WEEK_OF_YEAR(func(DATE).to(INTEGER)), - WILDCARDQUERY( - func(STRING, STRING).to(BOOLEAN), - func(STRING).to(STRING) - ), - WILDCARD_QUERY(WILDCARDQUERY.specifications()); - - - private final TypeExpressionSpec[] specifications; - - OpenSearchScalarFunction(TypeExpressionSpec... specifications) { - this.specifications = specifications; - } + private final TypeExpressionSpec[] specifications; - @Override - public String getName() { - return name(); - } + OpenSearchScalarFunction(TypeExpressionSpec... 
specifications) { + this.specifications = specifications; + } - @Override - public TypeExpressionSpec[] specifications() { - return specifications; - } + @Override + public String getName() { + return name(); + } - private static TypeExpressionSpec func(Type... argTypes) { - return new TypeExpressionSpec().map(argTypes); - } + @Override + public TypeExpressionSpec[] specifications() { + return specifications; + } - @Override - public String toString() { - return "Function [" + name() + "]"; - } + private static TypeExpressionSpec func(Type... argTypes) { + return new TypeExpressionSpec().map(argTypes); + } + @Override + public String toString() { + return "Function [" + name() + "]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/function/ScalarFunction.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/function/ScalarFunction.java index e993562df8..5dfada7ca8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/function/ScalarFunction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/function/ScalarFunction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types.function; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.BOOLEAN; @@ -18,123 +17,98 @@ import org.opensearch.sql.legacy.antlr.semantic.types.TypeExpression; import org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType; -/** - * Scalar SQL function - */ +/** Scalar SQL function */ public enum ScalarFunction implements TypeExpression { + ABS(func(T(NUMBER)).to(T)), // translate to Java: T ABS(T) + ACOS(func(T(NUMBER)).to(DOUBLE)), + ADD(func(T(NUMBER), NUMBER).to(T)), + ASCII(func(T(STRING)).to(INTEGER)), + ASIN(func(T(NUMBER)).to(DOUBLE)), + ATAN(func(T(NUMBER)).to(DOUBLE)), + ATAN2(func(T(NUMBER), NUMBER).to(DOUBLE)), + CAST(), + CBRT(func(T(NUMBER)).to(T)), + 
CEIL(func(T(NUMBER)).to(T)), + CONCAT(), // TODO: varargs support required + CONCAT_WS(), + COS(func(T(NUMBER)).to(DOUBLE)), + COSH(func(T(NUMBER)).to(DOUBLE)), + COT(func(T(NUMBER)).to(DOUBLE)), + CURDATE(func().to(OpenSearchDataType.DATE)), + DATE(func(OpenSearchDataType.DATE).to(OpenSearchDataType.DATE)), + DATE_FORMAT( + func(OpenSearchDataType.DATE, STRING).to(STRING), + func(OpenSearchDataType.DATE, STRING, STRING).to(STRING)), + DAYOFMONTH(func(OpenSearchDataType.DATE).to(INTEGER)), + DEGREES(func(T(NUMBER)).to(DOUBLE)), + DIVIDE(func(T(NUMBER), NUMBER).to(T)), + E(func().to(DOUBLE)), + EXP(func(T(NUMBER)).to(T)), + EXPM1(func(T(NUMBER)).to(T)), + FLOOR(func(T(NUMBER)).to(T)), + IF(func(BOOLEAN, OPENSEARCH_TYPE, OPENSEARCH_TYPE).to(OPENSEARCH_TYPE)), + IFNULL(func(OPENSEARCH_TYPE, OPENSEARCH_TYPE).to(OPENSEARCH_TYPE)), + ISNULL(func(OPENSEARCH_TYPE).to(INTEGER)), + LEFT(func(T(STRING), INTEGER).to(T)), + LENGTH(func(STRING).to(INTEGER)), + LN(func(T(NUMBER)).to(DOUBLE)), + LOCATE(func(STRING, STRING, INTEGER).to(INTEGER), func(STRING, STRING).to(INTEGER)), + LOG(func(T(NUMBER)).to(DOUBLE), func(T(NUMBER), NUMBER).to(DOUBLE)), + LOG2(func(T(NUMBER)).to(DOUBLE)), + LOG10(func(T(NUMBER)).to(DOUBLE)), + LOWER(func(T(STRING)).to(T), func(T(STRING), STRING).to(T)), + LTRIM(func(T(STRING)).to(T)), + MAKETIME(func(INTEGER, INTEGER, INTEGER).to(OpenSearchDataType.DATE)), + MODULUS(func(T(NUMBER), NUMBER).to(T)), + MONTH(func(OpenSearchDataType.DATE).to(INTEGER)), + MONTHNAME(func(OpenSearchDataType.DATE).to(STRING)), + MULTIPLY(func(T(NUMBER), NUMBER).to(NUMBER)), + NOW(func().to(OpenSearchDataType.DATE)), + PI(func().to(DOUBLE)), + POW(func(T(NUMBER)).to(T), func(T(NUMBER), NUMBER).to(T)), + POWER(func(T(NUMBER)).to(T), func(T(NUMBER), NUMBER).to(T)), + RADIANS(func(T(NUMBER)).to(DOUBLE)), + RAND(func().to(NUMBER), func(T(NUMBER)).to(T)), + REPLACE(func(T(STRING), STRING, STRING).to(T)), + RIGHT(func(T(STRING), INTEGER).to(T)), + RINT(func(T(NUMBER)).to(T)), + 
ROUND(func(T(NUMBER)).to(T)), + RTRIM(func(T(STRING)).to(T)), + SIGN(func(T(NUMBER)).to(T)), + SIGNUM(func(T(NUMBER)).to(T)), + SIN(func(T(NUMBER)).to(DOUBLE)), + SINH(func(T(NUMBER)).to(DOUBLE)), + SQRT(func(T(NUMBER)).to(T)), + SUBSTRING(func(T(STRING), INTEGER, INTEGER).to(T)), + SUBTRACT(func(T(NUMBER), NUMBER).to(T)), + TAN(func(T(NUMBER)).to(DOUBLE)), + TIMESTAMP(func(OpenSearchDataType.DATE).to(OpenSearchDataType.DATE)), + TRIM(func(T(STRING)).to(T)), + UPPER(func(T(STRING)).to(T), func(T(STRING), STRING).to(T)), + YEAR(func(OpenSearchDataType.DATE).to(INTEGER)); - ABS(func(T(NUMBER)).to(T)), // translate to Java: T ABS(T) - ACOS(func(T(NUMBER)).to(DOUBLE)), - ADD(func(T(NUMBER), NUMBER).to(T)), - ASCII(func(T(STRING)).to(INTEGER)), - ASIN(func(T(NUMBER)).to(DOUBLE)), - ATAN(func(T(NUMBER)).to(DOUBLE)), - ATAN2(func(T(NUMBER), NUMBER).to(DOUBLE)), - CAST(), - CBRT(func(T(NUMBER)).to(T)), - CEIL(func(T(NUMBER)).to(T)), - CONCAT(), // TODO: varargs support required - CONCAT_WS(), - COS(func(T(NUMBER)).to(DOUBLE)), - COSH(func(T(NUMBER)).to(DOUBLE)), - COT(func(T(NUMBER)).to(DOUBLE)), - CURDATE(func().to(OpenSearchDataType.DATE)), - DATE(func(OpenSearchDataType.DATE).to(OpenSearchDataType.DATE)), - DATE_FORMAT( - func(OpenSearchDataType.DATE, STRING).to(STRING), - func(OpenSearchDataType.DATE, STRING, STRING).to(STRING) - ), - DAYOFMONTH(func(OpenSearchDataType.DATE).to(INTEGER)), - DEGREES(func(T(NUMBER)).to(DOUBLE)), - DIVIDE(func(T(NUMBER), NUMBER).to(T)), - E(func().to(DOUBLE)), - EXP(func(T(NUMBER)).to(T)), - EXPM1(func(T(NUMBER)).to(T)), - FLOOR(func(T(NUMBER)).to(T)), - IF(func(BOOLEAN, OPENSEARCH_TYPE, OPENSEARCH_TYPE).to(OPENSEARCH_TYPE)), - IFNULL(func(OPENSEARCH_TYPE, OPENSEARCH_TYPE).to(OPENSEARCH_TYPE)), - ISNULL(func(OPENSEARCH_TYPE).to(INTEGER)), - LEFT(func(T(STRING), INTEGER).to(T)), - LENGTH(func(STRING).to(INTEGER)), - LN(func(T(NUMBER)).to(DOUBLE)), - LOCATE( - func(STRING, STRING, INTEGER).to(INTEGER), - func(STRING, STRING).to(INTEGER) - 
), - LOG( - func(T(NUMBER)).to(DOUBLE), - func(T(NUMBER), NUMBER).to(DOUBLE) - ), - LOG2(func(T(NUMBER)).to(DOUBLE)), - LOG10(func(T(NUMBER)).to(DOUBLE)), - LOWER( - func(T(STRING)).to(T), - func(T(STRING), STRING).to(T) - ), - LTRIM(func(T(STRING)).to(T)), - MAKETIME(func(INTEGER, INTEGER, INTEGER).to(OpenSearchDataType.DATE)), - MODULUS(func(T(NUMBER), NUMBER).to(T)), - MONTH(func(OpenSearchDataType.DATE).to(INTEGER)), - MONTHNAME(func(OpenSearchDataType.DATE).to(STRING)), - MULTIPLY(func(T(NUMBER), NUMBER).to(NUMBER)), - NOW(func().to(OpenSearchDataType.DATE)), - PI(func().to(DOUBLE)), - POW( - func(T(NUMBER)).to(T), - func(T(NUMBER), NUMBER).to(T) - ), - POWER( - func(T(NUMBER)).to(T), - func(T(NUMBER), NUMBER).to(T) - ), - RADIANS(func(T(NUMBER)).to(DOUBLE)), - RAND( - func().to(NUMBER), - func(T(NUMBER)).to(T) - ), - REPLACE(func(T(STRING), STRING, STRING).to(T)), - RIGHT(func(T(STRING), INTEGER).to(T)), - RINT(func(T(NUMBER)).to(T)), - ROUND(func(T(NUMBER)).to(T)), - RTRIM(func(T(STRING)).to(T)), - SIGN(func(T(NUMBER)).to(T)), - SIGNUM(func(T(NUMBER)).to(T)), - SIN(func(T(NUMBER)).to(DOUBLE)), - SINH(func(T(NUMBER)).to(DOUBLE)), - SQRT(func(T(NUMBER)).to(T)), - SUBSTRING(func(T(STRING), INTEGER, INTEGER).to(T)), - SUBTRACT(func(T(NUMBER), NUMBER).to(T)), - TAN(func(T(NUMBER)).to(DOUBLE)), - TIMESTAMP(func(OpenSearchDataType.DATE).to(OpenSearchDataType.DATE)), - TRIM(func(T(STRING)).to(T)), - UPPER( - func(T(STRING)).to(T), - func(T(STRING), STRING).to(T) - ), - YEAR(func(OpenSearchDataType.DATE).to(INTEGER)); - - private final TypeExpressionSpec[] specifications; + private final TypeExpressionSpec[] specifications; - ScalarFunction(TypeExpressionSpec... specifications) { - this.specifications = specifications; - } + ScalarFunction(TypeExpressionSpec... 
specifications) { + this.specifications = specifications; + } - @Override - public String getName() { - return name(); - } + @Override + public String getName() { + return name(); + } - @Override - public TypeExpressionSpec[] specifications() { - return specifications; - } + @Override + public TypeExpressionSpec[] specifications() { + return specifications; + } - private static TypeExpressionSpec func(Type... argTypes) { - return new TypeExpressionSpec().map(argTypes); - } + private static TypeExpressionSpec func(Type... argTypes) { + return new TypeExpressionSpec().map(argTypes); + } - @Override - public String toString() { - return "Function [" + name() + "]"; - } + @Override + public String toString() { + return "Function [" + name() + "]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/ComparisonOperator.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/ComparisonOperator.java index 993d996df3..19e8f85aa3 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/ComparisonOperator.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/ComparisonOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types.operator; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.BOOLEAN; @@ -12,53 +11,50 @@ import java.util.List; import org.opensearch.sql.legacy.antlr.semantic.types.Type; -/** - * Type for comparison operator - */ +/** Type for comparison operator */ public enum ComparisonOperator implements Type { - - EQUAL("="), - NOT_EQUAL("<>"), - NOT_EQUAL2("!="), - GREATER_THAN(">"), - GREATER_THAN_OR_EQUAL_TO(">="), - SMALLER_THAN("<"), - SMALLER_THAN_OR_EQUAL_TO("<="), - IS("IS"); - - /** Actual name representing the operator */ - private final String name; - - ComparisonOperator(String name) { - this.name = name; - } - - 
@Override - public String getName() { - return name; - } - - @Override - public Type construct(List actualArgs) { - if (actualArgs.size() != 2) { - return TYPE_ERROR; - } - - Type leftType = actualArgs.get(0); - Type rightType = actualArgs.get(1); - if (leftType.isCompatible(rightType) || rightType.isCompatible(leftType)) { - return BOOLEAN; - } - return TYPE_ERROR; - } - - @Override - public String usage() { - return "Please use compatible types from each side."; + EQUAL("="), + NOT_EQUAL("<>"), + NOT_EQUAL2("!="), + GREATER_THAN(">"), + GREATER_THAN_OR_EQUAL_TO(">="), + SMALLER_THAN("<"), + SMALLER_THAN_OR_EQUAL_TO("<="), + IS("IS"); + + /** Actual name representing the operator */ + private final String name; + + ComparisonOperator(String name) { + this.name = name; + } + + @Override + public String getName() { + return name; + } + + @Override + public Type construct(List actualArgs) { + if (actualArgs.size() != 2) { + return TYPE_ERROR; } - @Override - public String toString() { - return "Operator [" + getName() + "]"; + Type leftType = actualArgs.get(0); + Type rightType = actualArgs.get(1); + if (leftType.isCompatible(rightType) || rightType.isCompatible(leftType)) { + return BOOLEAN; } + return TYPE_ERROR; + } + + @Override + public String usage() { + return "Please use compatible types from each side."; + } + + @Override + public String toString() { + return "Operator [" + getName() + "]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/JoinOperator.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/JoinOperator.java index 75bc306cd9..02decab1ae 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/JoinOperator.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/JoinOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types.operator; 
import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.TYPE_ERROR; @@ -13,35 +12,32 @@ import org.opensearch.sql.legacy.antlr.semantic.types.Type; import org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchIndex; -/** - * Join operator - */ +/** Join operator */ public enum JoinOperator implements Type { - JOIN; - - @Override - public String getName() { - return name(); - } - - @Override - public Type construct(List others) { - Optional isAnyNonIndexType = others.stream(). - filter(type -> !(type instanceof OpenSearchIndex)). - findAny(); - if (isAnyNonIndexType.isPresent()) { - return TYPE_ERROR; - } - return others.get(0); - } - - @Override - public String usage() { - return "Please join index with other index or its nested field."; - } - - @Override - public String toString() { - return "Operator [" + getName() + "]"; + JOIN; + + @Override + public String getName() { + return name(); + } + + @Override + public Type construct(List others) { + Optional isAnyNonIndexType = + others.stream().filter(type -> !(type instanceof OpenSearchIndex)).findAny(); + if (isAnyNonIndexType.isPresent()) { + return TYPE_ERROR; } + return others.get(0); + } + + @Override + public String usage() { + return "Please join index with other index or its nested field."; + } + + @Override + public String toString() { + return "Operator [" + getName() + "]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/SetOperator.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/SetOperator.java index 988c9856e3..e8a80cd821 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/SetOperator.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/operator/SetOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types.operator; import static 
org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.TYPE_ERROR; @@ -11,45 +10,43 @@ import java.util.List; import org.opensearch.sql.legacy.antlr.semantic.types.Type; -/** - * Set operator between queries. - */ +/** Set operator between queries. */ public enum SetOperator implements Type { - UNION, - MINUS, - IN; - - @Override - public String getName() { - return name(); + UNION, + MINUS, + IN; + + @Override + public String getName() { + return name(); + } + + @Override + public Type construct(List others) { + if (others.size() < 2) { + throw new IllegalStateException(""); } - @Override - public Type construct(List others) { - if (others.size() < 2) { - throw new IllegalStateException(""); - } - - // Compare each type and return anyone for now if pass - for (int i = 0; i < others.size() - 1; i++) { - Type type1 = others.get(i); - Type type2 = others.get(i + 1); - - // Do it again as in Product because single base type won't be wrapped in Product - if (!type1.isCompatible(type2) && !type2.isCompatible(type1)) { - return TYPE_ERROR; - } - } - return others.get(0); - } - - @Override - public String usage() { - return "Please return field(s) of compatible type from each query."; - } + // Compare each type and return anyone for now if pass + for (int i = 0; i < others.size() - 1; i++) { + Type type1 = others.get(i); + Type type2 = others.get(i + 1); - @Override - public String toString() { - return "Operator [" + getName() + "]"; + // Do it again as in Product because single base type won't be wrapped in Product + if (!type1.isCompatible(type2) && !type2.isCompatible(type1)) { + return TYPE_ERROR; + } } + return others.get(0); + } + + @Override + public String usage() { + return "Please return field(s) of compatible type from each query."; + } + + @Override + public String toString() { + return "Operator [" + getName() + "]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/special/Generic.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/special/Generic.java index 7efdb55426..ad718a8256 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/special/Generic.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/special/Generic.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types.special; import java.util.Arrays; @@ -12,79 +11,84 @@ import org.opensearch.sql.legacy.antlr.semantic.types.Type; import org.opensearch.sql.legacy.utils.StringUtils; -/** - * Generic type for more precise type expression - */ +/** Generic type for more precise type expression */ public class Generic implements Type { - /** Generic type placeholder namespace */ - private enum Name { T } - - /** Construct function to find generic type in argument list with same name */ - public static final Function T = types -> findSameGenericType(Name.T, types); - - /** Generic type name */ - private final Name name; - - /** Actual type binding to current generic type */ - private final Type binding; - - public Generic(Name name, Type type) { - this.name = name; - this.binding = type; - } - - public static Type T(Type type) { - return new Generic(Name.T, type); - } - - /** - * Return a function for replacing generic type in argument list with binding type. - * Ex. 
after T instance found in argument list [T(NUMBER), STRING], create function to return actualTypes[0] - * - * @param func function for finding generic type in argument list (namely, function T above) - * @param actualArgTypes actual argument types - */ - public static Function specialize(Function func, - Type[] actualArgTypes) { - if (func != T) { - return func; - } - - Type genericType = func.apply(actualArgTypes); - int genericTypeIndex = Arrays.asList(actualArgTypes).indexOf(genericType); - return actualTypes -> actualTypes[genericTypeIndex]; + /** Generic type placeholder namespace */ + private enum Name { + T + } + + /** Construct function to find generic type in argument list with same name */ + public static final Function T = types -> findSameGenericType(Name.T, types); + + /** Generic type name */ + private final Name name; + + /** Actual type binding to current generic type */ + private final Type binding; + + public Generic(Name name, Type type) { + this.name = name; + this.binding = type; + } + + public static Type T(Type type) { + return new Generic(Name.T, type); + } + + /** + * Return a function for replacing generic type in argument list with binding type. Ex. after T + * instance found in argument list [T(NUMBER), STRING], create function to return actualTypes[0] + * + * @param func function for finding generic type in argument list (namely, function T above) + * @param actualArgTypes actual argument types + */ + public static Function specialize( + Function func, Type[] actualArgTypes) { + if (func != T) { + return func; } - /** Find placeholder in argument list, ex. in [T(NUMBER), STRING] -> T, return instance at first T */ - private static Type findSameGenericType(Name name, Type[] types) { - return Arrays.stream(types). - filter(type -> type instanceof Generic). - filter(type -> ((Generic) type).name == name). - findFirst(). - orElseThrow(() -> new IllegalStateException(StringUtils.format( - "Type definition is wrong. 
Could not unbind generic type [%s] in type list %s.", - name, types)) - ); - } - - @Override - public String getName() { - return this.name.name(); - } - - @Override - public boolean isCompatible(Type other) { - return binding.isCompatible(other); - } - - @Override - public Type construct(List others) { - return binding.construct(others); - } - - @Override - public String usage() { - return binding.usage() + " " + name; - } + Type genericType = func.apply(actualArgTypes); + int genericTypeIndex = Arrays.asList(actualArgTypes).indexOf(genericType); + return actualTypes -> actualTypes[genericTypeIndex]; + } + + /** + * Find placeholder in argument list, ex. in [T(NUMBER), STRING] -> T, return instance at first T + */ + private static Type findSameGenericType(Name name, Type[] types) { + return Arrays.stream(types) + .filter(type -> type instanceof Generic) + .filter(type -> ((Generic) type).name == name) + .findFirst() + .orElseThrow( + () -> + new IllegalStateException( + StringUtils.format( + "Type definition is wrong. 
Could not unbind generic type [%s] in type list" + + " %s.", + name, types))); + } + + @Override + public String getName() { + return this.name.name(); + } + + @Override + public boolean isCompatible(Type other) { + return binding.isCompatible(other); + } + + @Override + public Type construct(List others) { + return binding.construct(others); + } + + @Override + public String usage() { + return binding.usage() + " " + name; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/special/Product.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/special/Product.java index ad4d86895b..98f04dc629 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/special/Product.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/types/special/Product.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types.special; import java.util.Collections; @@ -12,62 +11,56 @@ import lombok.Getter; import org.opensearch.sql.legacy.antlr.semantic.types.Type; -/** - * Combination of multiple types, ex. function arguments - */ +/** Combination of multiple types, ex. 
function arguments */ public class Product implements Type { - @Getter - private final List types; + @Getter private final List types; - public Product(List itemTypes) { - types = Collections.unmodifiableList(itemTypes); - } + public Product(List itemTypes) { + types = Collections.unmodifiableList(itemTypes); + } - @Override - public String getName() { - return "Product of types " + types; - } + @Override + public String getName() { + return "Product of types " + types; + } - @Override - public boolean isCompatible(Type other) { - if (!(other instanceof Product)) { - return false; - } - - Product otherProd = (Product) other; - if (types.size() != otherProd.types.size()) { - return false; - } - - for (int i = 0; i < types.size(); i++) { - Type type = types.get(i); - Type otherType = otherProd.types.get(i); - if (!isCompatibleEitherWay(type, otherType)) { - return false; - } - } - return true; + @Override + public boolean isCompatible(Type other) { + if (!(other instanceof Product)) { + return false; } - @Override - public Type construct(List others) { - return this; + Product otherProd = (Product) other; + if (types.size() != otherProd.types.size()) { + return false; } - @Override - public String usage() { - if (types.isEmpty()) { - return "(*)"; - } - return types.stream(). - map(Type::usage). 
- collect(Collectors.joining(", ", "(", ")")); + for (int i = 0; i < types.size(); i++) { + Type type = types.get(i); + Type otherType = otherProd.types.get(i); + if (!isCompatibleEitherWay(type, otherType)) { + return false; + } } + return true; + } + + @Override + public Type construct(List others) { + return this; + } - /** Perform two-way compatibility check here which is different from normal type expression */ - private boolean isCompatibleEitherWay(Type type1, Type type2) { - return type1.isCompatible(type2) || type2.isCompatible(type1); + @Override + public String usage() { + if (types.isEmpty()) { + return "(*)"; } + return types.stream().map(Type::usage).collect(Collectors.joining(", ", "(", ")")); + } + /** Perform two-way compatibility check here which is different from normal type expression */ + private boolean isCompatibleEitherWay(Type type1, Type type2) { + return type1.isCompatible(type2) || type2.isCompatible(type1); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/visitor/OpenSearchMappingLoader.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/visitor/OpenSearchMappingLoader.java index 7bfca0a015..4d009dc438 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/visitor/OpenSearchMappingLoader.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/visitor/OpenSearchMappingLoader.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.visitor; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchIndex.IndexType.INDEX; @@ -12,7 +11,6 @@ import java.util.HashSet; import java.util.Map; import java.util.Set; -import java.util.stream.Collectors; import org.opensearch.sql.legacy.antlr.semantic.scope.Environment; import org.opensearch.sql.legacy.antlr.semantic.scope.Namespace; import org.opensearch.sql.legacy.antlr.semantic.scope.SemanticContext; @@ -27,178 +25,181 @@ import 
org.opensearch.sql.legacy.esdomain.mapping.IndexMappings; import org.opensearch.sql.legacy.utils.StringUtils; -/** - * Load index and nested field mapping into semantic context - */ +/** Load index and nested field mapping into semantic context */ public class OpenSearchMappingLoader implements GenericSqlParseTreeVisitor { - /** Semantic context shared in the semantic analysis process */ - private final SemanticContext context; - - /** Local cluster state for mapping query */ - private final LocalClusterState clusterState; - - /** Threshold to decide if continue the analysis */ - private final int threshold; - - public OpenSearchMappingLoader(SemanticContext context, LocalClusterState clusterState, int threshold) { - this.context = context; - this.clusterState = clusterState; - this.threshold = threshold; - } - - /* - * Suppose index 'accounts' includes 'name', 'age' and nested field 'projects' - * which includes 'name' and 'active'. - * - * 1. Define itself: - * ----- new definitions ----- - * accounts -> INDEX - * - * 2. Define without alias no matter if alias given: - * 'accounts' -> INDEX - * ----- new definitions ----- - * 'name' -> TEXT - * 'age' -> INTEGER - * 'projects' -> NESTED - * 'projects.name' -> KEYWORD - * 'projects.active' -> BOOLEAN - */ - @Override - public Type visitIndexName(String indexName) { - if (isNotNested(indexName)) { - defineIndexType(indexName); - loadAllFieldsWithType(indexName); - } - return defaultValue(); - } - - @Override - public void visitAs(String alias, Type type) { - if (!(type instanceof OpenSearchIndex)) { - return; - } - - OpenSearchIndex index = (OpenSearchIndex) type; - String indexName = type.getName(); - - if (index.type() == INDEX) { - String aliasName = alias.isEmpty() ? 
indexName : alias; - defineAllFieldNamesByAppendingAliasPrefix(indexName, aliasName); - } else if (index.type() == NESTED_FIELD) { - if (!alias.isEmpty()) { - defineNestedFieldNamesByReplacingWithAlias(indexName, alias); - } - } // else Do nothing for index pattern - } - - private void defineIndexType(String indexName) { - environment().define(new Symbol(Namespace.FIELD_NAME, indexName), new OpenSearchIndex(indexName, INDEX)); - } - - private void loadAllFieldsWithType(String indexName) { - Set mappings = getFieldMappings(indexName); - mappings.forEach(mapping -> mapping.flat(this::defineFieldName)); - } - - /* - * 3.1 Define with alias if given: ex."SELECT * FROM accounts a". - * 'accounts' -> INDEX - * 'name' -> TEXT - * 'age' -> INTEGER - * 'projects' -> NESTED - * 'projects.name' -> KEYWORD - * 'projects.active' -> BOOLEAN - * ----- new definitions ----- - * ['a' -> INDEX] -- this is done in semantic analyzer - * 'a.name' -> TEXT - * 'a.age' -> INTEGER - * 'a.projects' -> NESTED - * 'a.projects.name' -> KEYWORD - * 'a.projects.active' -> BOOLEAN - * - * 3.2 Otherwise define by index full name: ex."SELECT * FROM account" - * 'accounts' -> INDEX - * 'name' -> TEXT - * 'age' -> INTEGER - * 'projects' -> NESTED - * 'projects.name' -> KEYWORD - * 'projects.active' -> BOOLEAN - * ----- new definitions ----- - * 'accounts.name' -> TEXT - * 'accounts.age' -> INTEGER - * 'accounts.projects' -> NESTED - * 'accounts.projects.name' -> KEYWORD - * 'accounts.projects.active' -> BOOLEAN - */ - private void defineAllFieldNamesByAppendingAliasPrefix(String indexName, String alias) { - Set mappings = getFieldMappings(indexName); - mappings.stream().forEach(mapping -> mapping.flat((fieldName, type) -> - defineFieldName(alias + "." 
+ fieldName, type))); + /** Semantic context shared in the semantic analysis process */ + private final SemanticContext context; + + /** Local cluster state for mapping query */ + private final LocalClusterState clusterState; + + /** Threshold to decide if continue the analysis */ + private final int threshold; + + public OpenSearchMappingLoader( + SemanticContext context, LocalClusterState clusterState, int threshold) { + this.context = context; + this.clusterState = clusterState; + this.threshold = threshold; + } + + /* + * Suppose index 'accounts' includes 'name', 'age' and nested field 'projects' + * which includes 'name' and 'active'. + * + * 1. Define itself: + * ----- new definitions ----- + * accounts -> INDEX + * + * 2. Define without alias no matter if alias given: + * 'accounts' -> INDEX + * ----- new definitions ----- + * 'name' -> TEXT + * 'age' -> INTEGER + * 'projects' -> NESTED + * 'projects.name' -> KEYWORD + * 'projects.active' -> BOOLEAN + */ + @Override + public Type visitIndexName(String indexName) { + if (isNotNested(indexName)) { + defineIndexType(indexName); + loadAllFieldsWithType(indexName); } + return defaultValue(); + } - /* - * 3.3 Define with alias if given: ex."SELECT * FROM accounts a, a.project p" - * 'accounts' -> INDEX - * 'name' -> TEXT - * 'age' -> INTEGER - * 'projects' -> NESTED - * 'projects.name' -> KEYWORD - * 'projects.active' -> BOOLEAN - * 'a.name' -> TEXT - * 'a.age' -> INTEGER - * 'a.projects' -> NESTED - * 'a.projects.name' -> KEYWORD - * 'a.projects.active' -> BOOLEAN - * ----- new definitions ----- - * ['p' -> NESTED] -- this is done in semantic analyzer - * 'p.name' -> KEYWORD - * 'p.active' -> BOOLEAN - */ - private void defineNestedFieldNamesByReplacingWithAlias(String nestedFieldName, String alias) { - Map typeByFullName = environment().resolveByPrefix( - new Symbol(Namespace.FIELD_NAME, nestedFieldName)); - typeByFullName.forEach( - (fieldName, fieldType) -> defineFieldName(fieldName.replace(nestedFieldName, 
alias), fieldType) - ); + @Override + public void visitAs(String alias, Type type) { + if (!(type instanceof OpenSearchIndex)) { + return; } - /** - * Check if index name is NOT nested, for example. return true for index 'accounts' or '.opensearch_dashboards' - * but return false for nested field name 'a.projects'. - */ - private boolean isNotNested(String indexName) { - return indexName.indexOf('.', 1) == -1; // taking care of .opensearch_dashboards + OpenSearchIndex index = (OpenSearchIndex) type; + String indexName = type.getName(); + + if (index.type() == INDEX) { + String aliasName = alias.isEmpty() ? indexName : alias; + defineAllFieldNamesByAppendingAliasPrefix(indexName, aliasName); + } else if (index.type() == NESTED_FIELD) { + if (!alias.isEmpty()) { + defineNestedFieldNamesByReplacingWithAlias(indexName, alias); + } + } // else Do nothing for index pattern + } + + private void defineIndexType(String indexName) { + environment() + .define(new Symbol(Namespace.FIELD_NAME, indexName), new OpenSearchIndex(indexName, INDEX)); + } + + private void loadAllFieldsWithType(String indexName) { + Set mappings = getFieldMappings(indexName); + mappings.forEach(mapping -> mapping.flat(this::defineFieldName)); + } + + /* + * 3.1 Define with alias if given: ex."SELECT * FROM accounts a". 
+ * 'accounts' -> INDEX + * 'name' -> TEXT + * 'age' -> INTEGER + * 'projects' -> NESTED + * 'projects.name' -> KEYWORD + * 'projects.active' -> BOOLEAN + * ----- new definitions ----- + * ['a' -> INDEX] -- this is done in semantic analyzer + * 'a.name' -> TEXT + * 'a.age' -> INTEGER + * 'a.projects' -> NESTED + * 'a.projects.name' -> KEYWORD + * 'a.projects.active' -> BOOLEAN + * + * 3.2 Otherwise define by index full name: ex."SELECT * FROM account" + * 'accounts' -> INDEX + * 'name' -> TEXT + * 'age' -> INTEGER + * 'projects' -> NESTED + * 'projects.name' -> KEYWORD + * 'projects.active' -> BOOLEAN + * ----- new definitions ----- + * 'accounts.name' -> TEXT + * 'accounts.age' -> INTEGER + * 'accounts.projects' -> NESTED + * 'accounts.projects.name' -> KEYWORD + * 'accounts.projects.active' -> BOOLEAN + */ + private void defineAllFieldNamesByAppendingAliasPrefix(String indexName, String alias) { + Set mappings = getFieldMappings(indexName); + mappings.stream() + .forEach( + mapping -> + mapping.flat((fieldName, type) -> defineFieldName(alias + "." 
+ fieldName, type))); + } + + /* + * 3.3 Define with alias if given: ex."SELECT * FROM accounts a, a.project p" + * 'accounts' -> INDEX + * 'name' -> TEXT + * 'age' -> INTEGER + * 'projects' -> NESTED + * 'projects.name' -> KEYWORD + * 'projects.active' -> BOOLEAN + * 'a.name' -> TEXT + * 'a.age' -> INTEGER + * 'a.projects' -> NESTED + * 'a.projects.name' -> KEYWORD + * 'a.projects.active' -> BOOLEAN + * ----- new definitions ----- + * ['p' -> NESTED] -- this is done in semantic analyzer + * 'p.name' -> KEYWORD + * 'p.active' -> BOOLEAN + */ + private void defineNestedFieldNamesByReplacingWithAlias(String nestedFieldName, String alias) { + Map typeByFullName = + environment().resolveByPrefix(new Symbol(Namespace.FIELD_NAME, nestedFieldName)); + typeByFullName.forEach( + (fieldName, fieldType) -> + defineFieldName(fieldName.replace(nestedFieldName, alias), fieldType)); + } + + /** + * Check if index name is NOT nested, for example. return true for index 'accounts' or + * '.opensearch_dashboards' but return false for nested field name 'a.projects'. 
+ */ + private boolean isNotNested(String indexName) { + return indexName.indexOf('.', 1) == -1; // taking care of .opensearch_dashboards + } + + private Set getFieldMappings(String indexName) { + IndexMappings indexMappings = clusterState.getFieldMappings(new String[] {indexName}); + Set fieldMappingsSet = new HashSet<>(indexMappings.allMappings()); + + for (FieldMappings fieldMappings : fieldMappingsSet) { + int size = fieldMappings.data().size(); + if (size > threshold) { + throw new EarlyExitAnalysisException( + StringUtils.format( + "Index [%s] has [%d] fields more than threshold [%d]", indexName, size, threshold)); + } } - - private Set getFieldMappings(String indexName) { - IndexMappings indexMappings = clusterState.getFieldMappings(new String[]{indexName}); - Set fieldMappingsSet = new HashSet<>(indexMappings.allMappings()); - - for (FieldMappings fieldMappings : fieldMappingsSet) { - int size = fieldMappings.data().size(); - if (size > threshold) { - throw new EarlyExitAnalysisException(StringUtils.format( - "Index [%s] has [%d] fields more than threshold [%d]", indexName, size, threshold)); - } - } - return fieldMappingsSet; + return fieldMappingsSet; + } + + private void defineFieldName(String fieldName, String type) { + if ("NESTED".equalsIgnoreCase(type)) { + defineFieldName(fieldName, new OpenSearchIndex(fieldName, NESTED_FIELD)); + } else { + defineFieldName(fieldName, OpenSearchDataType.typeOf(type)); } + } - private void defineFieldName(String fieldName, String type) { - if ("NESTED".equalsIgnoreCase(type)) { - defineFieldName(fieldName, new OpenSearchIndex(fieldName, NESTED_FIELD)); - } else { - defineFieldName(fieldName, OpenSearchDataType.typeOf(type)); - } - } + private void defineFieldName(String fieldName, Type type) { + Symbol symbol = new Symbol(Namespace.FIELD_NAME, fieldName); + environment().define(symbol, type); + } - private void defineFieldName(String fieldName, Type type) { - Symbol symbol = new Symbol(Namespace.FIELD_NAME, 
fieldName); - environment().define(symbol, type); - } - - private Environment environment() { - return context.peek(); - } + private Environment environment() { + return context.peek(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/visitor/SemanticAnalyzer.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/visitor/SemanticAnalyzer.java index 32bad91737..0655062be3 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/visitor/SemanticAnalyzer.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/visitor/SemanticAnalyzer.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.visitor; import static org.opensearch.sql.legacy.utils.StringUtils.unquoteFullColumn; @@ -13,125 +12,123 @@ import org.opensearch.sql.legacy.antlr.semantic.types.Type; import org.opensearch.sql.legacy.antlr.visitor.GenericSqlParseTreeVisitor; -/** - * Main visitor implementation to drive the entire semantic analysis. - */ +/** Main visitor implementation to drive the entire semantic analysis. 
*/ public class SemanticAnalyzer implements GenericSqlParseTreeVisitor { - private final OpenSearchMappingLoader mappingLoader; - - private final TypeChecker typeChecker; - - public SemanticAnalyzer(OpenSearchMappingLoader mappingLoader, TypeChecker typeChecker) { - this.mappingLoader = mappingLoader; - this.typeChecker = typeChecker; - } - - @Override - public void visitRoot() { - mappingLoader.visitRoot(); - typeChecker.visitRoot(); - } - - @Override - public void visitQuery() { - mappingLoader.visitQuery(); - typeChecker.visitQuery(); - } - - @Override - public void endVisitQuery() { - mappingLoader.endVisitQuery(); - typeChecker.endVisitQuery(); - } - - @Override - public Type visitSelect(List itemTypes) { - mappingLoader.visitSelect(itemTypes); - return typeChecker.visitSelect(itemTypes); - } - - @Override - public Type visitSelectAllColumn() { - mappingLoader.visitSelectAllColumn(); - return typeChecker.visitSelectAllColumn(); - } - - @Override - public void visitAs(String alias, Type type) { - mappingLoader.visitAs(unquoteSingleField(alias), type); - typeChecker.visitAs(unquoteSingleField(alias), type); - } - - @Override - public Type visitIndexName(String indexName) { - mappingLoader.visitIndexName(unquoteSingleField(indexName)); - return typeChecker.visitIndexName(unquoteSingleField(indexName)); - } - - @Override - public Type visitFieldName(String fieldName) { - mappingLoader.visitFieldName(unquoteFullColumn(fieldName)); - return typeChecker.visitFieldName(unquoteFullColumn(fieldName)); - } - - @Override - public Type visitFunctionName(String funcName) { - mappingLoader.visitFunctionName(funcName); - return typeChecker.visitFunctionName(funcName); - } - - @Override - public Type visitOperator(String opName) { - mappingLoader.visitOperator(opName); - return typeChecker.visitOperator(opName); - } - - @Override - public Type visitString(String text) { - mappingLoader.visitString(text); - return typeChecker.visitString(text); - } - - @Override - public Type 
visitInteger(String text) { - mappingLoader.visitInteger(text); - return typeChecker.visitInteger(text); - } - - @Override - public Type visitFloat(String text) { - mappingLoader.visitFloat(text); - return typeChecker.visitFloat(text); - } - - @Override - public Type visitBoolean(String text) { - mappingLoader.visitBoolean(text); - return typeChecker.visitBoolean(text); - } - - @Override - public Type visitDate(String text) { - mappingLoader.visitDate(text); - return typeChecker.visitDate(text); - } - - @Override - public Type visitNull() { - mappingLoader.visitNull(); - return typeChecker.visitNull(); - } - - @Override - public Type visitConvertedType(String text) { - mappingLoader.visitConvertedType(text); - return typeChecker.visitConvertedType(text); - } - - @Override - public Type defaultValue() { - mappingLoader.defaultValue(); - return typeChecker.defaultValue(); - } + private final OpenSearchMappingLoader mappingLoader; + + private final TypeChecker typeChecker; + + public SemanticAnalyzer(OpenSearchMappingLoader mappingLoader, TypeChecker typeChecker) { + this.mappingLoader = mappingLoader; + this.typeChecker = typeChecker; + } + + @Override + public void visitRoot() { + mappingLoader.visitRoot(); + typeChecker.visitRoot(); + } + + @Override + public void visitQuery() { + mappingLoader.visitQuery(); + typeChecker.visitQuery(); + } + + @Override + public void endVisitQuery() { + mappingLoader.endVisitQuery(); + typeChecker.endVisitQuery(); + } + + @Override + public Type visitSelect(List itemTypes) { + mappingLoader.visitSelect(itemTypes); + return typeChecker.visitSelect(itemTypes); + } + + @Override + public Type visitSelectAllColumn() { + mappingLoader.visitSelectAllColumn(); + return typeChecker.visitSelectAllColumn(); + } + + @Override + public void visitAs(String alias, Type type) { + mappingLoader.visitAs(unquoteSingleField(alias), type); + typeChecker.visitAs(unquoteSingleField(alias), type); + } + + @Override + public Type visitIndexName(String 
indexName) { + mappingLoader.visitIndexName(unquoteSingleField(indexName)); + return typeChecker.visitIndexName(unquoteSingleField(indexName)); + } + + @Override + public Type visitFieldName(String fieldName) { + mappingLoader.visitFieldName(unquoteFullColumn(fieldName)); + return typeChecker.visitFieldName(unquoteFullColumn(fieldName)); + } + + @Override + public Type visitFunctionName(String funcName) { + mappingLoader.visitFunctionName(funcName); + return typeChecker.visitFunctionName(funcName); + } + + @Override + public Type visitOperator(String opName) { + mappingLoader.visitOperator(opName); + return typeChecker.visitOperator(opName); + } + + @Override + public Type visitString(String text) { + mappingLoader.visitString(text); + return typeChecker.visitString(text); + } + + @Override + public Type visitInteger(String text) { + mappingLoader.visitInteger(text); + return typeChecker.visitInteger(text); + } + + @Override + public Type visitFloat(String text) { + mappingLoader.visitFloat(text); + return typeChecker.visitFloat(text); + } + + @Override + public Type visitBoolean(String text) { + mappingLoader.visitBoolean(text); + return typeChecker.visitBoolean(text); + } + + @Override + public Type visitDate(String text) { + mappingLoader.visitDate(text); + return typeChecker.visitDate(text); + } + + @Override + public Type visitNull() { + mappingLoader.visitNull(); + return typeChecker.visitNull(); + } + + @Override + public Type visitConvertedType(String text) { + mappingLoader.visitConvertedType(text); + return typeChecker.visitConvertedType(text); + } + + @Override + public Type defaultValue() { + mappingLoader.defaultValue(); + return typeChecker.defaultValue(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/visitor/TypeChecker.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/visitor/TypeChecker.java index 59c0036575..19119c776c 100644 --- 
a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/visitor/TypeChecker.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/semantic/visitor/TypeChecker.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.visitor; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.UNKNOWN; @@ -31,198 +30,196 @@ import org.opensearch.sql.legacy.antlr.visitor.GenericSqlParseTreeVisitor; import org.opensearch.sql.legacy.utils.StringUtils; -/** - * SQL semantic analyzer that determines if a syntactical correct query is meaningful. - */ +/** SQL semantic analyzer that determines if a syntactical correct query is meaningful. */ public class TypeChecker implements GenericSqlParseTreeVisitor { - private static final Type NULL_TYPE = new Type() { + private static final Type NULL_TYPE = + new Type() { @Override public String getName() { - return "NULL"; + return "NULL"; } @Override public boolean isCompatible(Type other) { - throw new IllegalStateException("Compatibility check on NULL type with " + other); + throw new IllegalStateException("Compatibility check on NULL type with " + other); } @Override public Type construct(List others) { - throw new IllegalStateException("Construct operation on NULL type with " + others); + throw new IllegalStateException("Construct operation on NULL type with " + others); } @Override public String usage() { - throw new IllegalStateException("Usage print operation on NULL type"); - } - }; - - /** Semantic context for symbol scope management */ - private final SemanticContext context; - - /** Should suggestion provided. Disabled by default for security concern. 
*/ - private final boolean isSuggestEnabled; - - public TypeChecker(SemanticContext context) { - this.context = context; - this.isSuggestEnabled = false; - } - - public TypeChecker(SemanticContext context, boolean isSuggestEnabled) { - this.context = context; - this.isSuggestEnabled = isSuggestEnabled; - } - - @Override - public void visitRoot() { - defineFunctionNames(ScalarFunction.values()); - defineFunctionNames(OpenSearchScalarFunction.values()); - defineFunctionNames(AggregateFunction.values()); - defineOperatorNames(ComparisonOperator.values()); - defineOperatorNames(SetOperator.values()); - defineOperatorNames(JoinOperator.values()); - } - - @Override - public void visitQuery() { - context.push(); - } - - @Override - public void endVisitQuery() { - context.pop(); - } - - @Override - public Type visitSelect(List itemTypes) { - if (itemTypes.size() == 1) { - return itemTypes.get(0); - } else if (itemTypes.size() == 0) { - return visitSelectAllColumn(); - } - // Return product for empty (SELECT *) and #items > 1 - return new Product(itemTypes); - } - - @Override - public Type visitSelectAllColumn() { - return resolveAllColumn(); - } - - @Override - public void visitAs(String alias, Type type) { - defineFieldName(alias, type); - } - - @Override - public Type visitIndexName(String indexName) { - return resolve(new Symbol(Namespace.FIELD_NAME, indexName)); - } - - @Override - public Type visitFieldName(String fieldName) { - // Bypass hidden fields which is not present in mapping, ex. _id, _type. 
- if (fieldName.startsWith("_")) { - return UNKNOWN; - } - // Ignore case for function/operator though field name is case sensitive - return resolve(new Symbol(Namespace.FIELD_NAME, fieldName)); - } - - @Override - public Type visitFunctionName(String funcName) { - return resolve(new Symbol(Namespace.FUNCTION_NAME, StringUtils.toUpper(funcName))); - } - - @Override - public Type visitOperator(String opName) { - return resolve(new Symbol(Namespace.OPERATOR_NAME, StringUtils.toUpper(opName))); - } - - @Override - public Type visitString(String text) { - return OpenSearchDataType.STRING; - } - - @Override - public Type visitInteger(String text) { - return OpenSearchDataType.INTEGER; - } - - @Override - public Type visitFloat(String text) { - return OpenSearchDataType.FLOAT; - } - - @Override - public Type visitBoolean(String text) { - // "IS [NOT] MISSING" can be used on any data type - return "MISSING".equalsIgnoreCase(text) ? UNKNOWN : OpenSearchDataType.BOOLEAN; - } - - @Override - public Type visitDate(String text) { - return OpenSearchDataType.DATE; - } - - @Override - public Type visitNull() { - return UNKNOWN; - } - - @Override - public Type visitConvertedType(String text) { - return OpenSearchDataType.typeOf(text); - } - - @Override - public Type defaultValue() { - return NULL_TYPE; - } - - private void defineFieldName(String fieldName, Type type) { - Symbol symbol = new Symbol(Namespace.FIELD_NAME, fieldName); - if (!environment().resolve(symbol).isPresent()) { - environment().define(symbol, type); + throw new IllegalStateException("Usage print operation on NULL type"); } - } - - private void defineFunctionNames(TypeExpression[] expressions) { - for (TypeExpression expr : expressions) { - environment().define(new Symbol(Namespace.FUNCTION_NAME, expr.getName()), expr); - } - } - - private void defineOperatorNames(Type[] expressions) { - for (Type expr : expressions) { - environment().define(new Symbol(Namespace.OPERATOR_NAME, expr.getName()), expr); - } - } - 
- private Type resolve(Symbol symbol) { - Optional type = environment().resolve(symbol); - if (type.isPresent()) { - return type.get(); - } - - String errorMsg = StringUtils.format("%s cannot be found or used here.", symbol); - - if (isSuggestEnabled || symbol.getNamespace() != Namespace.FIELD_NAME) { - Set allSymbolsInScope = environment().resolveAll(symbol.getNamespace()).keySet(); - String suggestedWord = new SimilarSymbols(allSymbolsInScope).mostSimilarTo(symbol.getName()); - errorMsg += StringUtils.format(" Did you mean [%s]?", suggestedWord); - } - throw new SemanticAnalysisException(errorMsg); - } - - private Type resolveAllColumn() { - environment().resolveAll(Namespace.FIELD_NAME); - return new Product(ImmutableList.of()); - } - - private Environment environment() { - return context.peek(); - } - + }; + + /** Semantic context for symbol scope management */ + private final SemanticContext context; + + /** Should suggestion provided. Disabled by default for security concern. */ + private final boolean isSuggestEnabled; + + public TypeChecker(SemanticContext context) { + this.context = context; + this.isSuggestEnabled = false; + } + + public TypeChecker(SemanticContext context, boolean isSuggestEnabled) { + this.context = context; + this.isSuggestEnabled = isSuggestEnabled; + } + + @Override + public void visitRoot() { + defineFunctionNames(ScalarFunction.values()); + defineFunctionNames(OpenSearchScalarFunction.values()); + defineFunctionNames(AggregateFunction.values()); + defineOperatorNames(ComparisonOperator.values()); + defineOperatorNames(SetOperator.values()); + defineOperatorNames(JoinOperator.values()); + } + + @Override + public void visitQuery() { + context.push(); + } + + @Override + public void endVisitQuery() { + context.pop(); + } + + @Override + public Type visitSelect(List itemTypes) { + if (itemTypes.size() == 1) { + return itemTypes.get(0); + } else if (itemTypes.size() == 0) { + return visitSelectAllColumn(); + } + // Return product for 
empty (SELECT *) and #items > 1 + return new Product(itemTypes); + } + + @Override + public Type visitSelectAllColumn() { + return resolveAllColumn(); + } + + @Override + public void visitAs(String alias, Type type) { + defineFieldName(alias, type); + } + + @Override + public Type visitIndexName(String indexName) { + return resolve(new Symbol(Namespace.FIELD_NAME, indexName)); + } + + @Override + public Type visitFieldName(String fieldName) { + // Bypass hidden fields which is not present in mapping, ex. _id, _type. + if (fieldName.startsWith("_")) { + return UNKNOWN; + } + // Ignore case for function/operator though field name is case sensitive + return resolve(new Symbol(Namespace.FIELD_NAME, fieldName)); + } + + @Override + public Type visitFunctionName(String funcName) { + return resolve(new Symbol(Namespace.FUNCTION_NAME, StringUtils.toUpper(funcName))); + } + + @Override + public Type visitOperator(String opName) { + return resolve(new Symbol(Namespace.OPERATOR_NAME, StringUtils.toUpper(opName))); + } + + @Override + public Type visitString(String text) { + return OpenSearchDataType.STRING; + } + + @Override + public Type visitInteger(String text) { + return OpenSearchDataType.INTEGER; + } + + @Override + public Type visitFloat(String text) { + return OpenSearchDataType.FLOAT; + } + + @Override + public Type visitBoolean(String text) { + // "IS [NOT] MISSING" can be used on any data type + return "MISSING".equalsIgnoreCase(text) ? 
UNKNOWN : OpenSearchDataType.BOOLEAN; + } + + @Override + public Type visitDate(String text) { + return OpenSearchDataType.DATE; + } + + @Override + public Type visitNull() { + return UNKNOWN; + } + + @Override + public Type visitConvertedType(String text) { + return OpenSearchDataType.typeOf(text); + } + + @Override + public Type defaultValue() { + return NULL_TYPE; + } + + private void defineFieldName(String fieldName, Type type) { + Symbol symbol = new Symbol(Namespace.FIELD_NAME, fieldName); + if (!environment().resolve(symbol).isPresent()) { + environment().define(symbol, type); + } + } + + private void defineFunctionNames(TypeExpression[] expressions) { + for (TypeExpression expr : expressions) { + environment().define(new Symbol(Namespace.FUNCTION_NAME, expr.getName()), expr); + } + } + + private void defineOperatorNames(Type[] expressions) { + for (Type expr : expressions) { + environment().define(new Symbol(Namespace.OPERATOR_NAME, expr.getName()), expr); + } + } + + private Type resolve(Symbol symbol) { + Optional type = environment().resolve(symbol); + if (type.isPresent()) { + return type.get(); + } + + String errorMsg = StringUtils.format("%s cannot be found or used here.", symbol); + + if (isSuggestEnabled || symbol.getNamespace() != Namespace.FIELD_NAME) { + Set allSymbolsInScope = environment().resolveAll(symbol.getNamespace()).keySet(); + String suggestedWord = new SimilarSymbols(allSymbolsInScope).mostSimilarTo(symbol.getName()); + errorMsg += StringUtils.format(" Did you mean [%s]?", suggestedWord); + } + throw new SemanticAnalysisException(errorMsg); + } + + private Type resolveAllColumn() { + environment().resolveAll(Namespace.FIELD_NAME); + return new Product(ImmutableList.of()); + } + + private Environment environment() { + return context.peek(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/syntax/CaseInsensitiveCharStream.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/syntax/CaseInsensitiveCharStream.java index de7e60e9f3..c7cb212826 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/syntax/CaseInsensitiveCharStream.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/syntax/CaseInsensitiveCharStream.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.syntax; import org.antlr.v4.runtime.CharStream; @@ -11,63 +10,64 @@ import org.antlr.v4.runtime.misc.Interval; /** - * Custom stream to convert character to upper case for case insensitive grammar before sending to lexer. + * Custom stream to convert character to upper case for case insensitive grammar before sending to + * lexer. */ public class CaseInsensitiveCharStream implements CharStream { - /** Character stream */ - private final CharStream charStream; + /** Character stream */ + private final CharStream charStream; - public CaseInsensitiveCharStream(String sql) { - this.charStream = CharStreams.fromString(sql); - } + public CaseInsensitiveCharStream(String sql) { + this.charStream = CharStreams.fromString(sql); + } - @Override - public String getText(Interval interval) { - return charStream.getText(interval); - } + @Override + public String getText(Interval interval) { + return charStream.getText(interval); + } - @Override - public void consume() { - charStream.consume(); - } + @Override + public void consume() { + charStream.consume(); + } - @Override - public int LA(int i) { - int c = charStream.LA(i); - if (c <= 0) { - return c; - } - return Character.toUpperCase(c); + @Override + public int LA(int i) { + int c = charStream.LA(i); + if (c <= 0) { + return c; } + return Character.toUpperCase(c); + } - @Override - public int mark() { - return charStream.mark(); - } + @Override + public int mark() { + return charStream.mark(); + } - @Override - public void release(int marker) { - charStream.release(marker); - } + @Override + public void 
release(int marker) { + charStream.release(marker); + } - @Override - public int index() { - return charStream.index(); - } + @Override + public int index() { + return charStream.index(); + } - @Override - public void seek(int index) { - charStream.seek(index); - } + @Override + public void seek(int index) { + charStream.seek(index); + } - @Override - public int size() { - return charStream.size(); - } + @Override + public int size() { + return charStream.size(); + } - @Override - public String getSourceName() { - return charStream.getSourceName(); - } + @Override + public String getSourceName() { + return charStream.getSourceName(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/syntax/SyntaxAnalysisErrorListener.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/syntax/SyntaxAnalysisErrorListener.java index 185f2696b7..5f0c7e38d1 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/syntax/SyntaxAnalysisErrorListener.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/syntax/SyntaxAnalysisErrorListener.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.syntax; import org.antlr.v4.runtime.BaseErrorListener; @@ -15,50 +14,53 @@ import org.opensearch.sql.legacy.utils.StringUtils; /** - * Syntax analysis error listener that handles any syntax error by throwing exception with useful information. + * Syntax analysis error listener that handles any syntax error by throwing exception with useful + * information. 
*/ public class SyntaxAnalysisErrorListener extends BaseErrorListener { - @Override - public void syntaxError(Recognizer recognizer, Object offendingSymbol, - int line, int charPositionInLine, String msg, - RecognitionException e) { + @Override + public void syntaxError( + Recognizer recognizer, + Object offendingSymbol, + int line, + int charPositionInLine, + String msg, + RecognitionException e) { - CommonTokenStream tokens = (CommonTokenStream) recognizer.getInputStream(); - Token offendingToken = (Token) offendingSymbol; - String query = tokens.getText(); + CommonTokenStream tokens = (CommonTokenStream) recognizer.getInputStream(); + Token offendingToken = (Token) offendingSymbol; + String query = tokens.getText(); - throw new SyntaxAnalysisException( - StringUtils.format( - "Failed to parse query due to offending symbol [%s] at: '%s' <--- HERE... More details: %s", - getOffendingText(offendingToken), - truncateQueryAtOffendingToken(query, offendingToken), - getDetails(recognizer, msg, e) - ) - ); - } + throw new SyntaxAnalysisException( + StringUtils.format( + "Failed to parse query due to offending symbol [%s] at: '%s' <--- HERE... More details:" + + " %s", + getOffendingText(offendingToken), + truncateQueryAtOffendingToken(query, offendingToken), + getDetails(recognizer, msg, e))); + } - private String getOffendingText(Token offendingToken) { - return offendingToken.getText(); - } + private String getOffendingText(Token offendingToken) { + return offendingToken.getText(); + } - private String truncateQueryAtOffendingToken(String query, Token offendingToken) { - return query.substring(0, offendingToken.getStopIndex() + 1); - } + private String truncateQueryAtOffendingToken(String query, Token offendingToken) { + return query.substring(0, offendingToken.getStopIndex() + 1); + } - /** - * As official JavaDoc says, e=null means parser was able to recover from the error. - * In other words, "msg" argument includes the information we want. 
- */ - private String getDetails(Recognizer recognizer, String msg, RecognitionException e) { - String details; - if (e == null) { - details = msg; - } else { - IntervalSet followSet = e.getExpectedTokens(); - details = "Expecting tokens in " + followSet.toString(recognizer.getVocabulary()); - } - return details; + /** + * As official JavaDoc says, e=null means parser was able to recover from the error. In other + * words, "msg" argument includes the information we want. + */ + private String getDetails(Recognizer recognizer, String msg, RecognitionException e) { + String details; + if (e == null) { + details = msg; + } else { + IntervalSet followSet = e.getExpectedTokens(); + details = "Expecting tokens in " + followSet.toString(recognizer.getVocabulary()); } - + return details; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/syntax/SyntaxAnalysisException.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/syntax/SyntaxAnalysisException.java index f79de62229..dce5437a19 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/syntax/SyntaxAnalysisException.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/syntax/SyntaxAnalysisException.java @@ -3,17 +3,14 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.syntax; import org.opensearch.sql.legacy.antlr.SqlAnalysisException; -/** - * Exception for syntax analysis - */ +/** Exception for syntax analysis */ public class SyntaxAnalysisException extends SqlAnalysisException { - public SyntaxAnalysisException(String message) { - super(message); - } + public SyntaxAnalysisException(String message) { + super(message); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/AntlrSqlParseTreeVisitor.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/AntlrSqlParseTreeVisitor.java index 90a8274568..5a16ee3540 100644 --- 
a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/AntlrSqlParseTreeVisitor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/AntlrSqlParseTreeVisitor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.visitor; import static java.util.Collections.emptyList; @@ -55,357 +54,342 @@ import org.opensearch.sql.legacy.antlr.parser.OpenSearchLegacySqlParser.TableNamePatternContext; import org.opensearch.sql.legacy.antlr.parser.OpenSearchLegacySqlParserBaseVisitor; -/** - * ANTLR parse tree visitor to drive the analysis process. - */ -public class AntlrSqlParseTreeVisitor extends OpenSearchLegacySqlParserBaseVisitor { - - /** Generic visitor to perform the real action on parse tree */ - private final GenericSqlParseTreeVisitor visitor; - - public AntlrSqlParseTreeVisitor(GenericSqlParseTreeVisitor visitor) { - this.visitor = visitor; - } - - @Override - public T visitRoot(RootContext ctx) { - visitor.visitRoot(); - return super.visitRoot(ctx); - } - - @Override - public T visitUnionSelect(UnionSelectContext ctx) { - T union = visitor.visitOperator("UNION"); - return reduce(union, - asList( - ctx.querySpecification(), - ctx.unionStatement() - ) - ); - } - - @Override - public T visitMinusSelect(MinusSelectContext ctx) { - T minus = visitor.visitOperator("MINUS"); - return reduce(minus, asList(ctx.querySpecification(), ctx.minusStatement())); - } - - @Override - public T visitInPredicate(InPredicateContext ctx) { - T in = visitor.visitOperator("IN"); - PredicateContext field = ctx.predicate(); - ParserRuleContext subquery = (ctx.selectStatement() != null) ? 
ctx.selectStatement() : ctx.expressions(); - return reduce(in, Arrays.asList(field, subquery)); - } - - @Override - public T visitTableSources(TableSourcesContext ctx) { - if (ctx.tableSource().size() < 2) { - return super.visitTableSources(ctx); - } - T commaJoin = visitor.visitOperator("JOIN"); - return reduce(commaJoin, ctx.tableSource()); - } - - @Override - public T visitTableSourceBase(TableSourceBaseContext ctx) { - if (ctx.joinPart().isEmpty()) { - return super.visitTableSourceBase(ctx); - } - T join = visitor.visitOperator("JOIN"); - return reduce(join, asList(ctx.tableSourceItem(), ctx.joinPart())); - } - - @Override - public T visitInnerJoin(InnerJoinContext ctx) { - return visitJoin(ctx.children, ctx.tableSourceItem()); - } - - @Override - public T visitOuterJoin(OuterJoinContext ctx) { - return visitJoin(ctx.children, ctx.tableSourceItem()); - } - - /** - * Enforce visit order because ANTLR is generic and unaware. - * - * Visiting order is: - * FROM - * => WHERE - * => SELECT - * => GROUP BY - * => HAVING - * => ORDER BY - * => LIMIT - */ - @Override - public T visitQuerySpecification(QuerySpecificationContext ctx) { - visitor.visitQuery(); - - // Always visit FROM clause first to define symbols - FromClauseContext fromClause = ctx.fromClause(); - visit(fromClause.tableSources()); - - if (fromClause.whereExpr != null) { - visit(fromClause.whereExpr); - } - - // Note visit GROUP BY and HAVING later than SELECT for alias definition - T result = visitSelectElements(ctx.selectElements()); - fromClause.groupByItem().forEach(this::visit); - if (fromClause.havingExpr != null) { - visit(fromClause.havingExpr); - } - - if (ctx.orderByClause() != null) { - visitOrderByClause(ctx.orderByClause()); - } - if (ctx.limitClause() != null) { - visitLimitClause(ctx.limitClause()); - } - - visitor.endVisitQuery(); - return result; - } - - @Override - public T visitSubqueryTableItem(SubqueryTableItemContext ctx) { - throw new EarlyExitAnalysisException("Exit when meeting 
subquery in from"); - } - - /** Visit here instead of tableName because we need alias */ - @Override - public T visitAtomTableItem(AtomTableItemContext ctx) { - String alias = (ctx.alias == null) ? "" : ctx.alias.getText(); - T result = visit(ctx.tableName()); - visitor.visitAs(alias, result); - return result; - } - - @Override - public T visitSimpleTableName(SimpleTableNameContext ctx) { - return visitor.visitIndexName(ctx.getText()); - } - - @Override - public T visitTableNamePattern(TableNamePatternContext ctx) { - return visitor.visitIndexName(ctx.getText()); - } - - @Override - public T visitTableAndTypeName(TableAndTypeNameContext ctx) { - return visitor.visitIndexName(ctx.uid(0).getText()); - } - - @Override - public T visitFullColumnName(FullColumnNameContext ctx) { - return visitor.visitFieldName(ctx.getText()); - } - - @Override - public T visitUdfFunctionCall(UdfFunctionCallContext ctx) { - String funcName = ctx.fullId().getText(); - T func = visitor.visitFunctionName(funcName); - return reduce(func, ctx.functionArgs()); - } - - @Override - public T visitScalarFunctionCall(ScalarFunctionCallContext ctx) { - UnsupportedSemanticVerifier.verify(ctx); - T func = visit(ctx.scalarFunctionName()); - return reduce(func, ctx.functionArgs()); - } - - @Override - public T visitMathOperator(MathOperatorContext ctx) { - UnsupportedSemanticVerifier.verify(ctx); - return super.visitMathOperator(ctx); - } - - @Override - public T visitRegexpPredicate(RegexpPredicateContext ctx) { - UnsupportedSemanticVerifier.verify(ctx); - return super.visitRegexpPredicate(ctx); - } - - @Override - public T visitSelectElements(SelectElementsContext ctx) { - return visitor.visitSelect(ctx.selectElement(). - stream(). - map(this::visit). 
- collect(Collectors.toList())); - } - - @Override - public T visitSelectStarElement(OpenSearchLegacySqlParser.SelectStarElementContext ctx) { - return visitor.visitSelectAllColumn(); - } - - @Override - public T visitSelectColumnElement(SelectColumnElementContext ctx) { - return visitSelectItem(ctx.fullColumnName(), ctx.uid()); - } - - @Override - public T visitSelectFunctionElement(SelectFunctionElementContext ctx) { - return visitSelectItem(ctx.functionCall(), ctx.uid()); - } - - @Override - public T visitSelectExpressionElement(SelectExpressionElementContext ctx) { - return visitSelectItem(ctx.expression(), ctx.uid()); - } - - @Override - public T visitAggregateWindowedFunction(AggregateWindowedFunctionContext ctx) { - String funcName = ctx.getChild(0).getText(); - T func = visitor.visitFunctionName(funcName); - return reduce(func, ctx.functionArg()); - } - - @Override - public T visitFunctionNameBase(FunctionNameBaseContext ctx) { - return visitor.visitFunctionName(ctx.getText()); - } - - @Override - public T visitBinaryComparisonPredicate(BinaryComparisonPredicateContext ctx) { - if (isNamedArgument(ctx)) { // Essentially named argument is assign instead of comparison - return defaultResult(); - } - - T op = visit(ctx.comparisonOperator()); - return reduce(op, Arrays.asList(ctx.left, ctx.right)); - } - - @Override - public T visitIsExpression(IsExpressionContext ctx) { - T op = visitor.visitOperator("IS"); - return op.reduce(Arrays.asList( - visit(ctx.predicate()), - visitor.visitBoolean(ctx.testValue.getText())) - ); - } - - @Override - public T visitConvertedDataType(OpenSearchLegacySqlParser.ConvertedDataTypeContext ctx) { - if (ctx.getChild(0) != null && !Strings.isNullOrEmpty(ctx.getChild(0).getText())) { - return visitor.visitConvertedType(ctx.getChild(0).getText()); - } else { - return super.visitConvertedDataType(ctx); - } - } - - @Override - public T visitComparisonOperator(ComparisonOperatorContext ctx) { - return 
visitor.visitOperator(ctx.getText()); - } - - @Override - public T visitConstant(ConstantContext ctx) { - if (ctx.REAL_LITERAL() != null) { - return visitor.visitFloat(ctx.getText()); - } - if (ctx.dateType != null) { - return visitor.visitDate(ctx.getText()); - } - if (ctx.nullLiteral != null) { - return visitor.visitNull(); - } - return super.visitConstant(ctx); - } - - @Override - public T visitStringLiteral(StringLiteralContext ctx) { - return visitor.visitString(ctx.getText()); - } - - @Override - public T visitDecimalLiteral(DecimalLiteralContext ctx) { - return visitor.visitInteger(ctx.getText()); - } - - @Override - public T visitBooleanLiteral(BooleanLiteralContext ctx) { - return visitor.visitBoolean(ctx.getText()); - } - - @Override - protected T defaultResult() { - return visitor.defaultValue(); - } - - @Override - protected T aggregateResult(T aggregate, T nextResult) { - if (nextResult != defaultResult()) { // Simply return non-default value for now - return nextResult; - } - return aggregate; - } - - /** - * Named argument, ex. 
TOPHITS('size'=3), is under FunctionArgs -> Predicate - * And the function name should be contained in openSearchFunctionNameBase - */ - private boolean isNamedArgument(BinaryComparisonPredicateContext ctx) { - if (ctx.getParent() != null && ctx.getParent().getParent() != null - && ctx.getParent().getParent().getParent() != null - && ctx.getParent().getParent().getParent() instanceof ScalarFunctionCallContext) { - - ScalarFunctionCallContext parent = (ScalarFunctionCallContext) ctx.getParent().getParent().getParent(); - return parent.scalarFunctionName().functionNameBase().openSearchFunctionNameBase() != null; - } - return false; - } - - /** Enforce visiting result of table instead of ON clause as result */ - private T visitJoin(List children, TableSourceItemContext tableCtx) { - T result = defaultResult(); - for (ParseTree child : children) { - if (child == tableCtx) { - result = visit(tableCtx); - } else { - visit(child); - } - } - return result; - } - - /** Visit select items for type check and alias definition */ - private T visitSelectItem(ParserRuleContext item, UidContext uid) { - T result = visit(item); - if (uid != null) { - visitor.visitAs(uid.getText(), result); - } - return result; - } - - private T reduce(T reducer, ParserRuleContext ctx) { - return reduce(reducer, (ctx == null) ? emptyList() : ctx.children); - } - - /** Make constructor apply arguments and return result type */ - private T reduce(T reducer, List nodes) { - List args; - if (nodes == null) { - args = emptyList(); - } else { - args = nodes.stream(). - map(this::visit). - filter(type -> type != defaultResult()). - collect(Collectors.toList()); - } - return reducer.reduce(args); - } - - /** Combine an item and a list of items to a single list */ - private - List asList(Node1 first, List rest) { - - List result = new ArrayList<>(singleton(first)); - result.addAll(rest); - return result; - } - +/** ANTLR parse tree visitor to drive the analysis process. 
*/ +public class AntlrSqlParseTreeVisitor + extends OpenSearchLegacySqlParserBaseVisitor { + + /** Generic visitor to perform the real action on parse tree */ + private final GenericSqlParseTreeVisitor visitor; + + public AntlrSqlParseTreeVisitor(GenericSqlParseTreeVisitor visitor) { + this.visitor = visitor; + } + + @Override + public T visitRoot(RootContext ctx) { + visitor.visitRoot(); + return super.visitRoot(ctx); + } + + @Override + public T visitUnionSelect(UnionSelectContext ctx) { + T union = visitor.visitOperator("UNION"); + return reduce(union, asList(ctx.querySpecification(), ctx.unionStatement())); + } + + @Override + public T visitMinusSelect(MinusSelectContext ctx) { + T minus = visitor.visitOperator("MINUS"); + return reduce(minus, asList(ctx.querySpecification(), ctx.minusStatement())); + } + + @Override + public T visitInPredicate(InPredicateContext ctx) { + T in = visitor.visitOperator("IN"); + PredicateContext field = ctx.predicate(); + ParserRuleContext subquery = + (ctx.selectStatement() != null) ? ctx.selectStatement() : ctx.expressions(); + return reduce(in, Arrays.asList(field, subquery)); + } + + @Override + public T visitTableSources(TableSourcesContext ctx) { + if (ctx.tableSource().size() < 2) { + return super.visitTableSources(ctx); + } + T commaJoin = visitor.visitOperator("JOIN"); + return reduce(commaJoin, ctx.tableSource()); + } + + @Override + public T visitTableSourceBase(TableSourceBaseContext ctx) { + if (ctx.joinPart().isEmpty()) { + return super.visitTableSourceBase(ctx); + } + T join = visitor.visitOperator("JOIN"); + return reduce(join, asList(ctx.tableSourceItem(), ctx.joinPart())); + } + + @Override + public T visitInnerJoin(InnerJoinContext ctx) { + return visitJoin(ctx.children, ctx.tableSourceItem()); + } + + @Override + public T visitOuterJoin(OuterJoinContext ctx) { + return visitJoin(ctx.children, ctx.tableSourceItem()); + } + + /** + * Enforce visit order because ANTLR is generic and unaware. + * + *

Visiting order is: FROM => WHERE => SELECT => GROUP BY => HAVING => ORDER BY => LIMIT + */ + @Override + public T visitQuerySpecification(QuerySpecificationContext ctx) { + visitor.visitQuery(); + + // Always visit FROM clause first to define symbols + FromClauseContext fromClause = ctx.fromClause(); + visit(fromClause.tableSources()); + + if (fromClause.whereExpr != null) { + visit(fromClause.whereExpr); + } + + // Note visit GROUP BY and HAVING later than SELECT for alias definition + T result = visitSelectElements(ctx.selectElements()); + fromClause.groupByItem().forEach(this::visit); + if (fromClause.havingExpr != null) { + visit(fromClause.havingExpr); + } + + if (ctx.orderByClause() != null) { + visitOrderByClause(ctx.orderByClause()); + } + if (ctx.limitClause() != null) { + visitLimitClause(ctx.limitClause()); + } + + visitor.endVisitQuery(); + return result; + } + + @Override + public T visitSubqueryTableItem(SubqueryTableItemContext ctx) { + throw new EarlyExitAnalysisException("Exit when meeting subquery in from"); + } + + /** Visit here instead of tableName because we need alias */ + @Override + public T visitAtomTableItem(AtomTableItemContext ctx) { + String alias = (ctx.alias == null) ? 
"" : ctx.alias.getText(); + T result = visit(ctx.tableName()); + visitor.visitAs(alias, result); + return result; + } + + @Override + public T visitSimpleTableName(SimpleTableNameContext ctx) { + return visitor.visitIndexName(ctx.getText()); + } + + @Override + public T visitTableNamePattern(TableNamePatternContext ctx) { + return visitor.visitIndexName(ctx.getText()); + } + + @Override + public T visitTableAndTypeName(TableAndTypeNameContext ctx) { + return visitor.visitIndexName(ctx.uid(0).getText()); + } + + @Override + public T visitFullColumnName(FullColumnNameContext ctx) { + return visitor.visitFieldName(ctx.getText()); + } + + @Override + public T visitUdfFunctionCall(UdfFunctionCallContext ctx) { + String funcName = ctx.fullId().getText(); + T func = visitor.visitFunctionName(funcName); + return reduce(func, ctx.functionArgs()); + } + + @Override + public T visitScalarFunctionCall(ScalarFunctionCallContext ctx) { + UnsupportedSemanticVerifier.verify(ctx); + T func = visit(ctx.scalarFunctionName()); + return reduce(func, ctx.functionArgs()); + } + + @Override + public T visitMathOperator(MathOperatorContext ctx) { + UnsupportedSemanticVerifier.verify(ctx); + return super.visitMathOperator(ctx); + } + + @Override + public T visitRegexpPredicate(RegexpPredicateContext ctx) { + UnsupportedSemanticVerifier.verify(ctx); + return super.visitRegexpPredicate(ctx); + } + + @Override + public T visitSelectElements(SelectElementsContext ctx) { + return visitor.visitSelect( + ctx.selectElement().stream().map(this::visit).collect(Collectors.toList())); + } + + @Override + public T visitSelectStarElement(OpenSearchLegacySqlParser.SelectStarElementContext ctx) { + return visitor.visitSelectAllColumn(); + } + + @Override + public T visitSelectColumnElement(SelectColumnElementContext ctx) { + return visitSelectItem(ctx.fullColumnName(), ctx.uid()); + } + + @Override + public T visitSelectFunctionElement(SelectFunctionElementContext ctx) { + return 
visitSelectItem(ctx.functionCall(), ctx.uid()); + } + + @Override + public T visitSelectExpressionElement(SelectExpressionElementContext ctx) { + return visitSelectItem(ctx.expression(), ctx.uid()); + } + + @Override + public T visitAggregateWindowedFunction(AggregateWindowedFunctionContext ctx) { + String funcName = ctx.getChild(0).getText(); + T func = visitor.visitFunctionName(funcName); + return reduce(func, ctx.functionArg()); + } + + @Override + public T visitFunctionNameBase(FunctionNameBaseContext ctx) { + return visitor.visitFunctionName(ctx.getText()); + } + + @Override + public T visitBinaryComparisonPredicate(BinaryComparisonPredicateContext ctx) { + if (isNamedArgument(ctx)) { // Essentially named argument is assign instead of comparison + return defaultResult(); + } + + T op = visit(ctx.comparisonOperator()); + return reduce(op, Arrays.asList(ctx.left, ctx.right)); + } + + @Override + public T visitIsExpression(IsExpressionContext ctx) { + T op = visitor.visitOperator("IS"); + return op.reduce( + Arrays.asList(visit(ctx.predicate()), visitor.visitBoolean(ctx.testValue.getText()))); + } + + @Override + public T visitConvertedDataType(OpenSearchLegacySqlParser.ConvertedDataTypeContext ctx) { + if (ctx.getChild(0) != null && !Strings.isNullOrEmpty(ctx.getChild(0).getText())) { + return visitor.visitConvertedType(ctx.getChild(0).getText()); + } else { + return super.visitConvertedDataType(ctx); + } + } + + @Override + public T visitComparisonOperator(ComparisonOperatorContext ctx) { + return visitor.visitOperator(ctx.getText()); + } + + @Override + public T visitConstant(ConstantContext ctx) { + if (ctx.REAL_LITERAL() != null) { + return visitor.visitFloat(ctx.getText()); + } + if (ctx.dateType != null) { + return visitor.visitDate(ctx.getText()); + } + if (ctx.nullLiteral != null) { + return visitor.visitNull(); + } + return super.visitConstant(ctx); + } + + @Override + public T visitStringLiteral(StringLiteralContext ctx) { + return 
visitor.visitString(ctx.getText()); + } + + @Override + public T visitDecimalLiteral(DecimalLiteralContext ctx) { + return visitor.visitInteger(ctx.getText()); + } + + @Override + public T visitBooleanLiteral(BooleanLiteralContext ctx) { + return visitor.visitBoolean(ctx.getText()); + } + + @Override + protected T defaultResult() { + return visitor.defaultValue(); + } + + @Override + protected T aggregateResult(T aggregate, T nextResult) { + if (nextResult != defaultResult()) { // Simply return non-default value for now + return nextResult; + } + return aggregate; + } + + /** + * Named argument, ex. TOPHITS('size'=3), is under FunctionArgs -> Predicate And the function name + * should be contained in openSearchFunctionNameBase + */ + private boolean isNamedArgument(BinaryComparisonPredicateContext ctx) { + if (ctx.getParent() != null + && ctx.getParent().getParent() != null + && ctx.getParent().getParent().getParent() != null + && ctx.getParent().getParent().getParent() instanceof ScalarFunctionCallContext) { + + ScalarFunctionCallContext parent = + (ScalarFunctionCallContext) ctx.getParent().getParent().getParent(); + return parent.scalarFunctionName().functionNameBase().openSearchFunctionNameBase() != null; + } + return false; + } + + /** Enforce visiting result of table instead of ON clause as result */ + private T visitJoin(List children, TableSourceItemContext tableCtx) { + T result = defaultResult(); + for (ParseTree child : children) { + if (child == tableCtx) { + result = visit(tableCtx); + } else { + visit(child); + } + } + return result; + } + + /** Visit select items for type check and alias definition */ + private T visitSelectItem(ParserRuleContext item, UidContext uid) { + T result = visit(item); + if (uid != null) { + visitor.visitAs(uid.getText(), result); + } + return result; + } + + private T reduce(T reducer, ParserRuleContext ctx) { + return reduce(reducer, (ctx == null) ? 
emptyList() : ctx.children); + } + + /** Make constructor apply arguments and return result type */ + private T reduce(T reducer, List nodes) { + List args; + if (nodes == null) { + args = emptyList(); + } else { + args = + nodes.stream() + .map(this::visit) + .filter(type -> type != defaultResult()) + .collect(Collectors.toList()); + } + return reducer.reduce(args); + } + + /** Combine an item and a list of items to a single list */ + private List asList( + Node1 first, List rest) { + + List result = new ArrayList<>(singleton(first)); + result.addAll(rest); + return result; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/EarlyExitAnalysisException.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/EarlyExitAnalysisException.java index b0bd01a093..cf583aab40 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/EarlyExitAnalysisException.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/EarlyExitAnalysisException.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.visitor; -/** - * Exit visitor early due to some reason. - */ +/** Exit visitor early due to some reason. 
*/ public class EarlyExitAnalysisException extends RuntimeException { - public EarlyExitAnalysisException(String message) { - super(message); - } + public EarlyExitAnalysisException(String message) { + super(message); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/GenericSqlParseTreeVisitor.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/GenericSqlParseTreeVisitor.java index 511f932a0f..bd78c1b03f 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/GenericSqlParseTreeVisitor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/GenericSqlParseTreeVisitor.java @@ -3,78 +3,74 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.visitor; import java.util.List; -/** - * Generic parse tree visitor without dependency on concrete parse tree class. - */ +/** Generic parse tree visitor without dependency on concrete parse tree class. */ public interface GenericSqlParseTreeVisitor { - default void visitRoot() {} - - default void visitQuery() {} + default void visitRoot() {} - default void endVisitQuery() {} + default void visitQuery() {} - default T visitSelect(List items) { - return defaultValue(); - } + default void endVisitQuery() {} - default T visitSelectAllColumn() { - return defaultValue(); - } + default T visitSelect(List items) { + return defaultValue(); + } - default void visitAs(String alias, T type) {} + default T visitSelectAllColumn() { + return defaultValue(); + } - default T visitIndexName(String indexName) { - return defaultValue(); - } + default void visitAs(String alias, T type) {} - default T visitFieldName(String fieldName) { - return defaultValue(); - } + default T visitIndexName(String indexName) { + return defaultValue(); + } - default T visitFunctionName(String funcName) { - return defaultValue(); - } + default T visitFieldName(String fieldName) { + return defaultValue(); + } - default T visitOperator(String opName) { - return 
defaultValue(); - } + default T visitFunctionName(String funcName) { + return defaultValue(); + } - default T visitString(String text) { - return defaultValue(); - } + default T visitOperator(String opName) { + return defaultValue(); + } - default T visitInteger(String text) { - return defaultValue(); - } + default T visitString(String text) { + return defaultValue(); + } - default T visitFloat(String text) { - return defaultValue(); - } + default T visitInteger(String text) { + return defaultValue(); + } - default T visitBoolean(String text) { - return defaultValue(); - } + default T visitFloat(String text) { + return defaultValue(); + } - default T visitDate(String text) { - return defaultValue(); - } + default T visitBoolean(String text) { + return defaultValue(); + } - default T visitNull() { - return defaultValue(); - } + default T visitDate(String text) { + return defaultValue(); + } - default T visitConvertedType(String text) { - return defaultValue(); - } + default T visitNull() { + return defaultValue(); + } - default T defaultValue() { - return null; - } + default T visitConvertedType(String text) { + return defaultValue(); + } + default T defaultValue() { + return null; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/Reducible.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/Reducible.java index 510a76659e..edb4136d49 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/Reducible.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/Reducible.java @@ -3,21 +3,18 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.visitor; import java.util.List; -/** - * Abstraction for anything that can be reduced and used by {@link AntlrSqlParseTreeVisitor}. - */ +/** Abstraction for anything that can be reduced and used by {@link AntlrSqlParseTreeVisitor}. 
*/ public interface Reducible { - /** - * Reduce current and others to generate a new one - * @param others others - * @return reduction - */ - T reduce(List others); - + /** + * Reduce current and others to generate a new one + * + * @param others others + * @return reduction + */ + T reduce(List others); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/UnsupportedSemanticVerifier.java b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/UnsupportedSemanticVerifier.java index dc37425a62..919af8e6e2 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/UnsupportedSemanticVerifier.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/antlr/visitor/UnsupportedSemanticVerifier.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.visitor; import com.google.common.collect.Sets; @@ -17,71 +16,68 @@ public class UnsupportedSemanticVerifier { - private static final Set mathConstants = Sets.newHashSet( - "e", "pi" - ); - - private static final Set supportedNestedFunctions = Sets.newHashSet( - "nested", "reverse_nested", "score", "match_query", "matchquery" - ); + private static final Set mathConstants = Sets.newHashSet("e", "pi"); - /** - * The following two sets include the functions and operators that have requested or issued by users - * but the plugin does not support yet. - */ - private static final Set unsupportedFunctions = Sets.newHashSet( - "adddate", "addtime", "datetime", "greatest", "least" - ); + private static final Set supportedNestedFunctions = + Sets.newHashSet("nested", "reverse_nested", "score", "match_query", "matchquery"); - private static final Set unsupportedOperators = Sets.newHashSet( - "div" - ); + /** + * The following two sets include the functions and operators that have requested or issued by + * users but the plugin does not support yet. 
+ */ + private static final Set unsupportedFunctions = + Sets.newHashSet("adddate", "addtime", "datetime", "greatest", "least"); + private static final Set unsupportedOperators = Sets.newHashSet("div"); - /** - * The scalar function calls are separated into (a)typical function calls; (b)nested function calls with functions - * as arguments, like abs(log(...)); (c)aggregations with functions as aggregators, like max(abs(....)). - * Currently, we do not support nested functions or nested aggregations, aka (b) and (c). - * However, for the special EsFunctions included in the [supportedNestedFunctions] set, we have supported them in - * nested function calls and aggregations (b&c). Besides, the math constants included in the [mathConstants] set - * are regraded as scalar functions, but they are working well in the painless script. - * - * Thus, the types of functions to throw exceptions: - * (I)case (b) except that the arguments are from the [mathConstants] set; - * (II) case (b) except that the arguments are from the [supportedNestedFunctions] set; - * (III) case (c) except that the aggregators are from thet [supportedNestedFunctions] set. - */ - public static void verify(ScalarFunctionCallContext ctx) { - String funcName = StringUtils.toLower(ctx.scalarFunctionName().getText()); + /** + * The scalar function calls are separated into (a)typical function calls; (b)nested function + * calls with functions as arguments, like abs(log(...)); (c)aggregations with functions as + * aggregators, like max(abs(....)). Currently, we do not support nested functions or nested + * aggregations, aka (b) and (c). However, for the special EsFunctions included in the + * [supportedNestedFunctions] set, we have supported them in nested function calls and + * aggregations (b&c). Besides, the math constants included in the [mathConstants] set are + * regraded as scalar functions, but they are working well in the painless script. + * + *

Thus, the types of functions to throw exceptions: (I)case (b) except that the arguments are + * from the [mathConstants] set; (II) case (b) except that the arguments are from the + * [supportedNestedFunctions] set; (III) case (c) except that the aggregators are from thet + * [supportedNestedFunctions] set. + */ + public static void verify(ScalarFunctionCallContext ctx) { + String funcName = StringUtils.toLower(ctx.scalarFunctionName().getText()); - // type (III) - if (ctx.parent.parent instanceof OpenSearchLegacySqlParser.FunctionAsAggregatorFunctionContext - && !(supportedNestedFunctions.contains(StringUtils.toLower(funcName)))) { - throw new SqlFeatureNotImplementedException(StringUtils.format( - "Aggregation calls with function aggregator like [%s] are not supported yet", - ctx.parent.parent.getText())); + // type (III) + if (ctx.parent.parent instanceof OpenSearchLegacySqlParser.FunctionAsAggregatorFunctionContext + && !(supportedNestedFunctions.contains(StringUtils.toLower(funcName)))) { + throw new SqlFeatureNotImplementedException( + StringUtils.format( + "Aggregation calls with function aggregator like [%s] are not supported yet", + ctx.parent.parent.getText())); - // type (I) and (II) - } else if (ctx.parent.parent instanceof OpenSearchLegacySqlParser.NestedFunctionArgsContext - && !(mathConstants.contains(funcName) || supportedNestedFunctions.contains(funcName))) { - throw new SqlFeatureNotImplementedException(StringUtils.format( - "Nested function calls like [%s] are not supported yet", ctx.parent.parent.parent.getText())); + // type (I) and (II) + } else if (ctx.parent.parent instanceof OpenSearchLegacySqlParser.NestedFunctionArgsContext + && !(mathConstants.contains(funcName) || supportedNestedFunctions.contains(funcName))) { + throw new SqlFeatureNotImplementedException( + StringUtils.format( + "Nested function calls like [%s] are not supported yet", + ctx.parent.parent.parent.getText())); - // unsupported functions - } else if 
(unsupportedFunctions.contains(funcName)) { - throw new SqlFeatureNotImplementedException(StringUtils.format("Function [%s] is not supported yet", - funcName)); - } + // unsupported functions + } else if (unsupportedFunctions.contains(funcName)) { + throw new SqlFeatureNotImplementedException( + StringUtils.format("Function [%s] is not supported yet", funcName)); } + } - public static void verify(MathOperatorContext ctx) { - if (unsupportedOperators.contains(StringUtils.toLower(ctx.getText()))) { - throw new SqlFeatureNotImplementedException(StringUtils.format("Operator [%s] is not supported yet", - ctx.getText())); - } + public static void verify(MathOperatorContext ctx) { + if (unsupportedOperators.contains(StringUtils.toLower(ctx.getText()))) { + throw new SqlFeatureNotImplementedException( + StringUtils.format("Operator [%s] is not supported yet", ctx.getText())); } + } - public static void verify(RegexpPredicateContext ctx) { - throw new SqlFeatureNotImplementedException("Regexp predicate is not supported yet"); - } + public static void verify(RegexpPredicateContext ctx) { + throw new SqlFeatureNotImplementedException("Regexp predicate is not supported yet"); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/cursor/Cursor.java b/legacy/src/main/java/org/opensearch/sql/legacy/cursor/Cursor.java index d3985259dd..1a38016f9a 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/cursor/Cursor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/cursor/Cursor.java @@ -3,19 +3,17 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.cursor; - public interface Cursor { - NullCursor NULL_CURSOR = new NullCursor(); + NullCursor NULL_CURSOR = new NullCursor(); - /** - * All cursor's are of the form : - * The serialized form before encoding is upto Cursor implementation - */ - String generateCursorId(); + /** + * All cursor's are of the form : The serialized form before + * encoding is upto Cursor implementation + */ + 
String generateCursorId(); - CursorType getType(); + CursorType getType(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/cursor/CursorType.java b/legacy/src/main/java/org/opensearch/sql/legacy/cursor/CursorType.java index 7c96cb8835..0ac3a25809 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/cursor/CursorType.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/cursor/CursorType.java @@ -3,42 +3,41 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.cursor; import java.util.HashMap; import java.util.Map; /** - * Different types queries for which cursor is supported. - * The result execution, and cursor genreation/parsing will depend on the cursor type. - * NullCursor is the placeholder implementation in case of non-cursor query. + * Different types queries for which cursor is supported. The result execution, and cursor + * genreation/parsing will depend on the cursor type. NullCursor is the placeholder implementation + * in case of non-cursor query. 
*/ public enum CursorType { - NULL(null), - DEFAULT("d"), - AGGREGATION("a"), - JOIN("j"); + NULL(null), + DEFAULT("d"), + AGGREGATION("a"), + JOIN("j"); - public String id; + public String id; - CursorType(String id) { - this.id = id; - } + CursorType(String id) { + this.id = id; + } - public String getId() { - return this.id; - } + public String getId() { + return this.id; + } - public static final Map LOOKUP = new HashMap<>(); + public static final Map LOOKUP = new HashMap<>(); - static { - for (CursorType type : CursorType.values()) { - LOOKUP.put(type.getId(), type); - } + static { + for (CursorType type : CursorType.values()) { + LOOKUP.put(type.getId(), type); } + } - public static CursorType getById(String id) { - return LOOKUP.getOrDefault(id, NULL); - } + public static CursorType getById(String id) { + return LOOKUP.getOrDefault(id, NULL); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/cursor/DefaultCursor.java b/legacy/src/main/java/org/opensearch/sql/legacy/cursor/DefaultCursor.java index 856c1e5e2b..72addd6032 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/cursor/DefaultCursor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/cursor/DefaultCursor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.cursor; import com.google.common.base.Strings; @@ -21,140 +20,144 @@ import org.json.JSONObject; import org.opensearch.sql.legacy.executor.format.Schema; - /** - * Minimum metdata that will be serialized for generating cursorId for - * SELECT .... FROM .. ORDER BY .... queries + * Minimum metdata that will be serialized for generating cursorId for SELECT .... FROM .. ORDER BY + * .... 
queries */ @Getter @Setter @NoArgsConstructor public class DefaultCursor implements Cursor { - /** Make sure all keys are unique to prevent overriding - * and as small as possible to make cursor compact - */ - private static final String FETCH_SIZE = "f"; - private static final String ROWS_LEFT = "l"; - private static final String INDEX_PATTERN = "i"; - private static final String SCROLL_ID = "s"; - private static final String SCHEMA_COLUMNS = "c"; - private static final String FIELD_ALIAS_MAP = "a"; - - /** To get mappings for index to check if type is date needed for - * @see org.opensearch.sql.legacy.executor.format.DateFieldFormatter */ - @NonNull - private String indexPattern; - - /** List of Schema.Column for maintaining field order and generating null values of missing fields */ - @NonNull - private List columns; - - /** To delegate to correct cursor handler to get next page*/ - private final CursorType type = CursorType.DEFAULT; - + /** + * Make sure all keys are unique to prevent overriding and as small as possible to make cursor + * compact + */ + private static final String FETCH_SIZE = "f"; + + private static final String ROWS_LEFT = "l"; + private static final String INDEX_PATTERN = "i"; + private static final String SCROLL_ID = "s"; + private static final String SCHEMA_COLUMNS = "c"; + private static final String FIELD_ALIAS_MAP = "a"; + + /** + * To get mappings for index to check if type is date needed for + * + * @see org.opensearch.sql.legacy.executor.format.DateFieldFormatter + */ + @NonNull private String indexPattern; + + /** + * List of Schema.Column for maintaining field order and generating null values of missing fields + */ + @NonNull private List columns; + + /** To delegate to correct cursor handler to get next page */ + private final CursorType type = CursorType.DEFAULT; + + /** + * Truncate the @see DataRows to respect LIMIT clause and/or to identify last page to close scroll + * context. 
docsLeft is decremented by fetch_size for call to get page of result. + */ + private long rowsLeft; + + /** + * @see org.opensearch.sql.legacy.executor.format.SelectResultSet + */ + @NonNull private Map fieldAliasMap; + + /** To get next batch of result */ + private String scrollId; + + /** To reduce the number of rows left by fetchSize */ + @NonNull private Integer fetchSize; + + private Integer limit; + + @Override + public CursorType getType() { + return type; + } + + @Override + public String generateCursorId() { + if (rowsLeft <= 0 || Strings.isNullOrEmpty(scrollId)) { + return null; + } + JSONObject json = new JSONObject(); + json.put(FETCH_SIZE, fetchSize); + json.put(ROWS_LEFT, rowsLeft); + json.put(INDEX_PATTERN, indexPattern); + json.put(SCROLL_ID, scrollId); + json.put(SCHEMA_COLUMNS, getSchemaAsJson()); + json.put(FIELD_ALIAS_MAP, fieldAliasMap); + return String.format("%s:%s", type.getId(), encodeCursor(json)); + } + + public static DefaultCursor from(String cursorId) { /** - * Truncate the @see DataRows to respect LIMIT clause and/or to identify last page to close scroll context. - * docsLeft is decremented by fetch_size for call to get page of result. 
+ * It is assumed that cursorId here is the second part of the original cursor passed by the + * client after removing first part which identifies cursor type */ - private long rowsLeft; - - /** @see org.opensearch.sql.legacy.executor.format.SelectResultSet */ - @NonNull - private Map fieldAliasMap; - - /** To get next batch of result */ - private String scrollId; - - /** To reduce the number of rows left by fetchSize */ - @NonNull - private Integer fetchSize; - - private Integer limit; - - @Override - public CursorType getType() { - return type; - } - - @Override - public String generateCursorId() { - if (rowsLeft <=0 || Strings.isNullOrEmpty(scrollId)) { - return null; - } - JSONObject json = new JSONObject(); - json.put(FETCH_SIZE, fetchSize); - json.put(ROWS_LEFT, rowsLeft); - json.put(INDEX_PATTERN, indexPattern); - json.put(SCROLL_ID, scrollId); - json.put(SCHEMA_COLUMNS, getSchemaAsJson()); - json.put(FIELD_ALIAS_MAP, fieldAliasMap); - return String.format("%s:%s", type.getId(), encodeCursor(json)); - } - - public static DefaultCursor from(String cursorId) { - /** - * It is assumed that cursorId here is the second part of the original cursor passed - * by the client after removing first part which identifies cursor type - */ - JSONObject json = decodeCursor(cursorId); - DefaultCursor cursor = new DefaultCursor(); - cursor.setFetchSize(json.getInt(FETCH_SIZE)); - cursor.setRowsLeft(json.getLong(ROWS_LEFT)); - cursor.setIndexPattern(json.getString(INDEX_PATTERN)); - cursor.setScrollId(json.getString(SCROLL_ID)); - cursor.setColumns(getColumnsFromSchema(json.getJSONArray(SCHEMA_COLUMNS))); - cursor.setFieldAliasMap(fieldAliasMap(json.getJSONObject(FIELD_ALIAS_MAP))); - - return cursor; - } - - private JSONArray getSchemaAsJson() { - JSONArray schemaJson = new JSONArray(); - - for (Schema.Column column : columns) { - schemaJson.put(schemaEntry(column.getName(), column.getAlias(), column.getType())); - } - - return schemaJson; + JSONObject json = 
decodeCursor(cursorId); + DefaultCursor cursor = new DefaultCursor(); + cursor.setFetchSize(json.getInt(FETCH_SIZE)); + cursor.setRowsLeft(json.getLong(ROWS_LEFT)); + cursor.setIndexPattern(json.getString(INDEX_PATTERN)); + cursor.setScrollId(json.getString(SCROLL_ID)); + cursor.setColumns(getColumnsFromSchema(json.getJSONArray(SCHEMA_COLUMNS))); + cursor.setFieldAliasMap(fieldAliasMap(json.getJSONObject(FIELD_ALIAS_MAP))); + + return cursor; + } + + private JSONArray getSchemaAsJson() { + JSONArray schemaJson = new JSONArray(); + + for (Schema.Column column : columns) { + schemaJson.put(schemaEntry(column.getName(), column.getAlias(), column.getType())); } - private JSONObject schemaEntry(String name, String alias, String type) { - JSONObject entry = new JSONObject(); - entry.put("name", name); - if (alias != null) { - entry.put("alias", alias); - } - entry.put("type", type); - return entry; - } - - private static String encodeCursor(JSONObject cursorJson) { - return Base64.getEncoder().encodeToString(cursorJson.toString().getBytes()); - } - - private static JSONObject decodeCursor(String cursorId) { - return new JSONObject(new String(Base64.getDecoder().decode(cursorId))); - } - - private static Map fieldAliasMap(JSONObject json) { - Map fieldToAliasMap = new HashMap<>(); - json.keySet().forEach(key -> fieldToAliasMap.put(key, json.get(key).toString())); - return fieldToAliasMap; - } + return schemaJson; + } - private static List getColumnsFromSchema(JSONArray schema) { - List columns = IntStream. - range(0, schema.length()). 
- mapToObj(i -> { - JSONObject jsonColumn = schema.getJSONObject(i); - return new Schema.Column( - jsonColumn.getString("name"), - jsonColumn.optString("alias", null), - Schema.Type.valueOf(jsonColumn.getString("type").toUpperCase()) - ); - } - ).collect(Collectors.toList()); - return columns; + private JSONObject schemaEntry(String name, String alias, String type) { + JSONObject entry = new JSONObject(); + entry.put("name", name); + if (alias != null) { + entry.put("alias", alias); } + entry.put("type", type); + return entry; + } + + private static String encodeCursor(JSONObject cursorJson) { + return Base64.getEncoder().encodeToString(cursorJson.toString().getBytes()); + } + + private static JSONObject decodeCursor(String cursorId) { + return new JSONObject(new String(Base64.getDecoder().decode(cursorId))); + } + + private static Map fieldAliasMap(JSONObject json) { + Map fieldToAliasMap = new HashMap<>(); + json.keySet().forEach(key -> fieldToAliasMap.put(key, json.get(key).toString())); + return fieldToAliasMap; + } + + private static List getColumnsFromSchema(JSONArray schema) { + List columns = + IntStream.range(0, schema.length()) + .mapToObj( + i -> { + JSONObject jsonColumn = schema.getJSONObject(i); + return new Schema.Column( + jsonColumn.getString("name"), + jsonColumn.optString("alias", null), + Schema.Type.valueOf(jsonColumn.getString("type").toUpperCase())); + }) + .collect(Collectors.toList()); + return columns; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/cursor/NullCursor.java b/legacy/src/main/java/org/opensearch/sql/legacy/cursor/NullCursor.java index fb6beca96d..5b99f49515 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/cursor/NullCursor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/cursor/NullCursor.java @@ -3,27 +3,24 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.cursor; -/** - * A placeholder Cursor implementation to work with non-paginated queries. 
- */ +/** A placeholder Cursor implementation to work with non-paginated queries. */ public class NullCursor implements Cursor { - private final CursorType type = CursorType.NULL; + private final CursorType type = CursorType.NULL; - @Override - public String generateCursorId() { - return null; - } + @Override + public String generateCursorId() { + return null; + } - @Override - public CursorType getType() { - return type; - } + @Override + public CursorType getType() { + return type; + } - public NullCursor from(String cursorId) { - return NULL_CURSOR; - } + public NullCursor from(String cursorId) { + return NULL_CURSOR; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/ColumnTypeProvider.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/ColumnTypeProvider.java index 3b2691186b..b7d90b66da 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/ColumnTypeProvider.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/ColumnTypeProvider.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import com.google.common.collect.ImmutableList; @@ -17,66 +16,64 @@ import org.opensearch.sql.legacy.antlr.semantic.types.special.Product; import org.opensearch.sql.legacy.executor.format.Schema; -/** - * The definition of column type provider - */ +/** The definition of column type provider */ public class ColumnTypeProvider { - private final List typeList; + private final List typeList; - private static final Map TYPE_MAP = - new ImmutableMap.Builder() - .put(OpenSearchDataType.SHORT, Schema.Type.SHORT) - .put(OpenSearchDataType.LONG, Schema.Type.LONG) - .put(OpenSearchDataType.INTEGER, Schema.Type.INTEGER) - .put(OpenSearchDataType.FLOAT, Schema.Type.FLOAT) - .put(OpenSearchDataType.DOUBLE, Schema.Type.DOUBLE) - .put(OpenSearchDataType.KEYWORD, Schema.Type.KEYWORD) - .put(OpenSearchDataType.TEXT, Schema.Type.TEXT) - .put(OpenSearchDataType.STRING, Schema.Type.TEXT) - 
.put(OpenSearchDataType.DATE, Schema.Type.DATE) - .put(OpenSearchDataType.BOOLEAN, Schema.Type.BOOLEAN) - .put(OpenSearchDataType.UNKNOWN, Schema.Type.DOUBLE) - .build(); - public static final Schema.Type COLUMN_DEFAULT_TYPE = Schema.Type.DOUBLE; + private static final Map TYPE_MAP = + new ImmutableMap.Builder() + .put(OpenSearchDataType.SHORT, Schema.Type.SHORT) + .put(OpenSearchDataType.LONG, Schema.Type.LONG) + .put(OpenSearchDataType.INTEGER, Schema.Type.INTEGER) + .put(OpenSearchDataType.FLOAT, Schema.Type.FLOAT) + .put(OpenSearchDataType.DOUBLE, Schema.Type.DOUBLE) + .put(OpenSearchDataType.KEYWORD, Schema.Type.KEYWORD) + .put(OpenSearchDataType.TEXT, Schema.Type.TEXT) + .put(OpenSearchDataType.STRING, Schema.Type.TEXT) + .put(OpenSearchDataType.DATE, Schema.Type.DATE) + .put(OpenSearchDataType.BOOLEAN, Schema.Type.BOOLEAN) + .put(OpenSearchDataType.UNKNOWN, Schema.Type.DOUBLE) + .build(); + public static final Schema.Type COLUMN_DEFAULT_TYPE = Schema.Type.DOUBLE; - public ColumnTypeProvider(Type type) { - this.typeList = convertOutputColumnType(type); - } + public ColumnTypeProvider(Type type) { + this.typeList = convertOutputColumnType(type); + } - public ColumnTypeProvider() { - this.typeList = new ArrayList<>(); - } + public ColumnTypeProvider() { + this.typeList = new ArrayList<>(); + } - /** - * Get the type of column by index. - * - * @param index column index. - * @return column type. - */ - public Schema.Type get(int index) { - if (typeList.isEmpty()) { - return COLUMN_DEFAULT_TYPE; - } else { - return typeList.get(index); - } + /** + * Get the type of column by index. + * + * @param index column index. + * @return column type. 
+ */ + public Schema.Type get(int index) { + if (typeList.isEmpty()) { + return COLUMN_DEFAULT_TYPE; + } else { + return typeList.get(index); } + } - private List convertOutputColumnType(Type type) { - if (type instanceof Product) { - List types = ((Product) type).getTypes(); - return types.stream().map(t -> convertType(t)).collect(Collectors.toList()); - } else if (type instanceof OpenSearchDataType) { - return ImmutableList.of(convertType(type)); - } else { - return ImmutableList.of(COLUMN_DEFAULT_TYPE); - } + private List convertOutputColumnType(Type type) { + if (type instanceof Product) { + List types = ((Product) type).getTypes(); + return types.stream().map(t -> convertType(t)).collect(Collectors.toList()); + } else if (type instanceof OpenSearchDataType) { + return ImmutableList.of(convertType(type)); + } else { + return ImmutableList.of(COLUMN_DEFAULT_TYPE); } + } - private Schema.Type convertType(Type type) { - try { - return TYPE_MAP.getOrDefault(type, COLUMN_DEFAULT_TYPE); - } catch (Exception e) { - return COLUMN_DEFAULT_TYPE; - } + private Schema.Type convertType(Type type) { + try { + return TYPE_MAP.getOrDefault(type, COLUMN_DEFAULT_TYPE); + } catch (Exception e) { + return COLUMN_DEFAULT_TYPE; } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Condition.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Condition.java index ff6b016ddb..8c6efd0819 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Condition.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Condition.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import com.alibaba.druid.sql.ast.SQLExpr; @@ -18,404 +17,422 @@ import org.opensearch.sql.legacy.utils.StringUtils; /** - * - * * @author ansj */ public class Condition extends Where { - public enum OPERATOR { - - EQ, - GT, - LT, - GTE, - LTE, - N, - LIKE, - NLIKE, - REGEXP, - IS, - ISN, - IN, - NIN, - BETWEEN, - 
NBETWEEN, - GEO_INTERSECTS, - GEO_BOUNDING_BOX, - GEO_DISTANCE, - GEO_POLYGON, - IN_TERMS, - TERM, - IDS_QUERY, - NESTED_COMPLEX, - NOT_EXISTS_NESTED_COMPLEX, - CHILDREN_COMPLEX, - SCRIPT, - NIN_TERMS, - NTERM, - NREGEXP; - - public static Map methodNameToOpear; - - public static Map operStringToOpear; - - public static Map simpleOperStringToOpear; - - private static BiMap negatives; - - private static BiMap simpleReverses; - - static { - methodNameToOpear = new HashMap<>(); - methodNameToOpear.put("term", TERM); - methodNameToOpear.put("matchterm", TERM); - methodNameToOpear.put("match_term", TERM); - methodNameToOpear.put("terms", IN_TERMS); - methodNameToOpear.put("in_terms", IN_TERMS); - methodNameToOpear.put("ids", IDS_QUERY); - methodNameToOpear.put("ids_query", IDS_QUERY); - methodNameToOpear.put("regexp", REGEXP); - methodNameToOpear.put("regexp_query", REGEXP); - } - - static { - operStringToOpear = new HashMap<>(); - operStringToOpear.put("=", EQ); - operStringToOpear.put(">", GT); - operStringToOpear.put("<", LT); - operStringToOpear.put(">=", GTE); - operStringToOpear.put("<=", LTE); - operStringToOpear.put("<>", N); - operStringToOpear.put("LIKE", LIKE); - operStringToOpear.put("NOT", N); - operStringToOpear.put("NOT LIKE", NLIKE); - operStringToOpear.put("IS", IS); - operStringToOpear.put("IS NOT", ISN); - operStringToOpear.put("IN", IN); - operStringToOpear.put("NOT IN", NIN); - operStringToOpear.put("BETWEEN", BETWEEN); - operStringToOpear.put("NOT BETWEEN", NBETWEEN); - operStringToOpear.put("GEO_INTERSECTS", GEO_INTERSECTS); - operStringToOpear.put("GEO_BOUNDING_BOX", GEO_BOUNDING_BOX); - operStringToOpear.put("GEO_DISTANCE", GEO_DISTANCE); - operStringToOpear.put("GEO_POLYGON", GEO_POLYGON); - operStringToOpear.put("NESTED", NESTED_COMPLEX); - operStringToOpear.put("CHILDREN", CHILDREN_COMPLEX); - operStringToOpear.put("SCRIPT", SCRIPT); - } - - static { - simpleOperStringToOpear = new HashMap<>(); - simpleOperStringToOpear.put("=", EQ); - 
simpleOperStringToOpear.put(">", GT); - simpleOperStringToOpear.put("<", LT); - simpleOperStringToOpear.put(">=", GTE); - simpleOperStringToOpear.put("<=", LTE); - simpleOperStringToOpear.put("<>", N); - } - - static { - negatives = HashBiMap.create(7); - negatives.put(EQ, N); - negatives.put(IN_TERMS, NIN_TERMS); - negatives.put(TERM, NTERM); - negatives.put(GT, LTE); - negatives.put(LT, GTE); - negatives.put(LIKE, NLIKE); - negatives.put(IS, ISN); - negatives.put(IN, NIN); - negatives.put(BETWEEN, NBETWEEN); - negatives.put(NESTED_COMPLEX, NOT_EXISTS_NESTED_COMPLEX); - negatives.put(REGEXP, NREGEXP); - } - - static { - simpleReverses = HashBiMap.create(4); - simpleReverses.put(EQ, EQ); - simpleReverses.put(GT, LT); - simpleReverses.put(GTE, LTE); - simpleReverses.put(N, N); - } - - public OPERATOR negative() throws SqlParseException { - OPERATOR negative = negatives.get(this); - negative = negative != null ? negative : negatives.inverse().get(this); - if (negative == null) { - throw new SqlParseException(StringUtils.format("Negative operator [%s] is not supported.", - this.name())); - } - return negative; - } - - public OPERATOR simpleReverse() throws SqlParseException { - OPERATOR reverse = simpleReverses.get(this); - reverse = reverse != null ? 
reverse : simpleReverses.inverse().get(this); - if (reverse == null) { - throw new SqlParseException(StringUtils.format("Simple reverse operator [%s] is not supported.", - this.name())); - } - return reverse; - } - - public Boolean isSimpleOperator() { - return simpleOperStringToOpear.containsValue(this); - } - } - - private String name; - - private SQLExpr nameExpr; - - private Object value; - - public SQLExpr getNameExpr() { - return nameExpr; - } - - public SQLExpr getValueExpr() { - return valueExpr; - } - - private SQLExpr valueExpr; - - private OPERATOR OPERATOR; - - private Object relationshipType; - - private boolean isNested; - private String nestedPath; - - private boolean isChildren; - private String childType; - - public Condition(CONN conn, String field, SQLExpr nameExpr, String condition, Object obj, SQLExpr valueExpr) - throws SqlParseException { - this(conn, field, nameExpr, condition, obj, valueExpr, null); + public enum OPERATOR { + EQ, + GT, + LT, + GTE, + LTE, + N, + LIKE, + NLIKE, + REGEXP, + IS, + ISN, + IN, + NIN, + BETWEEN, + NBETWEEN, + GEO_INTERSECTS, + GEO_BOUNDING_BOX, + GEO_DISTANCE, + GEO_POLYGON, + IN_TERMS, + TERM, + IDS_QUERY, + NESTED_COMPLEX, + NOT_EXISTS_NESTED_COMPLEX, + CHILDREN_COMPLEX, + SCRIPT, + NIN_TERMS, + NTERM, + NREGEXP; + + public static Map methodNameToOpear; + + public static Map operStringToOpear; + + public static Map simpleOperStringToOpear; + + private static BiMap negatives; + + private static BiMap simpleReverses; + + static { + methodNameToOpear = new HashMap<>(); + methodNameToOpear.put("term", TERM); + methodNameToOpear.put("matchterm", TERM); + methodNameToOpear.put("match_term", TERM); + methodNameToOpear.put("terms", IN_TERMS); + methodNameToOpear.put("in_terms", IN_TERMS); + methodNameToOpear.put("ids", IDS_QUERY); + methodNameToOpear.put("ids_query", IDS_QUERY); + methodNameToOpear.put("regexp", REGEXP); + methodNameToOpear.put("regexp_query", REGEXP); } - public Condition(CONN conn, String field, 
SQLExpr nameExpr, OPERATOR condition, Object obj, SQLExpr valueExpr) - throws SqlParseException { - this(conn, field, nameExpr, condition, obj, valueExpr, null); + static { + operStringToOpear = new HashMap<>(); + operStringToOpear.put("=", EQ); + operStringToOpear.put(">", GT); + operStringToOpear.put("<", LT); + operStringToOpear.put(">=", GTE); + operStringToOpear.put("<=", LTE); + operStringToOpear.put("<>", N); + operStringToOpear.put("LIKE", LIKE); + operStringToOpear.put("NOT", N); + operStringToOpear.put("NOT LIKE", NLIKE); + operStringToOpear.put("IS", IS); + operStringToOpear.put("IS NOT", ISN); + operStringToOpear.put("IN", IN); + operStringToOpear.put("NOT IN", NIN); + operStringToOpear.put("BETWEEN", BETWEEN); + operStringToOpear.put("NOT BETWEEN", NBETWEEN); + operStringToOpear.put("GEO_INTERSECTS", GEO_INTERSECTS); + operStringToOpear.put("GEO_BOUNDING_BOX", GEO_BOUNDING_BOX); + operStringToOpear.put("GEO_DISTANCE", GEO_DISTANCE); + operStringToOpear.put("GEO_POLYGON", GEO_POLYGON); + operStringToOpear.put("NESTED", NESTED_COMPLEX); + operStringToOpear.put("CHILDREN", CHILDREN_COMPLEX); + operStringToOpear.put("SCRIPT", SCRIPT); } - public Condition(CONN conn, String name, SQLExpr nameExpr, String oper, - Object value, SQLExpr valueExpr, Object relationshipType) throws SqlParseException { - super(conn); - - this.OPERATOR = null; - this.name = name; - this.value = value; - this.nameExpr = nameExpr; - this.valueExpr = valueExpr; - - this.relationshipType = relationshipType; - - if (this.relationshipType != null) { - if (this.relationshipType instanceof NestedType) { - NestedType nestedType = (NestedType) relationshipType; - - this.isNested = true; - this.nestedPath = nestedType.path; - this.isChildren = false; - this.childType = ""; - } else if (relationshipType instanceof ChildrenType) { - ChildrenType childrenType = (ChildrenType) relationshipType; - - this.isNested = false; - this.nestedPath = ""; - this.isChildren = true; - this.childType = 
childrenType.childType; - } - } else { - this.isNested = false; - this.nestedPath = ""; - this.isChildren = false; - this.childType = ""; - } - - if (OPERATOR.operStringToOpear.containsKey(oper)) { - this.OPERATOR = OPERATOR.operStringToOpear.get(oper); - } else { - throw new SqlParseException("Unsupported operation: " + oper); - } + static { + simpleOperStringToOpear = new HashMap<>(); + simpleOperStringToOpear.put("=", EQ); + simpleOperStringToOpear.put(">", GT); + simpleOperStringToOpear.put("<", LT); + simpleOperStringToOpear.put(">=", GTE); + simpleOperStringToOpear.put("<=", LTE); + simpleOperStringToOpear.put("<>", N); } - - public Condition(CONN conn, - String name, - SQLExpr nameExpr, - OPERATOR oper, - Object value, - SQLExpr valueExpr, - Object relationshipType - ) throws SqlParseException { - super(conn); - - this.OPERATOR = null; - this.nameExpr = nameExpr; - this.valueExpr = valueExpr; - this.name = name; - this.value = value; - this.OPERATOR = oper; - this.relationshipType = relationshipType; - - if (this.relationshipType != null) { - if (this.relationshipType instanceof NestedType) { - NestedType nestedType = (NestedType) relationshipType; - - this.isNested = true; - this.nestedPath = nestedType.path; - this.isChildren = false; - this.childType = ""; - } else if (relationshipType instanceof ChildrenType) { - ChildrenType childrenType = (ChildrenType) relationshipType; - - this.isNested = false; - this.nestedPath = ""; - this.isChildren = true; - this.childType = childrenType.childType; - } - } else { - this.isNested = false; - this.nestedPath = ""; - this.isChildren = false; - this.childType = ""; - } + static { + negatives = HashBiMap.create(7); + negatives.put(EQ, N); + negatives.put(IN_TERMS, NIN_TERMS); + negatives.put(TERM, NTERM); + negatives.put(GT, LTE); + negatives.put(LT, GTE); + negatives.put(LIKE, NLIKE); + negatives.put(IS, ISN); + negatives.put(IN, NIN); + negatives.put(BETWEEN, NBETWEEN); + negatives.put(NESTED_COMPLEX, 
NOT_EXISTS_NESTED_COMPLEX); + negatives.put(REGEXP, NREGEXP); } - public String getOpertatorSymbol() throws SqlParseException { - switch (OPERATOR) { - case EQ: - return "=="; - case GT: - return ">"; - case LT: - return "<"; - case GTE: - return ">="; - case LTE: - return "<="; - case N: - return "<>"; - case IS: - return "=="; - - case ISN: - return "!="; - default: - throw new SqlParseException(StringUtils.format("Failed to parse operator [%s]", OPERATOR)); - } + static { + simpleReverses = HashBiMap.create(4); + simpleReverses.put(EQ, EQ); + simpleReverses.put(GT, LT); + simpleReverses.put(GTE, LTE); + simpleReverses.put(N, N); } - - public String getName() { - return name; + public OPERATOR negative() throws SqlParseException { + OPERATOR negative = negatives.get(this); + negative = negative != null ? negative : negatives.inverse().get(this); + if (negative == null) { + throw new SqlParseException( + StringUtils.format("Negative operator [%s] is not supported.", this.name())); + } + return negative; } - public void setName(String name) { - this.name = name; + public OPERATOR simpleReverse() throws SqlParseException { + OPERATOR reverse = simpleReverses.get(this); + reverse = reverse != null ? 
reverse : simpleReverses.inverse().get(this); + if (reverse == null) { + throw new SqlParseException( + StringUtils.format("Simple reverse operator [%s] is not supported.", this.name())); + } + return reverse; } - public Object getValue() { - return value; + public Boolean isSimpleOperator() { + return simpleOperStringToOpear.containsValue(this); } - - public void setValue(Object value) { - this.value = value; + } + + private String name; + + private SQLExpr nameExpr; + + private Object value; + + public SQLExpr getNameExpr() { + return nameExpr; + } + + public SQLExpr getValueExpr() { + return valueExpr; + } + + private SQLExpr valueExpr; + + private OPERATOR OPERATOR; + + private Object relationshipType; + + private boolean isNested; + private String nestedPath; + + private boolean isChildren; + private String childType; + + public Condition( + CONN conn, String field, SQLExpr nameExpr, String condition, Object obj, SQLExpr valueExpr) + throws SqlParseException { + this(conn, field, nameExpr, condition, obj, valueExpr, null); + } + + public Condition( + CONN conn, String field, SQLExpr nameExpr, OPERATOR condition, Object obj, SQLExpr valueExpr) + throws SqlParseException { + this(conn, field, nameExpr, condition, obj, valueExpr, null); + } + + public Condition( + CONN conn, + String name, + SQLExpr nameExpr, + String oper, + Object value, + SQLExpr valueExpr, + Object relationshipType) + throws SqlParseException { + super(conn); + + this.OPERATOR = null; + this.name = name; + this.value = value; + this.nameExpr = nameExpr; + this.valueExpr = valueExpr; + + this.relationshipType = relationshipType; + + if (this.relationshipType != null) { + if (this.relationshipType instanceof NestedType) { + NestedType nestedType = (NestedType) relationshipType; + + this.isNested = true; + this.nestedPath = nestedType.path; + this.isChildren = false; + this.childType = ""; + } else if (relationshipType instanceof ChildrenType) { + ChildrenType childrenType = (ChildrenType) 
relationshipType; + + this.isNested = false; + this.nestedPath = ""; + this.isChildren = true; + this.childType = childrenType.childType; + } + } else { + this.isNested = false; + this.nestedPath = ""; + this.isChildren = false; + this.childType = ""; } - public OPERATOR getOPERATOR() { - return OPERATOR; + if (OPERATOR.operStringToOpear.containsKey(oper)) { + this.OPERATOR = OPERATOR.operStringToOpear.get(oper); + } else { + throw new SqlParseException("Unsupported operation: " + oper); } - - public void setOPERATOR(OPERATOR OPERATOR) { - this.OPERATOR = OPERATOR; + } + + public Condition( + CONN conn, + String name, + SQLExpr nameExpr, + OPERATOR oper, + Object value, + SQLExpr valueExpr, + Object relationshipType) + throws SqlParseException { + super(conn); + + this.OPERATOR = null; + this.nameExpr = nameExpr; + this.valueExpr = valueExpr; + this.name = name; + this.value = value; + this.OPERATOR = oper; + this.relationshipType = relationshipType; + + if (this.relationshipType != null) { + if (this.relationshipType instanceof NestedType) { + NestedType nestedType = (NestedType) relationshipType; + + this.isNested = true; + this.nestedPath = nestedType.path; + this.isChildren = false; + this.childType = ""; + } else if (relationshipType instanceof ChildrenType) { + ChildrenType childrenType = (ChildrenType) relationshipType; + + this.isNested = false; + this.nestedPath = ""; + this.isChildren = true; + this.childType = childrenType.childType; + } + } else { + this.isNested = false; + this.nestedPath = ""; + this.isChildren = false; + this.childType = ""; } - - public Object getRelationshipType() { - return relationshipType; + } + + public String getOpertatorSymbol() throws SqlParseException { + switch (OPERATOR) { + case EQ: + return "=="; + case GT: + return ">"; + case LT: + return "<"; + case GTE: + return ">="; + case LTE: + return "<="; + case N: + return "<>"; + case IS: + return "=="; + + case ISN: + return "!="; + default: + throw new 
SqlParseException(StringUtils.format("Failed to parse operator [%s]", OPERATOR)); } - - public void setRelationshipType(Object relationshipType) { - this.relationshipType = relationshipType; - } - - public boolean isNested() { - return isNested; - } - - public void setNested(boolean isNested) { - this.isNested = isNested; - } - - public String getNestedPath() { - return nestedPath; - } - - public void setNestedPath(String nestedPath) { - this.nestedPath = nestedPath; - } - - public boolean isChildren() { - return isChildren; - } - - public void setChildren(boolean isChildren) { - this.isChildren = isChildren; - } - - public String getChildType() { - return childType; - } - - public void setChildType(String childType) { - this.childType = childType; - } - - /** - * Return true if the opear is {@link OPERATOR#NESTED_COMPLEX} - * For example, the opear is {@link OPERATOR#NESTED_COMPLEX} when condition is - * nested('projects', projects.started_year > 2000 OR projects.name LIKE '%security%') - */ - public boolean isNestedComplex() { - return OPERATOR.NESTED_COMPLEX == OPERATOR; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public Object getValue() { + return value; + } + + public void setValue(Object value) { + this.value = value; + } + + public OPERATOR getOPERATOR() { + return OPERATOR; + } + + public void setOPERATOR(OPERATOR OPERATOR) { + this.OPERATOR = OPERATOR; + } + + public Object getRelationshipType() { + return relationshipType; + } + + public void setRelationshipType(Object relationshipType) { + this.relationshipType = relationshipType; + } + + public boolean isNested() { + return isNested; + } + + public void setNested(boolean isNested) { + this.isNested = isNested; + } + + public String getNestedPath() { + return nestedPath; + } + + public void setNestedPath(String nestedPath) { + this.nestedPath = nestedPath; + } + + public boolean isChildren() { + return isChildren; + } + + public 
void setChildren(boolean isChildren) { + this.isChildren = isChildren; + } + + public String getChildType() { + return childType; + } + + public void setChildType(String childType) { + this.childType = childType; + } + + /** + * Return true if the opear is {@link OPERATOR#NESTED_COMPLEX} For example, the opear is {@link + * OPERATOR#NESTED_COMPLEX} when condition is nested('projects', projects.started_year > 2000 OR + * projects.name LIKE '%security%') + */ + public boolean isNestedComplex() { + return OPERATOR.NESTED_COMPLEX == OPERATOR; + } + + @Override + public String toString() { + String result = ""; + + if (this.isNested()) { + result = "nested condition "; + if (this.getNestedPath() != null) { + result += "on path:" + this.getNestedPath() + " "; + } + } else if (this.isChildren()) { + result = "children condition "; + + if (this.getChildType() != null) { + result += "on child: " + this.getChildType() + " "; + } } - @Override - public String toString() { - String result = ""; - - if (this.isNested()) { - result = "nested condition "; - if (this.getNestedPath() != null) { - result += "on path:" + this.getNestedPath() + " "; - } - } else if (this.isChildren()) { - result = "children condition "; - - if (this.getChildType() != null) { - result += "on child: " + this.getChildType() + " "; - } - } - - if (value instanceof Object[]) { - result += this.conn + " " + this.name + " " + this.OPERATOR + " " + Arrays.toString((Object[]) value); - } else { - result += this.conn + " " + this.name + " " + this.OPERATOR + " " + this.value; - } - - return result; + if (value instanceof Object[]) { + result += + this.conn + + " " + + this.name + + " " + + this.OPERATOR + + " " + + Arrays.toString((Object[]) value); + } else { + result += this.conn + " " + this.name + " " + this.OPERATOR + " " + this.value; } - @Override - public Object clone() throws CloneNotSupportedException { - try { - return new Condition(this.getConn(), this.getName(), this.getNameExpr(), - 
this.getOPERATOR(), this.getValue(), this.getValueExpr(), this.getRelationshipType()); - } catch (SqlParseException e) { + return result; + } + + @Override + public Object clone() throws CloneNotSupportedException { + try { + return new Condition( + this.getConn(), + this.getName(), + this.getNameExpr(), + this.getOPERATOR(), + this.getValue(), + this.getValueExpr(), + this.getRelationshipType()); + } catch (SqlParseException e) { - } - return null; } + return null; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Delete.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Delete.java index 587a8b3ef9..efa77da0a5 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Delete.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Delete.java @@ -3,12 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; -/** - * SQL Delete statement. - */ -public class Delete extends Query { - -} +/** SQL Delete statement. 
*/ +public class Delete extends Query {} diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Field.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Field.java index 1b6be05f20..09471fa2d7 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Field.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Field.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import com.alibaba.druid.sql.ast.SQLExpr; @@ -13,143 +12,136 @@ import org.opensearch.sql.legacy.parser.NestedType; /** - * - * * @author ansj */ public class Field implements Cloneable { - /** - * Constant for '*' field in SELECT - */ - public static final Field STAR = new Field("*", ""); - - protected String name; - protected SQLAggregateOption option; - private String alias; - private NestedType nested; - private ChildrenType children; - private SQLExpr expression; - - public Field(String name, String alias) { - this.name = name; - this.alias = alias; - this.nested = null; - this.children = null; - this.option = null; - } - - public Field(String name, String alias, NestedType nested, ChildrenType children) { - this.name = name; - this.alias = alias; - this.nested = nested; - this.children = children; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getAlias() { - return alias; - } - - public void setAlias(String alias) { - this.alias = alias; - } - - public boolean isNested() { - return this.nested != null; - } - - public boolean isReverseNested() { - return this.nested != null && this.nested.isReverse(); - } - - public void setNested(NestedType nested) { - this.nested = nested; - } - - public String getNestedPath() { - if (this.nested == null) { - return null; - } - - return this.nested.path; - } - - public boolean isChildren() { - return this.children != null; - } - - public void setChildren(ChildrenType children) { - 
this.children = children; - } - - public String getChildType() { - if (this.children == null) { - return null; - } - return this.children.childType; - } - - public void setAggregationOption(SQLAggregateOption option) { - this.option = option; - } - - public SQLAggregateOption getOption() { - return option; - } - - @Override - public String toString() { - return this.name; - } + /** Constant for '*' field in SELECT */ + public static final Field STAR = new Field("*", ""); + + protected String name; + protected SQLAggregateOption option; + private String alias; + private NestedType nested; + private ChildrenType children; + private SQLExpr expression; + + public Field(String name, String alias) { + this.name = name; + this.alias = alias; + this.nested = null; + this.children = null; + this.option = null; + } + + public Field(String name, String alias, NestedType nested, ChildrenType children) { + this.name = name; + this.alias = alias; + this.nested = nested; + this.children = children; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getAlias() { + return alias; + } + + public void setAlias(String alias) { + this.alias = alias; + } + + public boolean isNested() { + return this.nested != null; + } + + public boolean isReverseNested() { + return this.nested != null && this.nested.isReverse(); + } + + public void setNested(NestedType nested) { + this.nested = nested; + } + + public String getNestedPath() { + if (this.nested == null) { + return null; + } + + return this.nested.path; + } + + public boolean isChildren() { + return this.children != null; + } + + public void setChildren(ChildrenType children) { + this.children = children; + } + + public String getChildType() { + if (this.children == null) { + return null; + } + return this.children.childType; + } + + public void setAggregationOption(SQLAggregateOption option) { + this.option = option; + } + + public SQLAggregateOption 
getOption() { + return option; + } + + @Override + public String toString() { + return this.name; + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (obj.getClass() != this.getClass()) { + return false; + } + Field other = (Field) obj; + boolean namesAreEqual = + (other.getName() == null && this.name == null) || other.getName().equals(this.name); + if (!namesAreEqual) { + return false; + } + return (other.getAlias() == null && this.alias == null) || other.getAlias().equals(this.alias); + } + + @Override + public int hashCode() { // Bug: equals() is present but hashCode was missing + return Objects.hash(name, alias); + } + + @Override + protected Object clone() throws CloneNotSupportedException { + return new Field(new String(this.name), new String(this.alias)); + } + + /** Returns true if the field is script field. */ + public boolean isScriptField() { + return false; + } + + public void setExpression(SQLExpr expression) { + this.expression = expression; + } - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (obj.getClass() != this.getClass()) { - return false; - } - Field other = (Field) obj; - boolean namesAreEqual = (other.getName() == null && this.name == null) - || other.getName().equals(this.name); - if (!namesAreEqual) { - return false; - } - return (other.getAlias() == null && this.alias == null) - || other.getAlias().equals(this.alias); - } - - @Override - public int hashCode() { // Bug: equals() is present but hashCode was missing - return Objects.hash(name, alias); - } - - @Override - protected Object clone() throws CloneNotSupportedException { - return new Field(new String(this.name), new String(this.alias)); - } - - /** - * Returns true if the field is script field. 
- */ - public boolean isScriptField() { - return false; - } - - public void setExpression(SQLExpr expression) { - this.expression = expression; - } - - public SQLExpr getExpression() { - return expression; - } + public SQLExpr getExpression() { + return expression; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/From.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/From.java index 6455df727c..67ac7f0e3c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/From.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/From.java @@ -3,55 +3,49 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; - -/** - * Represents the from clause. - * Contains index and type which the - * query refer to. - */ +/** Represents the from clause. Contains index and type which the query refer to. */ public class From { - private String index; - private String alias; - - /** - * Extract index and type from the 'from' string - * - * @param from The part after the FROM keyword. - */ - public From(String from) { - index = from; - } - - public From(String from, String alias) { - this(from); - this.alias = alias; - } - - public String getIndex() { - return index; - } - - public void setIndex(String index) { - this.index = index; - } - - public String getAlias() { - return alias; - } - - public void setAlias(String alias) { - this.alias = alias; - } - - @Override - public String toString() { - StringBuilder str = new StringBuilder(index); - if (alias != null) { - str.append(" AS ").append(alias); - } - return str.toString(); + private String index; + private String alias; + + /** + * Extract index and type from the 'from' string + * + * @param from The part after the FROM keyword. 
+ */ + public From(String from) { + index = from; + } + + public From(String from, String alias) { + this(from); + this.alias = alias; + } + + public String getIndex() { + return index; + } + + public void setIndex(String index) { + this.index = index; + } + + public String getAlias() { + return alias; + } + + public void setAlias(String alias) { + this.alias = alias; + } + + @Override + public String toString() { + StringBuilder str = new StringBuilder(index); + if (alias != null) { + str.append(" AS ").append(alias); } + return str.toString(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Having.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Having.java index 30cfba4c7a..393ee0aa6e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Having.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Having.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import static java.util.stream.Collectors.joining; @@ -27,196 +26,188 @@ /** * Domain object for HAVING clause in SQL which covers both the parsing and explain logic. - *

- * Responsibilities: - * 1. Parsing: parse conditions out during initialization - * 2. Explain: translate conditions to OpenSearch query DSL (Bucket Selector Aggregation) + * + *

Responsibilities: 1. Parsing: parse conditions out during initialization 2. Explain: translate + * conditions to OpenSearch query DSL (Bucket Selector Aggregation) */ public class Having { - private static final String BUCKET_SELECTOR_NAME = "bucket_filter"; - private static final String PARAMS = "params."; - private static final String AND = " && "; - private static final String OR = " || "; - - /** - * Conditions parsed out of HAVING clause - */ - private final List conditions; - - private HavingParser havingParser; - - public List getHavingFields() { - return havingParser.getHavingFields(); + private static final String BUCKET_SELECTOR_NAME = "bucket_filter"; + private static final String PARAMS = "params."; + private static final String AND = " && "; + private static final String OR = " || "; + + /** Conditions parsed out of HAVING clause */ + private final List conditions; + + private HavingParser havingParser; + + public List getHavingFields() { + return havingParser.getHavingFields(); + } + + /** + * Construct by HAVING expression + * + * @param havingExpr having expression + * @param parser where parser + * @throws SqlParseException exception thrown by where parser + */ + public Having(SQLExpr havingExpr, WhereParser parser) throws SqlParseException { + havingParser = new HavingParser(parser); + conditions = parseHavingExprToConditions(havingExpr, havingParser); + } + + public List getConditions() { + return conditions; + } + + /** + * Construct by GROUP BY expression with null check + * + * @param groupByExpr group by expression + * @param parser where parser + * @throws SqlParseException exception thrown by where parser + */ + public Having(SQLSelectGroupByClause groupByExpr, WhereParser parser) throws SqlParseException { + this(groupByExpr == null ? null : groupByExpr.getHaving(), parser); + } + + /** + * Add Bucket Selector Aggregation under group by aggregation with sibling of aggregation of + * fields in SELECT. 
OpenSearch makes sure that all sibling runs before bucket selector + * aggregation. + * + * @param groupByAgg aggregation builder for GROUP BY clause + * @param fields fields in SELECT clause + * @throws SqlParseException exception thrown for unknown expression + */ + public void explain(AggregationBuilder groupByAgg, List fields) throws SqlParseException { + if (groupByAgg == null || conditions.isEmpty()) { + return; } - /** - * Construct by HAVING expression - * - * @param havingExpr having expression - * @param parser where parser - * @throws SqlParseException exception thrown by where parser - */ - public Having(SQLExpr havingExpr, WhereParser parser) throws SqlParseException { - havingParser = new HavingParser(parser); - conditions = parseHavingExprToConditions(havingExpr, havingParser); + // parsing the fields from SELECT and HAVING clause + groupByAgg.subAggregation( + bucketSelector( + BUCKET_SELECTOR_NAME, + contextForFieldsInSelect(Iterables.concat(fields, getHavingFields())), + explainConditions())); + } + + private List parseHavingExprToConditions(SQLExpr havingExpr, HavingParser parser) + throws SqlParseException { + if (havingExpr == null) { + return Collections.emptyList(); } - public List getConditions() { - return conditions; + Where where = Where.newInstance(); + parser.parseWhere(havingExpr, where); + return where.getWheres(); + } + + private Map contextForFieldsInSelect(Iterable fields) { + Map context = new HashMap<>(); + for (Field field : fields) { + if (field instanceof MethodField) { + // It's required to add to context even if alias in SELECT is exactly same name as that in + // script + context.put( + field.getAlias(), bucketsPath(field.getAlias(), ((MethodField) field).getParams())); + } } - - /** - * Construct by GROUP BY expression with null check - * - * @param groupByExpr group by expression - * @param parser where parser - * @throws SqlParseException exception thrown by where parser - */ - public Having(SQLSelectGroupByClause 
groupByExpr, WhereParser parser) throws SqlParseException { - this(groupByExpr == null ? null : groupByExpr.getHaving(), parser); + return context; + } + + private Script explainConditions() throws SqlParseException { + return new Script(doExplain(conditions)); + } + + /** + * Explain conditions recursively. Example: HAVING c >= 2 OR NOT (a > 20 AND c <= 10 OR a < 1) OR + * a < 5 Object: Where(?: [ Condition(?:c>=2), Where(or: [ Where(?:a<=20), Where(or:c>10), + * Where(and:a>=1)], ]), Condition(or:a<5) ]) + * + *

Note: a) Where(connector : condition expression). b) Condition is a subclass of Where. c) + * connector=? means it doesn't matter for first condition in the list + * + * @param wheres conditions + * @return painless script string + * @throws SqlParseException unknown type of expression other than identifier and value + */ + private String doExplain(List wheres) throws SqlParseException { + if (wheres == null || wheres.isEmpty()) { + return ""; } - /** - * Add Bucket Selector Aggregation under group by aggregation with sibling of aggregation of fields in SELECT. - * OpenSearch makes sure that all sibling runs before bucket selector aggregation. - * - * @param groupByAgg aggregation builder for GROUP BY clause - * @param fields fields in SELECT clause - * @throws SqlParseException exception thrown for unknown expression - */ - public void explain(AggregationBuilder groupByAgg, List fields) throws SqlParseException { - if (groupByAgg == null || conditions.isEmpty()) { - return; - } - - // parsing the fields from SELECT and HAVING clause - groupByAgg.subAggregation(bucketSelector(BUCKET_SELECTOR_NAME, - contextForFieldsInSelect(Iterables.concat(fields, getHavingFields())), - explainConditions())); + StringBuilder script = new StringBuilder(); + for (Where cond : wheres) { + if (script.length() > 0) { + script.append(cond.getConn() == Where.CONN.AND ? 
AND : OR); + } + + if (cond instanceof Condition) { + script.append(createScript((Condition) cond)); + } else { + script.append('(').append(doExplain(cond.getWheres())).append(')'); + } } - - private List parseHavingExprToConditions(SQLExpr havingExpr, HavingParser parser) - throws SqlParseException { - if (havingExpr == null) { - return Collections.emptyList(); + return script.toString(); + } + + private String createScript(Condition cond) throws SqlParseException { + String name = cond.getName(); + Object value = cond.getValue(); + switch (cond.getOPERATOR()) { + case EQ: + case GT: + case LT: + case GTE: + case LTE: + case IS: + case ISN: + return expr(name, cond.getOpertatorSymbol(), value); + case N: + return expr(name, "!=", value); + case BETWEEN: + { + Object[] values = (Object[]) value; + return expr(name, ">=", values[0]) + AND + expr(name, "<=", values[1]); } - - Where where = Where.newInstance(); - parser.parseWhere(havingExpr, where); - return where.getWheres(); - } - - private Map contextForFieldsInSelect(Iterable fields) { - Map context = new HashMap<>(); - for (Field field : fields) { - if (field instanceof MethodField) { - // It's required to add to context even if alias in SELECT is exactly same name as that in script - context.put(field.getAlias(), bucketsPath(field.getAlias(), ((MethodField) field).getParams())); - } + case NBETWEEN: + { + Object[] values = (Object[]) value; + return expr(name, "<", values[0]) + OR + expr(name, ">", values[1]); } - return context; - } - - private Script explainConditions() throws SqlParseException { - return new Script(doExplain(conditions)); + case IN: + return Arrays.stream((Object[]) value) + .map(val -> expr(name, "==", val)) + .collect(joining(OR)); + case NIN: + return Arrays.stream((Object[]) value) + .map(val -> expr(name, "!=", val)) + .collect(joining(AND)); + default: + throw new SqlParseException( + "Unsupported operation in HAVING clause: " + cond.getOPERATOR()); } - - /** - * Explain conditions 
recursively. - * Example: HAVING c >= 2 OR NOT (a > 20 AND c <= 10 OR a < 1) OR a < 5 - * Object: Where(?: - * [ - * Condition(?:c>=2), - * Where(or: - * [ - * Where(?:a<=20), Where(or:c>10), Where(and:a>=1)], - * ]), - * Condition(or:a<5) - * ]) - *

- * Note: a) Where(connector : condition expression). - * b) Condition is a subclass of Where. - * c) connector=? means it doesn't matter for first condition in the list - * - * @param wheres conditions - * @return painless script string - * @throws SqlParseException unknown type of expression other than identifier and value - */ - private String doExplain(List wheres) throws SqlParseException { - if (wheres == null || wheres.isEmpty()) { - return ""; - } - - StringBuilder script = new StringBuilder(); - for (Where cond : wheres) { - if (script.length() > 0) { - script.append(cond.getConn() == Where.CONN.AND ? AND : OR); - } - - if (cond instanceof Condition) { - script.append(createScript((Condition) cond)); - } else { - script.append('('). - append(doExplain(cond.getWheres())). - append(')'); - } - } - return script.toString(); - } - - private String createScript(Condition cond) throws SqlParseException { - String name = cond.getName(); - Object value = cond.getValue(); - switch (cond.getOPERATOR()) { - case EQ: - case GT: - case LT: - case GTE: - case LTE: - case IS: - case ISN: - return expr(name, cond.getOpertatorSymbol(), value); - case N: - return expr(name, "!=", value); - case BETWEEN: { - Object[] values = (Object[]) value; - return expr(name, ">=", values[0]) + AND + expr(name, "<=", values[1]); - } - case NBETWEEN: { - Object[] values = (Object[]) value; - return expr(name, "<", values[0]) + OR + expr(name, ">", values[1]); - } - case IN: - return Arrays.stream((Object[]) value). - map(val -> expr(name, "==", val)). - collect(joining(OR)); - case NIN: - return Arrays.stream((Object[]) value). - map(val -> expr(name, "!=", val)). - collect(joining(AND)); - default: - throw new SqlParseException("Unsupported operation in HAVING clause: " + cond.getOPERATOR()); - } - } - - private String expr(String name, String operator, Object value) { - return String.join(" ", PARAMS + name, operator, value.toString()); - } - - /** - * Build the buckets_path. 
- * If the field is nested field, using the bucket path. - * else using the alias. - */ - private String bucketsPath(String alias, List kvValueList) { - if (kvValueList.size() == 1) { - KVValue kvValue = kvValueList.get(0); - if (StringUtils.equals(kvValue.key, "nested") - && kvValue.value instanceof NestedType) { - return ((NestedType) kvValue.value).getBucketPath(); - } - } - return alias; + } + + private String expr(String name, String operator, Object value) { + return String.join(" ", PARAMS + name, operator, value.toString()); + } + + /** + * Build the buckets_path. If the field is nested field, using the bucket path. else using the + * alias. + */ + private String bucketsPath(String alias, List kvValueList) { + if (kvValueList.size() == 1) { + KVValue kvValue = kvValueList.get(0); + if (StringUtils.equals(kvValue.key, "nested") && kvValue.value instanceof NestedType) { + return ((NestedType) kvValue.value).getBucketPath(); + } } + return alias; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/IndexStatement.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/IndexStatement.java index e97a482b40..2a5be5728c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/IndexStatement.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/IndexStatement.java @@ -3,89 +3,87 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; -/** - * Class used to differentiate SHOW and DESCRIBE statements - */ +/** Class used to differentiate SHOW and DESCRIBE statements */ public class IndexStatement implements QueryStatement { - private StatementType statementType; - private String query; - private String indexPattern; - private String columnPattern; - - public IndexStatement(StatementType statementType, String query) { - this.statementType = statementType; - this.query = query; - - parseQuery(); - } - - private void parseQuery() { - String[] statement = query.split(" "); - - int tokenLength = 
statement.length; - try { - for (int i = 1; i < tokenLength; i++) { - switch (statement[i].toUpperCase()) { - case "TABLES": - if (i + 1 < tokenLength && statement[i + 1].equalsIgnoreCase("LIKE")) { - if (i + 2 < tokenLength) { - indexPattern = replaceWildcard(statement[i + 2]); - i += 2; - } - } - break; - case "COLUMNS": - if (i + 1 < tokenLength && statement[i + 1].equalsIgnoreCase("LIKE")) { - if (i + 2 < tokenLength) { - columnPattern = replaceWildcard(statement[i + 2]); - i += 2; - } - } - break; - } + private StatementType statementType; + private String query; + private String indexPattern; + private String columnPattern; + + public IndexStatement(StatementType statementType, String query) { + this.statementType = statementType; + this.query = query; + + parseQuery(); + } + + private void parseQuery() { + String[] statement = query.split(" "); + + int tokenLength = statement.length; + try { + for (int i = 1; i < tokenLength; i++) { + switch (statement[i].toUpperCase()) { + case "TABLES": + if (i + 1 < tokenLength && statement[i + 1].equalsIgnoreCase("LIKE")) { + if (i + 2 < tokenLength) { + indexPattern = replaceWildcard(statement[i + 2]); + i += 2; + } } - - if (indexPattern == null) { - throw new IllegalArgumentException(); + break; + case "COLUMNS": + if (i + 1 < tokenLength && statement[i + 1].equalsIgnoreCase("LIKE")) { + if (i + 2 < tokenLength) { + columnPattern = replaceWildcard(statement[i + 2]); + i += 2; + } } - } catch (Exception e) { - throw new IllegalArgumentException("Expected syntax example: " + syntaxString(), e); + break; } - } + } - private String replaceWildcard(String str) { - return str.replace("%", ".*").replace("_", "."); + if (indexPattern == null) { + throw new IllegalArgumentException(); + } + } catch (Exception e) { + throw new IllegalArgumentException("Expected syntax example: " + syntaxString(), e); } + } - private String syntaxString() { - if (statementType.equals(StatementType.SHOW)) { - return "'SHOW TABLES LIKE '"; - } 
else { - return "'DESCRIBE TABLES LIKE
[COLUMNS LIKE ]'"; - } - } + private String replaceWildcard(String str) { + return str.replace("%", ".*").replace("_", "."); + } - public StatementType getStatementType() { - return statementType; + private String syntaxString() { + if (statementType.equals(StatementType.SHOW)) { + return "'SHOW TABLES LIKE
'"; + } else { + return "'DESCRIBE TABLES LIKE
[COLUMNS LIKE ]'"; } + } - public String getQuery() { - return query; - } + public StatementType getStatementType() { + return statementType; + } - public String getIndexPattern() { - return indexPattern; - } + public String getQuery() { + return query; + } - public String getColumnPattern() { - return columnPattern; - } + public String getIndexPattern() { + return indexPattern; + } - public enum StatementType { - SHOW, DESCRIBE - } + public String getColumnPattern() { + return columnPattern; + } + + public enum StatementType { + SHOW, + DESCRIBE + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/JoinSelect.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/JoinSelect.java index c77df6e9ad..211b33c68a 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/JoinSelect.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/JoinSelect.java @@ -3,85 +3,78 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import com.alibaba.druid.sql.ast.statement.SQLJoinTableSource; import java.util.List; import org.opensearch.sql.legacy.domain.hints.Hint; -/** - * Created by Eliran on 20/8/2015. - */ +/** Created by Eliran on 20/8/2015. 
*/ public class JoinSelect extends Query { + private TableOnJoinSelect firstTable; + private TableOnJoinSelect secondTable; + private Where connectedWhere; + private List hints; + private List connectedConditions; + private int totalLimit; - private TableOnJoinSelect firstTable; - private TableOnJoinSelect secondTable; - private Where connectedWhere; - private List hints; - private List connectedConditions; - private int totalLimit; - - private final int DEAFULT_NUM_OF_RESULTS = 200; - - private SQLJoinTableSource.JoinType joinType; - - - public JoinSelect() { - firstTable = new TableOnJoinSelect(); - secondTable = new TableOnJoinSelect(); + private final int DEAFULT_NUM_OF_RESULTS = 200; - totalLimit = DEAFULT_NUM_OF_RESULTS; - } + private SQLJoinTableSource.JoinType joinType; + public JoinSelect() { + firstTable = new TableOnJoinSelect(); + secondTable = new TableOnJoinSelect(); - public Where getConnectedWhere() { - return connectedWhere; - } + totalLimit = DEAFULT_NUM_OF_RESULTS; + } - public void setConnectedWhere(Where connectedWhere) { - this.connectedWhere = connectedWhere; - } + public Where getConnectedWhere() { + return connectedWhere; + } - public TableOnJoinSelect getFirstTable() { - return firstTable; - } + public void setConnectedWhere(Where connectedWhere) { + this.connectedWhere = connectedWhere; + } - public TableOnJoinSelect getSecondTable() { - return secondTable; - } + public TableOnJoinSelect getFirstTable() { + return firstTable; + } + public TableOnJoinSelect getSecondTable() { + return secondTable; + } - public SQLJoinTableSource.JoinType getJoinType() { - return joinType; - } + public SQLJoinTableSource.JoinType getJoinType() { + return joinType; + } - public void setJoinType(SQLJoinTableSource.JoinType joinType) { - this.joinType = joinType; - } + public void setJoinType(SQLJoinTableSource.JoinType joinType) { + this.joinType = joinType; + } - public List getHints() { - return hints; - } + public List getHints() { + return hints; + } - 
public void setHints(List hints) { - this.hints = hints; - } + public void setHints(List hints) { + this.hints = hints; + } - public int getTotalLimit() { - return totalLimit; - } + public int getTotalLimit() { + return totalLimit; + } - public List getConnectedConditions() { - return connectedConditions; - } + public List getConnectedConditions() { + return connectedConditions; + } - public void setConnectedConditions(List connectedConditions) { - this.connectedConditions = connectedConditions; - } + public void setConnectedConditions(List connectedConditions) { + this.connectedConditions = connectedConditions; + } - public void setTotalLimit(int totalLimit) { - this.totalLimit = totalLimit; - } + public void setTotalLimit(int totalLimit) { + this.totalLimit = totalLimit; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/KVValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/KVValue.java index 10e2ad3d12..d864cbac12 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/KVValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/KVValue.java @@ -3,30 +3,29 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; public class KVValue implements Cloneable { - public String key; - public Object value; + public String key; + public Object value; - public KVValue(Object value) { - this.value = value; - } + public KVValue(Object value) { + this.value = value; + } - public KVValue(String key, Object value) { - if (key != null) { - this.key = key.replace("'", ""); - } - this.value = value; + public KVValue(String key, Object value) { + if (key != null) { + this.key = key.replace("'", ""); } + this.value = value; + } - @Override - public String toString() { - if (key == null) { - return value.toString(); - } else { - return key + "=" + value; - } + @Override + public String toString() { + if (key == null) { + return value.toString(); + } else { + return key + "=" + value; } + } } 
diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/MethodField.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/MethodField.java index 4529c4344c..45d6d1053e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/MethodField.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/MethodField.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import com.alibaba.druid.sql.ast.expr.SQLAggregateOption; @@ -14,96 +13,94 @@ import org.opensearch.sql.legacy.utils.Util; /** - * - * * @author ansj */ public class MethodField extends Field { - private List params = null; - - public MethodField(String name, List params, SQLAggregateOption option, String alias) { - super(name, alias); - this.params = params; - this.option = option; - if (alias == null || alias.trim().length() == 0) { - Map paramsAsMap = this.getParamsAsMap(); - if (paramsAsMap.containsKey("alias")) { - this.setAlias(paramsAsMap.get("alias").toString()); - } else { - this.setAlias(this.toString()); - } - } + private List params = null; + + public MethodField(String name, List params, SQLAggregateOption option, String alias) { + super(name, alias); + this.params = params; + this.option = option; + if (alias == null || alias.trim().length() == 0) { + Map paramsAsMap = this.getParamsAsMap(); + if (paramsAsMap.containsKey("alias")) { + this.setAlias(paramsAsMap.get("alias").toString()); + } else { + this.setAlias(this.toString()); + } } + } - public List getParams() { - return params; - } + public List getParams() { + return params; + } - public Map getParamsAsMap() { - Map paramsAsMap = new HashMap<>(); - if (this.params == null) { - return paramsAsMap; - } - for (KVValue kvValue : this.params) { - paramsAsMap.put(kvValue.key, kvValue.value); - } - return paramsAsMap; + public Map getParamsAsMap() { + Map paramsAsMap = new HashMap<>(); + if (this.params == null) { + return paramsAsMap; } - - @Override - public 
String toString() { - if (option != null) { - return this.name + "(" + option + " " + Util.joiner(params, ",") + ")"; - } - return this.name + "(" + Util.joiner(params, ",") + ")"; + for (KVValue kvValue : this.params) { + paramsAsMap.put(kvValue.key, kvValue.value); } + return paramsAsMap; + } - @Override - public boolean isNested() { - Map paramsAsMap = this.getParamsAsMap(); - return paramsAsMap.containsKey("nested") || paramsAsMap.containsKey("reverse_nested"); + @Override + public String toString() { + if (option != null) { + return this.name + "(" + option + " " + Util.joiner(params, ",") + ")"; } - - @Override - public boolean isReverseNested() { - return this.getParamsAsMap().containsKey("reverse_nested"); + return this.name + "(" + Util.joiner(params, ",") + ")"; + } + + @Override + public boolean isNested() { + Map paramsAsMap = this.getParamsAsMap(); + return paramsAsMap.containsKey("nested") || paramsAsMap.containsKey("reverse_nested"); + } + + @Override + public boolean isReverseNested() { + return this.getParamsAsMap().containsKey("reverse_nested"); + } + + @Override + public String getNestedPath() { + if (!this.isNested()) { + return null; } - - @Override - public String getNestedPath() { - if (!this.isNested()) { - return null; - } - if (this.isReverseNested()) { - String reverseNestedPath = this.getParamsAsMap().get("reverse_nested").toString(); - return reverseNestedPath.isEmpty() ? null : reverseNestedPath; - } - - // Fix bug: NestedType.toString() isn't implemented which won't return desired nested path - Object nestedField = getParamsAsMap().get("nested"); - if (nestedField instanceof NestedType) { - return ((NestedType) nestedField).path; - } - return nestedField.toString(); + if (this.isReverseNested()) { + String reverseNestedPath = this.getParamsAsMap().get("reverse_nested").toString(); + return reverseNestedPath.isEmpty() ? 
null : reverseNestedPath; } - @Override - public boolean isChildren() { - Map paramsAsMap = this.getParamsAsMap(); - return paramsAsMap.containsKey("children"); + // Fix bug: NestedType.toString() isn't implemented which won't return desired nested path + Object nestedField = getParamsAsMap().get("nested"); + if (nestedField instanceof NestedType) { + return ((NestedType) nestedField).path; } - - @Override - public String getChildType() { - if (!this.isChildren()) { - return null; - } - - return this.getParamsAsMap().get("children").toString(); + return nestedField.toString(); + } + + @Override + public boolean isChildren() { + Map paramsAsMap = this.getParamsAsMap(); + return paramsAsMap.containsKey("children"); + } + + @Override + public String getChildType() { + if (!this.isChildren()) { + return null; } - @Override - public boolean isScriptField() { - return "script".equals(getName()); - } + return this.getParamsAsMap().get("children").toString(); + } + + @Override + public boolean isScriptField() { + return "script".equals(getName()); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Order.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Order.java index 2a9be3ce91..f593d6c428 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Order.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Order.java @@ -3,56 +3,53 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; /** - * - * * @author ansj */ public class Order { - private String nestedPath; - private String name; - private String type; - private Field sortField; - - public boolean isScript() { - return sortField != null && sortField.isScriptField(); - } - - public Order(String nestedPath, String name, String type, Field sortField) { - this.nestedPath = nestedPath; - this.name = name; - this.type = type; - this.sortField = sortField; - } - - public String getNestedPath() { - return nestedPath; - } - - public void 
setNestedPath(String nestedPath) { - this.nestedPath = nestedPath; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getType() { - return type; - } - - public void setType(String type) { - this.type = type; - } - - public Field getSortField() { - return sortField; - } + private String nestedPath; + private String name; + private String type; + private Field sortField; + + public boolean isScript() { + return sortField != null && sortField.isScriptField(); + } + + public Order(String nestedPath, String name, String type, Field sortField) { + this.nestedPath = nestedPath; + this.name = name; + this.type = type; + this.sortField = sortField; + } + + public String getNestedPath() { + return nestedPath; + } + + public void setNestedPath(String nestedPath) { + this.nestedPath = nestedPath; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public Field getSortField() { + return sortField; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Paramer.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Paramer.java index 6cdf0148a8..38ca556199 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Paramer.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Paramer.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import com.alibaba.druid.sql.ast.SQLExpr; @@ -25,163 +24,164 @@ import org.opensearch.sql.legacy.exception.SqlParseException; import org.opensearch.sql.legacy.utils.Util; - public class Paramer { - public String analysis; - public Float boost; - public String value; - public Integer slop; - - public Map fieldsBoosts = new HashMap<>(); - public String type; - public Float tieBreaker; - public 
Operator operator; - - public String default_field; - - public static Paramer parseParamer(SQLMethodInvokeExpr method) throws SqlParseException { - Paramer instance = new Paramer(); - List parameters = method.getParameters(); - for (SQLExpr expr : parameters) { - if (expr instanceof SQLCharExpr) { - if (instance.value == null) { - instance.value = ((SQLCharExpr) expr).getText(); - } else { - instance.analysis = ((SQLCharExpr) expr).getText(); - } - } else if (expr instanceof SQLNumericLiteralExpr) { - instance.boost = ((SQLNumericLiteralExpr) expr).getNumber().floatValue(); - } else if (expr instanceof SQLBinaryOpExpr) { - SQLBinaryOpExpr sqlExpr = (SQLBinaryOpExpr) expr; - switch (Util.expr2Object(sqlExpr.getLeft()).toString()) { - case "query": - instance.value = Util.expr2Object(sqlExpr.getRight()).toString(); - break; - case "analyzer": - instance.analysis = Util.expr2Object(sqlExpr.getRight()).toString(); - break; - case "boost": - instance.boost = Float.parseFloat(Util.expr2Object(sqlExpr.getRight()).toString()); - break; - case "slop": - instance.slop = Integer.parseInt(Util.expr2Object(sqlExpr.getRight()).toString()); - break; - - case "fields": - int index; - for (String f : Strings.splitStringByCommaToArray( - Util.expr2Object(sqlExpr.getRight()).toString())) { - index = f.lastIndexOf('^'); - if (-1 < index) { - instance.fieldsBoosts.put(f.substring(0, index), - Float.parseFloat(f.substring(index + 1))); - } else { - instance.fieldsBoosts.put(f, 1.0F); - } - } - break; - case "type": - instance.type = Util.expr2Object(sqlExpr.getRight()).toString(); - break; - case "tie_breaker": - instance.tieBreaker = Float.parseFloat(Util.expr2Object(sqlExpr.getRight()).toString()); - break; - case "operator": - instance.operator = Operator.fromString(Util.expr2Object(sqlExpr.getRight()).toString()); - break; - - case "default_field": - instance.default_field = Util.expr2Object(sqlExpr.getRight()).toString(); - break; - - default: - break; - } + public String analysis; 
+ public Float boost; + public String value; + public Integer slop; + + public Map fieldsBoosts = new HashMap<>(); + public String type; + public Float tieBreaker; + public Operator operator; + + public String default_field; + + public static Paramer parseParamer(SQLMethodInvokeExpr method) throws SqlParseException { + Paramer instance = new Paramer(); + List parameters = method.getParameters(); + for (SQLExpr expr : parameters) { + if (expr instanceof SQLCharExpr) { + if (instance.value == null) { + instance.value = ((SQLCharExpr) expr).getText(); + } else { + instance.analysis = ((SQLCharExpr) expr).getText(); + } + } else if (expr instanceof SQLNumericLiteralExpr) { + instance.boost = ((SQLNumericLiteralExpr) expr).getNumber().floatValue(); + } else if (expr instanceof SQLBinaryOpExpr) { + SQLBinaryOpExpr sqlExpr = (SQLBinaryOpExpr) expr; + switch (Util.expr2Object(sqlExpr.getLeft()).toString()) { + case "query": + instance.value = Util.expr2Object(sqlExpr.getRight()).toString(); + break; + case "analyzer": + instance.analysis = Util.expr2Object(sqlExpr.getRight()).toString(); + break; + case "boost": + instance.boost = Float.parseFloat(Util.expr2Object(sqlExpr.getRight()).toString()); + break; + case "slop": + instance.slop = Integer.parseInt(Util.expr2Object(sqlExpr.getRight()).toString()); + break; + + case "fields": + int index; + for (String f : + Strings.splitStringByCommaToArray( + Util.expr2Object(sqlExpr.getRight()).toString())) { + index = f.lastIndexOf('^'); + if (-1 < index) { + instance.fieldsBoosts.put( + f.substring(0, index), Float.parseFloat(f.substring(index + 1))); + } else { + instance.fieldsBoosts.put(f, 1.0F); + } } - } - - return instance; + break; + case "type": + instance.type = Util.expr2Object(sqlExpr.getRight()).toString(); + break; + case "tie_breaker": + instance.tieBreaker = Float.parseFloat(Util.expr2Object(sqlExpr.getRight()).toString()); + break; + case "operator": + instance.operator = + 
Operator.fromString(Util.expr2Object(sqlExpr.getRight()).toString()); + break; + + case "default_field": + instance.default_field = Util.expr2Object(sqlExpr.getRight()).toString(); + break; + + default: + break; + } + } } - public static ToXContent fullParamer(MatchPhraseQueryBuilder query, Paramer paramer) { - if (paramer.analysis != null) { - query.analyzer(paramer.analysis); - } + return instance; + } - if (paramer.boost != null) { - query.boost(paramer.boost); - } + public static ToXContent fullParamer(MatchPhraseQueryBuilder query, Paramer paramer) { + if (paramer.analysis != null) { + query.analyzer(paramer.analysis); + } - if (paramer.slop != null) { - query.slop(paramer.slop); - } + if (paramer.boost != null) { + query.boost(paramer.boost); + } - return query; + if (paramer.slop != null) { + query.slop(paramer.slop); } - public static ToXContent fullParamer(MatchQueryBuilder query, Paramer paramer) { - if (paramer.analysis != null) { - query.analyzer(paramer.analysis); - } + return query; + } - if (paramer.boost != null) { - query.boost(paramer.boost); - } - return query; + public static ToXContent fullParamer(MatchQueryBuilder query, Paramer paramer) { + if (paramer.analysis != null) { + query.analyzer(paramer.analysis); } - public static ToXContent fullParamer(WildcardQueryBuilder query, Paramer paramer) { - if (paramer.boost != null) { - query.boost(paramer.boost); - } - return query; + if (paramer.boost != null) { + query.boost(paramer.boost); } + return query; + } - public static ToXContent fullParamer(QueryStringQueryBuilder query, Paramer paramer) { - if (paramer.analysis != null) { - query.analyzer(paramer.analysis); - } + public static ToXContent fullParamer(WildcardQueryBuilder query, Paramer paramer) { + if (paramer.boost != null) { + query.boost(paramer.boost); + } + return query; + } - if (paramer.boost != null) { - query.boost(paramer.boost); - } + public static ToXContent fullParamer(QueryStringQueryBuilder query, Paramer paramer) { + if 
(paramer.analysis != null) { + query.analyzer(paramer.analysis); + } - if (paramer.slop != null) { - query.phraseSlop(paramer.slop); - } + if (paramer.boost != null) { + query.boost(paramer.boost); + } - if (paramer.default_field != null) { - query.defaultField(paramer.default_field); - } + if (paramer.slop != null) { + query.phraseSlop(paramer.slop); + } - return query; + if (paramer.default_field != null) { + query.defaultField(paramer.default_field); } - public static ToXContent fullParamer(MultiMatchQueryBuilder query, Paramer paramer) { - if (paramer.analysis != null) { - query.analyzer(paramer.analysis); - } + return query; + } - if (paramer.boost != null) { - query.boost(paramer.boost); - } + public static ToXContent fullParamer(MultiMatchQueryBuilder query, Paramer paramer) { + if (paramer.analysis != null) { + query.analyzer(paramer.analysis); + } - if (paramer.slop != null) { - query.slop(paramer.slop); - } + if (paramer.boost != null) { + query.boost(paramer.boost); + } - if (paramer.type != null) { - query.type(paramer.type); - } + if (paramer.slop != null) { + query.slop(paramer.slop); + } - if (paramer.tieBreaker != null) { - query.tieBreaker(paramer.tieBreaker); - } + if (paramer.type != null) { + query.type(paramer.type); + } - if (paramer.operator != null) { - query.operator(paramer.operator); - } + if (paramer.tieBreaker != null) { + query.tieBreaker(paramer.tieBreaker); + } - return query; + if (paramer.operator != null) { + query.operator(paramer.operator); } + + return query; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Query.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Query.java index b0538591b8..6f891e7fc5 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Query.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Query.java @@ -3,45 +3,39 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import java.util.ArrayList; import 
java.util.List; -/** - * Represents abstract query. every query - * has indexes, types, and where clause. - */ +/** Represents abstract query. every query has indexes, types, and where clause. */ public abstract class Query implements QueryStatement { - private Where where = null; - private List from = new ArrayList<>(); - - - public Where getWhere() { - return this.where; - } - - public void setWhere(Where where) { - this.where = where; - } - - public List getFrom() { - return from; - } - - - /** - * Get the indexes the query refer to. - * - * @return list of strings, the indexes names - */ - public String[] getIndexArr() { - String[] indexArr = new String[this.from.size()]; - for (int i = 0; i < indexArr.length; i++) { - indexArr[i] = this.from.get(i).getIndex(); - } - return indexArr; + private Where where = null; + private List from = new ArrayList<>(); + + public Where getWhere() { + return this.where; + } + + public void setWhere(Where where) { + this.where = where; + } + + public List getFrom() { + return from; + } + + /** + * Get the indexes the query refer to. + * + * @return list of strings, the indexes names + */ + public String[] getIndexArr() { + String[] indexArr = new String[this.from.size()]; + for (int i = 0; i < indexArr.length; i++) { + indexArr[i] = this.from.get(i).getIndex(); } + return indexArr; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/QueryActionRequest.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/QueryActionRequest.java index f13e053d92..f536e3ad6f 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/QueryActionRequest.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/QueryActionRequest.java @@ -3,20 +3,17 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import lombok.Getter; import lombok.RequiredArgsConstructor; import org.opensearch.sql.legacy.executor.Format; -/** - * The definition of QueryActionRequest. 
- */ +/** The definition of QueryActionRequest. */ @Getter @RequiredArgsConstructor public class QueryActionRequest { - private final String sql; - private final ColumnTypeProvider typeProvider; - private final Format format; + private final String sql; + private final ColumnTypeProvider typeProvider; + private final Format format; } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/QueryStatement.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/QueryStatement.java index 26c0b07517..71fe64906a 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/QueryStatement.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/QueryStatement.java @@ -3,11 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; -/** - * Identifier interface used to encompass Query and IndexStatements - */ -public interface QueryStatement { -} +/** Identifier interface used to encompass Query and IndexStatements */ +public interface QueryStatement {} diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/ScriptMethodField.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/ScriptMethodField.java index bdc42b4ff3..bb4d17d897 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/ScriptMethodField.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/ScriptMethodField.java @@ -3,29 +3,27 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import com.alibaba.druid.sql.ast.expr.SQLAggregateOption; import java.util.List; -/** - * Stores information about function name for script fields - */ +/** Stores information about function name for script fields */ public class ScriptMethodField extends MethodField { - private final String functionName; + private final String functionName; - public ScriptMethodField(String functionName, List params, SQLAggregateOption option, String alias) { - super("script", params, option, alias); - 
this.functionName = functionName; - } + public ScriptMethodField( + String functionName, List params, SQLAggregateOption option, String alias) { + super("script", params, option, alias); + this.functionName = functionName; + } - public String getFunctionName() { - return functionName; - } + public String getFunctionName() { + return functionName; + } - @Override - public boolean isScriptField() { - return true; - } + @Override + public boolean isScriptField() { + return true; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/SearchResult.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/SearchResult.java index 5b7b73a910..e951c84961 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/SearchResult.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/SearchResult.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import java.util.ArrayList; @@ -29,128 +28,120 @@ import org.opensearch.sql.legacy.exception.SqlParseException; public class SearchResult { - /** - * - */ - private List> results; - - private long total; - - double maxScore = 0; - - public SearchResult(SearchResponse resp) { - SearchHits hits = resp.getHits(); - this.total = Optional.ofNullable(hits.getTotalHits()).map(totalHits -> totalHits.value).orElse(0L); - results = new ArrayList<>(hits.getHits().length); - for (SearchHit searchHit : hits.getHits()) { - if (searchHit.getSourceAsMap() != null) { - results.add(searchHit.getSourceAsMap()); - } else if (searchHit.getFields() != null) { - Map fields = searchHit.getFields(); - results.add(toFieldsMap(fields)); - } - - } + /** */ + private List> results; + + private long total; + + double maxScore = 0; + + public SearchResult(SearchResponse resp) { + SearchHits hits = resp.getHits(); + this.total = + Optional.ofNullable(hits.getTotalHits()).map(totalHits -> totalHits.value).orElse(0L); + results = new ArrayList<>(hits.getHits().length); + for 
(SearchHit searchHit : hits.getHits()) { + if (searchHit.getSourceAsMap() != null) { + results.add(searchHit.getSourceAsMap()); + } else if (searchHit.getFields() != null) { + Map fields = searchHit.getFields(); + results.add(toFieldsMap(fields)); + } } + } - public SearchResult(SearchResponse resp, Select select) throws SqlParseException { - Aggregations aggs = resp.getAggregations(); - if (aggs.get("filter") != null) { - InternalFilter inf = aggs.get("filter"); - aggs = inf.getAggregations(); - } - if (aggs.get("group by") != null) { - InternalTerms terms = aggs.get("group by"); - Collection buckets = terms.getBuckets(); - this.total = buckets.size(); - results = new ArrayList<>(buckets.size()); - for (Bucket bucket : buckets) { - Map aggsMap = toAggsMap(bucket.getAggregations().getAsMap()); - aggsMap.put("docCount", bucket.getDocCount()); - results.add(aggsMap); - } - } else { - results = new ArrayList<>(1); - this.total = 1; - Map map = new HashMap<>(); - for (Aggregation aggregation : aggs) { - map.put(aggregation.getName(), covenValue(aggregation)); - } - results.add(map); - } - + public SearchResult(SearchResponse resp, Select select) throws SqlParseException { + Aggregations aggs = resp.getAggregations(); + if (aggs.get("filter") != null) { + InternalFilter inf = aggs.get("filter"); + aggs = inf.getAggregations(); } - - /** - * - * - * @param fields - * @return - */ - private Map toFieldsMap(Map fields) { - Map result = new HashMap<>(); - for (Entry entry : fields.entrySet()) { - if (entry.getValue().getValues().size() > 1) { - result.put(entry.getKey(), entry.getValue().getValues()); - } else { - result.put(entry.getKey(), entry.getValue().getValue()); - } - - } - return result; + if (aggs.get("group by") != null) { + InternalTerms terms = aggs.get("group by"); + Collection buckets = terms.getBuckets(); + this.total = buckets.size(); + results = new ArrayList<>(buckets.size()); + for (Bucket bucket : buckets) { + Map aggsMap = 
toAggsMap(bucket.getAggregations().getAsMap()); + aggsMap.put("docCount", bucket.getDocCount()); + results.add(aggsMap); + } + } else { + results = new ArrayList<>(1); + this.total = 1; + Map map = new HashMap<>(); + for (Aggregation aggregation : aggs) { + map.put(aggregation.getName(), covenValue(aggregation)); + } + results.add(map); } - - /** - * - * - * @param fields - * @return - * @throws SqlParseException - */ - private Map toAggsMap(Map fields) throws SqlParseException { - Map result = new HashMap<>(); - for (Entry entry : fields.entrySet()) { - result.put(entry.getKey(), covenValue(entry.getValue())); - } - return result; + } + + /** + * @param fields + * @return + */ + private Map toFieldsMap(Map fields) { + Map result = new HashMap<>(); + for (Entry entry : fields.entrySet()) { + if (entry.getValue().getValues().size() > 1) { + result.put(entry.getKey(), entry.getValue().getValues()); + } else { + result.put(entry.getKey(), entry.getValue().getValue()); + } } - - private Object covenValue(Aggregation value) throws SqlParseException { - if (value instanceof InternalNumericMetricsAggregation.SingleValue) { - return ((InternalNumericMetricsAggregation.SingleValue) value).value(); - } else if (value instanceof InternalValueCount) { - return ((InternalValueCount) value).getValue(); - } else if (value instanceof InternalTopHits) { - return (value); - } else if (value instanceof LongTerms) { - return value; - } else { - throw new SqlParseException("Unknown aggregation value type: " + value.getClass().getSimpleName()); - } + return result; + } + + /** + * @param fields + * @return + * @throws SqlParseException + */ + private Map toAggsMap(Map fields) throws SqlParseException { + Map result = new HashMap<>(); + for (Entry entry : fields.entrySet()) { + result.put(entry.getKey(), covenValue(entry.getValue())); } - - public List> getResults() { - return results; + return result; + } + + private Object covenValue(Aggregation value) throws SqlParseException { + if 
(value instanceof InternalNumericMetricsAggregation.SingleValue) { + return ((InternalNumericMetricsAggregation.SingleValue) value).value(); + } else if (value instanceof InternalValueCount) { + return ((InternalValueCount) value).getValue(); + } else if (value instanceof InternalTopHits) { + return (value); + } else if (value instanceof LongTerms) { + return value; + } else { + throw new SqlParseException( + "Unknown aggregation value type: " + value.getClass().getSimpleName()); } + } - public void setResults(List> results) { - this.results = results; - } + public List> getResults() { + return results; + } - public long getTotal() { - return total; - } + public void setResults(List> results) { + this.results = results; + } - public void setTotal(long total) { - this.total = total; - } + public long getTotal() { + return total; + } - public double getMaxScore() { - return maxScore; - } + public void setTotal(long total) { + this.total = total; + } - public void setMaxScore(double maxScore) { - this.maxScore = maxScore; - } + public double getMaxScore() { + return maxScore; + } + public void setMaxScore(double maxScore) { + this.maxScore = maxScore; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Select.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Select.java index cd600d856e..2faa8cc6e5 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Select.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Select.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import static com.alibaba.druid.sql.ast.statement.SQLJoinTableSource.JoinType; @@ -16,7 +15,6 @@ import org.opensearch.sql.legacy.domain.hints.Hint; import org.opensearch.sql.legacy.parser.SubQueryExpression; - /** * sql select * @@ -24,167 +22,169 @@ */ public class Select extends Query { - /** - * Using this functions will cause query to execute as aggregation. 
- */ - private static final Set AGGREGATE_FUNCTIONS = - ImmutableSet.of( - "SUM", "MAX", "MIN", "AVG", - "TOPHITS", "COUNT", "STATS", "EXTENDED_STATS", - "PERCENTILES", "SCRIPTED_METRIC" - ); - - private List hints = new ArrayList<>(); - private List fields = new ArrayList<>(); - private List> groupBys = new ArrayList<>(); - private Having having; - private List orderBys = new ArrayList<>(); - private int offset; - private Integer rowCount; - private boolean containsSubQueries; - private List subQueries; - private boolean selectAll = false; - private JoinType nestedJoinType = JoinType.COMMA; - - public boolean isQuery = false; - public boolean isAggregate = false; - - public static final int DEFAULT_LIMIT = 200; - - public Select() { - } - - public List getFields() { - return fields; - } - - public void setOffset(int offset) { - this.offset = offset; - } - - public void setRowCount(Integer rowCount) { - this.rowCount = rowCount; - } - - public void addGroupBy(Field field) { - List wrapper = new ArrayList<>(); - wrapper.add(field); - addGroupBy(wrapper); - } - - public void addGroupBy(List fields) { - isAggregate = true; - selectAll = false; - this.groupBys.add(fields); - } + /** Using this functions will cause query to execute as aggregation. 
*/ + private static final Set AGGREGATE_FUNCTIONS = + ImmutableSet.of( + "SUM", + "MAX", + "MIN", + "AVG", + "TOPHITS", + "COUNT", + "STATS", + "EXTENDED_STATS", + "PERCENTILES", + "SCRIPTED_METRIC"); + + private List hints = new ArrayList<>(); + private List fields = new ArrayList<>(); + private List> groupBys = new ArrayList<>(); + private Having having; + private List orderBys = new ArrayList<>(); + private int offset; + private Integer rowCount; + private boolean containsSubQueries; + private List subQueries; + private boolean selectAll = false; + private JoinType nestedJoinType = JoinType.COMMA; + + public boolean isQuery = false; + public boolean isAggregate = false; + + public static final int DEFAULT_LIMIT = 200; + + public Select() {} + + public List getFields() { + return fields; + } + + public void setOffset(int offset) { + this.offset = offset; + } + + public void setRowCount(Integer rowCount) { + this.rowCount = rowCount; + } + + public void addGroupBy(Field field) { + List wrapper = new ArrayList<>(); + wrapper.add(field); + addGroupBy(wrapper); + } + + public void addGroupBy(List fields) { + isAggregate = true; + selectAll = false; + this.groupBys.add(fields); + } + + public List> getGroupBys() { + return groupBys; + } + + public Having getHaving() { + return having; + } + + public void setHaving(Having having) { + this.having = having; + } + + public List getOrderBys() { + return orderBys; + } + + public int getOffset() { + return offset; + } + + public Integer getRowCount() { + return rowCount; + } + + public void addOrderBy(String nestedPath, String name, String type, Field field) { + if ("_score".equals(name)) { + isQuery = true; + } + this.orderBys.add(new Order(nestedPath, name, type, field)); + } + + public void addField(Field field) { + if (field == null) { + return; + } + if (field == STAR && !isAggregate) { + // Ignore GROUP BY since columns present in result are decided by column list in GROUP BY + this.selectAll = true; + return; + } + + 
if (field instanceof MethodField + && AGGREGATE_FUNCTIONS.contains(field.getName().toUpperCase())) { + isAggregate = true; + } + + fields.add(field); + } + + public List getHints() { + return hints; + } + + public JoinType getNestedJoinType() { + return nestedJoinType; + } + + public void setNestedJoinType(JoinType nestedJoinType) { + this.nestedJoinType = nestedJoinType; + } + + public void fillSubQueries() { + subQueries = new ArrayList<>(); + Where where = this.getWhere(); + fillSubQueriesFromWhereRecursive(where); + } - public List> getGroupBys() { - return groupBys; + private void fillSubQueriesFromWhereRecursive(Where where) { + if (where == null) { + return; } - - public Having getHaving() { - return having; - } - - public void setHaving(Having having) { - this.having = having; - } - - public List getOrderBys() { - return orderBys; - } - - public int getOffset() { - return offset; - } - - public Integer getRowCount() { - return rowCount; - } - - public void addOrderBy(String nestedPath, String name, String type, Field field) { - if ("_score".equals(name)) { - isQuery = true; + if (where instanceof Condition) { + Condition condition = (Condition) where; + if (condition.getValue() instanceof SubQueryExpression) { + this.subQueries.add((SubQueryExpression) condition.getValue()); + this.containsSubQueries = true; + } + if (condition.getValue() instanceof Object[]) { + + for (Object o : (Object[]) condition.getValue()) { + if (o instanceof SubQueryExpression) { + this.subQueries.add((SubQueryExpression) o); + this.containsSubQueries = true; + } } - this.orderBys.add(new Order(nestedPath, name, type, field)); + } + } else { + for (Where innerWhere : where.getWheres()) { + fillSubQueriesFromWhereRecursive(innerWhere); + } } + } - public void addField(Field field) { - if (field == null) { - return; - } - if (field == STAR && !isAggregate) { - // Ignore GROUP BY since columns present in result are decided by column list in GROUP BY - this.selectAll = true; - return; 
- } - - if (field instanceof MethodField && AGGREGATE_FUNCTIONS.contains(field.getName().toUpperCase())) { - isAggregate = true; - } - - fields.add(field); - } - - public List getHints() { - return hints; - } - - - public JoinType getNestedJoinType() { - return nestedJoinType; - } - - public void setNestedJoinType(JoinType nestedJoinType) { - this.nestedJoinType = nestedJoinType; - } + public boolean containsSubQueries() { + return containsSubQueries; + } + public List getSubQueries() { + return subQueries; + } - public void fillSubQueries() { - subQueries = new ArrayList<>(); - Where where = this.getWhere(); - fillSubQueriesFromWhereRecursive(where); - } - - private void fillSubQueriesFromWhereRecursive(Where where) { - if (where == null) { - return; - } - if (where instanceof Condition) { - Condition condition = (Condition) where; - if (condition.getValue() instanceof SubQueryExpression) { - this.subQueries.add((SubQueryExpression) condition.getValue()); - this.containsSubQueries = true; - } - if (condition.getValue() instanceof Object[]) { - - for (Object o : (Object[]) condition.getValue()) { - if (o instanceof SubQueryExpression) { - this.subQueries.add((SubQueryExpression) o); - this.containsSubQueries = true; - } - } - } - } else { - for (Where innerWhere : where.getWheres()) { - fillSubQueriesFromWhereRecursive(innerWhere); - } - } - } - - public boolean containsSubQueries() { - return containsSubQueries; - } - - public List getSubQueries() { - return subQueries; - } + public boolean isOrderdSelect() { + return this.getOrderBys() != null && this.getOrderBys().size() > 0; + } - public boolean isOrderdSelect() { - return this.getOrderBys() != null && this.getOrderBys().size() > 0; - } - - public boolean isSelectAll() { - return selectAll; - } + public boolean isSelectAll() { + return selectAll; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/TableOnJoinSelect.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/domain/TableOnJoinSelect.java index cf27cb51ee..e0dcb2899f 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/TableOnJoinSelect.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/TableOnJoinSelect.java @@ -3,45 +3,40 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import java.util.List; -/** - * Created by Eliran on 28/8/2015. - */ +/** Created by Eliran on 28/8/2015. */ public class TableOnJoinSelect extends Select { - private List connectedFields; - private List selectedFields; - private String alias; - - public TableOnJoinSelect() { - } + private List connectedFields; + private List selectedFields; + private String alias; + public TableOnJoinSelect() {} - public List getConnectedFields() { - return connectedFields; - } + public List getConnectedFields() { + return connectedFields; + } - public void setConnectedFields(List connectedFields) { - this.connectedFields = connectedFields; - } + public void setConnectedFields(List connectedFields) { + this.connectedFields = connectedFields; + } - public List getSelectedFields() { - return selectedFields; - } + public List getSelectedFields() { + return selectedFields; + } - public void setSelectedFields(List selectedFields) { - this.selectedFields = selectedFields; - } + public void setSelectedFields(List selectedFields) { + this.selectedFields = selectedFields; + } - public String getAlias() { - return alias; - } + public String getAlias() { + return alias; + } - public void setAlias(String alias) { - this.alias = alias; - } + public void setAlias(String alias) { + this.alias = alias; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Where.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Where.java index ae05e33e51..d6f767203b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/Where.java +++ 
b/legacy/src/main/java/org/opensearch/sql/legacy/domain/Where.java @@ -3,70 +3,69 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain; import java.util.LinkedList; public class Where implements Cloneable { - public enum CONN { - AND, OR; + public enum CONN { + AND, + OR; - public CONN negative() { - return this == AND ? OR : AND; - } + public CONN negative() { + return this == AND ? OR : AND; } + } - public static Where newInstance() { - return new Where(CONN.AND); - } + public static Where newInstance() { + return new Where(CONN.AND); + } - private LinkedList wheres = new LinkedList<>(); + private LinkedList wheres = new LinkedList<>(); - protected CONN conn; + protected CONN conn; - public Where(String connStr) { - this.conn = CONN.valueOf(connStr.toUpperCase()); - } + public Where(String connStr) { + this.conn = CONN.valueOf(connStr.toUpperCase()); + } - public Where(CONN conn) { - this.conn = conn; - } + public Where(CONN conn) { + this.conn = conn; + } - public void addWhere(Where where) { - wheres.add(where); - } + public void addWhere(Where where) { + wheres.add(where); + } - public CONN getConn() { - return this.conn; - } - - public void setConn(CONN conn) { - this.conn = conn; - } + public CONN getConn() { + return this.conn; + } - public LinkedList getWheres() { - return wheres; - } + public void setConn(CONN conn) { + this.conn = conn; + } - @Override - public String toString() { - if (wheres.size() > 0) { - String whereStr = wheres.toString(); - return this.conn + " ( " + whereStr.substring(1, whereStr.length() - 1) + " ) "; - } else { - return ""; - } + public LinkedList getWheres() { + return wheres; + } + @Override + public String toString() { + if (wheres.size() > 0) { + String whereStr = wheres.toString(); + return this.conn + " ( " + whereStr.substring(1, whereStr.length() - 1) + " ) "; + } else { + return ""; } + } - @Override - public Object clone() throws CloneNotSupportedException { - Where clonedWhere = new 
Where(this.getConn()); - for (Where innerWhere : this.getWheres()) { - clonedWhere.addWhere((Where) innerWhere.clone()); - } - return clonedWhere; + @Override + public Object clone() throws CloneNotSupportedException { + Where clonedWhere = new Where(this.getConn()); + for (Where innerWhere : this.getWheres()) { + clonedWhere.addWhere((Where) innerWhere.clone()); } + return clonedWhere; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/bucketpath/BucketPath.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/bucketpath/BucketPath.java index 996caae5e2..ee19c08725 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/bucketpath/BucketPath.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/bucketpath/BucketPath.java @@ -3,39 +3,35 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain.bucketpath; import java.util.ArrayDeque; import java.util.Deque; /** - * The bucket path syntax - * [ , ]* [ , ] + * The bucket path syntax [ , ]* [ , + * ] * - * https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline.html#buckets-path-syntax + *

https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-pipeline.html#buckets-path-syntax */ public class BucketPath { - private Deque pathStack = new ArrayDeque<>(); + private Deque pathStack = new ArrayDeque<>(); - public BucketPath add(Path path) { - if (pathStack.isEmpty()) { - assert path.isMetricPath() : "The last path in the bucket path must be Metric"; - } else { - assert path.isAggPath() : "All the other path in the bucket path must be Agg"; - } - pathStack.push(path); - return this; + public BucketPath add(Path path) { + if (pathStack.isEmpty()) { + assert path.isMetricPath() : "The last path in the bucket path must be Metric"; + } else { + assert path.isAggPath() : "All the other path in the bucket path must be Agg"; } + pathStack.push(path); + return this; + } - /** - * Return the bucket path. - * Return "", if there is no agg or metric available - */ - public String getBucketPath() { - String bucketPath = pathStack.isEmpty() ? "" : pathStack.pop().getPath(); - return pathStack.stream() - .map(path -> path.getSeparator() + path.getPath()) - .reduce(bucketPath, String::concat); - } + /** Return the bucket path. Return "", if there is no agg or metric available */ + public String getBucketPath() { + String bucketPath = pathStack.isEmpty() ? 
"" : pathStack.pop().getPath(); + return pathStack.stream() + .map(path -> path.getSeparator() + path.getPath()) + .reduce(bucketPath, String::concat); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/bucketpath/Path.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/bucketpath/Path.java index d5c897cf90..4827e0e61c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/bucketpath/Path.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/bucketpath/Path.java @@ -3,49 +3,49 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain.bucketpath; public class Path { - private final String path; - private final String separator; - private final PathType type; - - private Path(String path, String separator, PathType type) { - this.path = path; - this.separator = separator; - this.type = type; - } - - public String getPath() { - return path; - } - - public String getSeparator() { - return separator; - } - - public PathType getType() { - return type; - } - - public boolean isMetricPath() { - return type == PathType.METRIC; - } - - public boolean isAggPath() { - return type == PathType.AGG; - } - - public static Path getAggPath(String path) { - return new Path(path, ">", PathType.AGG); - } - - public static Path getMetricPath(String path) { - return new Path(path, ".", PathType.METRIC); - } - - public enum PathType { - AGG, METRIC - } + private final String path; + private final String separator; + private final PathType type; + + private Path(String path, String separator, PathType type) { + this.path = path; + this.separator = separator; + this.type = type; + } + + public String getPath() { + return path; + } + + public String getSeparator() { + return separator; + } + + public PathType getType() { + return type; + } + + public boolean isMetricPath() { + return type == PathType.METRIC; + } + + public boolean isAggPath() { + return type == PathType.AGG; + } + + public static Path 
getAggPath(String path) { + return new Path(path, ">", PathType.AGG); + } + + public static Path getMetricPath(String path) { + return new Path(path, ".", PathType.METRIC); + } + + public enum PathType { + AGG, + METRIC + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/Hint.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/Hint.java index 8a5c174c41..b83c63aae1 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/Hint.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/Hint.java @@ -3,26 +3,23 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain.hints; -/** - * Created by Eliran on 5/9/2015. - */ +/** Created by Eliran on 5/9/2015. */ public class Hint { - private HintType type; - private Object[] params; + private HintType type; + private Object[] params; - public Hint(HintType type, Object[] params) { - this.type = type; - this.params = params; - } + public Hint(HintType type, Object[] params) { + this.type = type; + this.params = params; + } - public HintType getType() { - return type; - } + public HintType getType() { + return type; + } - public Object[] getParams() { - return params; - } + public Object[] getParams() { + return params; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/HintFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/HintFactory.java index 18c68d57ab..ec258e0fbc 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/HintFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/HintFactory.java @@ -3,10 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain.hints; - import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import com.fasterxml.jackson.dataformat.yaml.YAMLParser; import java.io.IOException; @@ -18,211 +16,217 @@ import org.opensearch.core.xcontent.NamedXContentRegistry; import 
org.opensearch.sql.legacy.exception.SqlParseException; -/** - * Created by Eliran on 5/9/2015. - */ +/** Created by Eliran on 5/9/2015. */ public class HintFactory { - private static final String PREFIX = "! "; + private static final String PREFIX = "! "; - public static Hint getHintFromString(String hintAsString) throws SqlParseException { - if (hintAsString.startsWith("! USE_NESTED_LOOPS") || hintAsString.startsWith("! USE_NL")) { - return new Hint(HintType.USE_NESTED_LOOPS, null); - } - - if (hintAsString.startsWith("! SHARD_SIZE")) { - String[] numbers = getParamsFromHint(hintAsString, "! SHARD_SIZE"); - //todo: check if numbers etc.. - List params = new ArrayList<>(); - for (String number : numbers) { - if (number.equals("null") || number.equals("infinity")) { - params.add(null); - } else { - params.add(Integer.parseInt(number)); - } - } - return new Hint(HintType.SHARD_SIZE, params.toArray()); - } - - if (hintAsString.equals("! HASH_WITH_TERMS_FILTER")) { - return new Hint(HintType.HASH_WITH_TERMS_FILTER, null); - } - if (hintAsString.startsWith("! JOIN_TABLES_LIMIT")) { - String[] numbers = getParamsFromHint(hintAsString, "! JOIN_TABLES_LIMIT"); - //todo: check if numbers etc.. - List params = new ArrayList<>(); - for (String number : numbers) { - if (number.equals("null") || number.equals("infinity")) { - params.add(null); - } else { - params.add(Integer.parseInt(number)); - } - } - - return new Hint(HintType.JOIN_LIMIT, params.toArray()); - } - if (hintAsString.startsWith("! NL_MULTISEARCH_SIZE")) { - String[] number = getParamsFromHint(hintAsString, "! NL_MULTISEARCH_SIZE"); - //todo: check if numbers etc.. - int multiSearchSize = Integer.parseInt(number[0]); - return new Hint(HintType.NL_MULTISEARCH_SIZE, new Object[]{multiSearchSize}); - } - if (hintAsString.startsWith("! USE_SCROLL")) { - String[] scrollParams = getParamsFromHint(hintAsString, "! 
USE_SCROLL"); - if (scrollParams != null && scrollParams.length == 2) { - String param = scrollParams[0]; - return new Hint(HintType.USE_SCROLL, - new Object[]{ - (param.startsWith("\"") && param.endsWith("\"")) - || (param.startsWith("'") && param.endsWith("'")) - ? param.substring(1, param.length() - 1) : Integer.parseInt(param), - Integer.parseInt(scrollParams[1])}); - } else { - return new Hint(HintType.USE_SCROLL, new Object[]{50, 60000}); - } - } - if (hintAsString.startsWith("! IGNORE_UNAVAILABLE")) { - return new Hint(HintType.IGNORE_UNAVAILABLE, null); - } - if (hintAsString.startsWith("! DOCS_WITH_AGGREGATION")) { - Integer[] params = parseParamsAsInts(hintAsString, "! DOCS_WITH_AGGREGATION"); - return new Hint(HintType.DOCS_WITH_AGGREGATION, params); - } - if (hintAsString.startsWith("! ROUTINGS")) { - String[] routings = getParamsFromHint(hintAsString, "! ROUTINGS"); - return new Hint(HintType.ROUTINGS, routings); - } - if (hintAsString.startsWith("! HIGHLIGHT")) { - String[] heighlights = getParamsFromHint(hintAsString, "! HIGHLIGHT"); - ArrayList hintParams = new ArrayList(); - hintParams.add(heighlights[0]); - if (heighlights.length > 1) { - StringBuilder builder = new StringBuilder(); - for (int i = 1; i < heighlights.length; i++) { - if (i != 1) { - builder.append("\n"); - } - builder.append(heighlights[i]); - } - String heighlightParam = builder.toString(); - YAMLFactory yamlFactory = new YAMLFactory(); - YAMLParser yamlParser = null; - try { - yamlParser = yamlFactory.createParser(heighlightParam.toCharArray()); - YamlXContentParser yamlXContentParser = new YamlXContentParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, yamlParser); - Map map = yamlXContentParser.map(); - hintParams.add(map); - } catch (IOException e) { - throw new SqlParseException("could not parse heighlight hint: " + e.getMessage()); - } - } - return new Hint(HintType.HIGHLIGHT, hintParams.toArray()); - } - if (hintAsString.startsWith("! 
MINUS_SCROLL_FETCH_AND_RESULT_LIMITS")) { - Integer[] params = parseParamsAsInts(hintAsString, "! MINUS_SCROLL_FETCH_AND_RESULT_LIMITS"); - if (params.length > 3) { - throw new SqlParseException("MINUS_FETCH_AND_RESULT_LIMITS should have 3 int params " - + "(maxFromFirst,maxFromSecond,hitsPerScrollShard)"); - } - Integer[] paramsWithDefaults = new Integer[3]; - int defaultMaxFetchFromTable = 100000; - int defaultFetchOnScroll = 1000; - paramsWithDefaults[0] = defaultMaxFetchFromTable; - paramsWithDefaults[1] = defaultMaxFetchFromTable; - paramsWithDefaults[2] = defaultFetchOnScroll; - for (int i = 0; i < params.length; i++) { - paramsWithDefaults[i] = params[i]; - } - - return new Hint(HintType.MINUS_FETCH_AND_RESULT_LIMITS, paramsWithDefaults); - } - if (hintAsString.startsWith("! MINUS_USE_TERMS_OPTIMIZATION")) { - String[] param = getParamsFromHint(hintAsString, "! MINUS_USE_TERMS_OPTIMIZATION"); - boolean shouldLowerStringOnTerms = false; - if (param != null) { - if (param.length != 1) { - throw new SqlParseException( - "MINUS_USE_TERMS_OPTIMIZATION should have none or one boolean param: false/true "); - } - try { - shouldLowerStringOnTerms = Boolean.parseBoolean(param[0].toLowerCase()); - } catch (Exception e) { - throw new SqlParseException("MINUS_USE_TERMS_OPTIMIZATION should have none or one boolean param: " - + "false/true , got:" + param[0]); - } - } - return new Hint(HintType.MINUS_USE_TERMS_OPTIMIZATION, new Object[]{shouldLowerStringOnTerms}); - } - if (hintAsString.startsWith("! COLLAPSE")) { - String collapse = getParamFromHint(hintAsString, "! COLLAPSE"); - return new Hint(HintType.COLLAPSE, new String[]{collapse}); - } - if (hintAsString.startsWith("! POST_FILTER")) { - String postFilter = getParamFromHint(hintAsString, "! 
POST_FILTER"); - return new Hint(HintType.POST_FILTER, new String[]{postFilter}); - } - - Hint queryPlanHint = parseHintForQueryPlanner(hintAsString); - if (queryPlanHint != null) { - return queryPlanHint; - } - - return null; - } - - /** - * Parse hints for hash join in new query planning framework. - * Only check syntax error here and leave semantics interpret work for planner. - */ - private static Hint parseHintForQueryPlanner(String hintStr) { - if (hintStr.contains("(") - && (hintStr.startsWith("! JOIN_ALGORITHM_BLOCK_SIZE") - || hintStr.startsWith("! JOIN_SCROLL_PAGE_SIZE") - || hintStr.startsWith("! JOIN_CIRCUIT_BREAK_LIMIT") - || hintStr.startsWith("! JOIN_BACK_OFF_RETRY_INTERVALS") - || hintStr.startsWith("! JOIN_TIME_OUT") - )) { // Note that Trie tree is needed here if many hint options - - String hintName = hintStr.substring(PREFIX.length(), hintStr.indexOf('(')).trim(); - String hintPrefix = PREFIX + hintName; - HintType hintType = HintType.valueOf(hintName); - Integer[] params = parseParamsAsInts(hintStr, hintPrefix); - - if (params != null && params.length > 0) { - return new Hint(hintType, params); - } - } else if (hintStr.startsWith("! JOIN_ALGORITHM_USE_LEGACY")) { - return new Hint(HintType.JOIN_ALGORITHM_USE_LEGACY, new Object[0]); - } - return null; + public static Hint getHintFromString(String hintAsString) throws SqlParseException { + if (hintAsString.startsWith("! USE_NESTED_LOOPS") || hintAsString.startsWith("! USE_NL")) { + return new Hint(HintType.USE_NESTED_LOOPS, null); } - private static String getParamFromHint(String hint, String prefix) { - if (!hint.contains("(")) { - return null; - } - return hint.replace(prefix, "").replaceAll("\\s*\\(\\s*", "").replaceAll("\\s*\\,\\s*", ",") - .replaceAll("\\s*\\)\\s*", ""); + if (hintAsString.startsWith("! SHARD_SIZE")) { + String[] numbers = getParamsFromHint(hintAsString, "! SHARD_SIZE"); + // todo: check if numbers etc.. 
+ List params = new ArrayList<>(); + for (String number : numbers) { + if (number.equals("null") || number.equals("infinity")) { + params.add(null); + } else { + params.add(Integer.parseInt(number)); + } + } + return new Hint(HintType.SHARD_SIZE, params.toArray()); } - private static String[] getParamsFromHint(String hint, String prefix) { - String param = getParamFromHint(hint, prefix); - return param != null ? param.split(",") : null; + if (hintAsString.equals("! HASH_WITH_TERMS_FILTER")) { + return new Hint(HintType.HASH_WITH_TERMS_FILTER, null); + } + if (hintAsString.startsWith("! JOIN_TABLES_LIMIT")) { + String[] numbers = getParamsFromHint(hintAsString, "! JOIN_TABLES_LIMIT"); + // todo: check if numbers etc.. + List params = new ArrayList<>(); + for (String number : numbers) { + if (number.equals("null") || number.equals("infinity")) { + params.add(null); + } else { + params.add(Integer.parseInt(number)); + } + } + + return new Hint(HintType.JOIN_LIMIT, params.toArray()); + } + if (hintAsString.startsWith("! NL_MULTISEARCH_SIZE")) { + String[] number = getParamsFromHint(hintAsString, "! NL_MULTISEARCH_SIZE"); + // todo: check if numbers etc.. + int multiSearchSize = Integer.parseInt(number[0]); + return new Hint(HintType.NL_MULTISEARCH_SIZE, new Object[] {multiSearchSize}); + } + if (hintAsString.startsWith("! USE_SCROLL")) { + String[] scrollParams = getParamsFromHint(hintAsString, "! USE_SCROLL"); + if (scrollParams != null && scrollParams.length == 2) { + String param = scrollParams[0]; + return new Hint( + HintType.USE_SCROLL, + new Object[] { + (param.startsWith("\"") && param.endsWith("\"")) + || (param.startsWith("'") && param.endsWith("'")) + ? param.substring(1, param.length() - 1) + : Integer.parseInt(param), + Integer.parseInt(scrollParams[1]) + }); + } else { + return new Hint(HintType.USE_SCROLL, new Object[] {50, 60000}); + } + } + if (hintAsString.startsWith("! 
IGNORE_UNAVAILABLE")) { + return new Hint(HintType.IGNORE_UNAVAILABLE, null); + } + if (hintAsString.startsWith("! DOCS_WITH_AGGREGATION")) { + Integer[] params = parseParamsAsInts(hintAsString, "! DOCS_WITH_AGGREGATION"); + return new Hint(HintType.DOCS_WITH_AGGREGATION, params); + } + if (hintAsString.startsWith("! ROUTINGS")) { + String[] routings = getParamsFromHint(hintAsString, "! ROUTINGS"); + return new Hint(HintType.ROUTINGS, routings); + } + if (hintAsString.startsWith("! HIGHLIGHT")) { + String[] heighlights = getParamsFromHint(hintAsString, "! HIGHLIGHT"); + ArrayList hintParams = new ArrayList(); + hintParams.add(heighlights[0]); + if (heighlights.length > 1) { + StringBuilder builder = new StringBuilder(); + for (int i = 1; i < heighlights.length; i++) { + if (i != 1) { + builder.append("\n"); + } + builder.append(heighlights[i]); + } + String heighlightParam = builder.toString(); + YAMLFactory yamlFactory = new YAMLFactory(); + YAMLParser yamlParser = null; + try { + yamlParser = yamlFactory.createParser(heighlightParam.toCharArray()); + YamlXContentParser yamlXContentParser = + new YamlXContentParser( + NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, yamlParser); + Map map = yamlXContentParser.map(); + hintParams.add(map); + } catch (IOException e) { + throw new SqlParseException("could not parse heighlight hint: " + e.getMessage()); + } + } + return new Hint(HintType.HIGHLIGHT, hintParams.toArray()); + } + if (hintAsString.startsWith("! MINUS_SCROLL_FETCH_AND_RESULT_LIMITS")) { + Integer[] params = parseParamsAsInts(hintAsString, "! 
MINUS_SCROLL_FETCH_AND_RESULT_LIMITS"); + if (params.length > 3) { + throw new SqlParseException( + "MINUS_FETCH_AND_RESULT_LIMITS should have 3 int params " + + "(maxFromFirst,maxFromSecond,hitsPerScrollShard)"); + } + Integer[] paramsWithDefaults = new Integer[3]; + int defaultMaxFetchFromTable = 100000; + int defaultFetchOnScroll = 1000; + paramsWithDefaults[0] = defaultMaxFetchFromTable; + paramsWithDefaults[1] = defaultMaxFetchFromTable; + paramsWithDefaults[2] = defaultFetchOnScroll; + for (int i = 0; i < params.length; i++) { + paramsWithDefaults[i] = params[i]; + } + + return new Hint(HintType.MINUS_FETCH_AND_RESULT_LIMITS, paramsWithDefaults); + } + if (hintAsString.startsWith("! MINUS_USE_TERMS_OPTIMIZATION")) { + String[] param = getParamsFromHint(hintAsString, "! MINUS_USE_TERMS_OPTIMIZATION"); + boolean shouldLowerStringOnTerms = false; + if (param != null) { + if (param.length != 1) { + throw new SqlParseException( + "MINUS_USE_TERMS_OPTIMIZATION should have none or one boolean param: false/true "); + } + try { + shouldLowerStringOnTerms = Boolean.parseBoolean(param[0].toLowerCase()); + } catch (Exception e) { + throw new SqlParseException( + "MINUS_USE_TERMS_OPTIMIZATION should have none or one boolean param: " + + "false/true , got:" + + param[0]); + } + } + return new Hint( + HintType.MINUS_USE_TERMS_OPTIMIZATION, new Object[] {shouldLowerStringOnTerms}); + } + if (hintAsString.startsWith("! COLLAPSE")) { + String collapse = getParamFromHint(hintAsString, "! COLLAPSE"); + return new Hint(HintType.COLLAPSE, new String[] {collapse}); + } + if (hintAsString.startsWith("! POST_FILTER")) { + String postFilter = getParamFromHint(hintAsString, "! 
POST_FILTER"); + return new Hint(HintType.POST_FILTER, new String[] {postFilter}); } - private static Integer[] parseParamsAsInts(String hintAsString, String startWith) { - String[] number = getParamsFromHint(hintAsString, startWith); - if (number == null) { - return new Integer[0]; - } - //todo: check if numbers etc.. - Integer[] params = new Integer[number.length]; - for (int i = 0; i < params.length; i++) { - params[i] = Integer.parseInt(number[i]); - } - return params; + Hint queryPlanHint = parseHintForQueryPlanner(hintAsString); + if (queryPlanHint != null) { + return queryPlanHint; } + return null; + } + + /** + * Parse hints for hash join in new query planning framework. Only check syntax error here and + * leave semantics interpret work for planner. + */ + private static Hint parseHintForQueryPlanner(String hintStr) { + if (hintStr.contains("(") + && (hintStr.startsWith("! JOIN_ALGORITHM_BLOCK_SIZE") + || hintStr.startsWith("! JOIN_SCROLL_PAGE_SIZE") + || hintStr.startsWith("! JOIN_CIRCUIT_BREAK_LIMIT") + || hintStr.startsWith("! JOIN_BACK_OFF_RETRY_INTERVALS") + || hintStr.startsWith( + "! JOIN_TIME_OUT"))) { // Note that Trie tree is needed here if many hint options + + String hintName = hintStr.substring(PREFIX.length(), hintStr.indexOf('(')).trim(); + String hintPrefix = PREFIX + hintName; + HintType hintType = HintType.valueOf(hintName); + Integer[] params = parseParamsAsInts(hintStr, hintPrefix); + + if (params != null && params.length > 0) { + return new Hint(hintType, params); + } + } else if (hintStr.startsWith("! 
JOIN_ALGORITHM_USE_LEGACY")) { + return new Hint(HintType.JOIN_ALGORITHM_USE_LEGACY, new Object[0]); + } + return null; + } + private static String getParamFromHint(String hint, String prefix) { + if (!hint.contains("(")) { + return null; + } + return hint.replace(prefix, "") + .replaceAll("\\s*\\(\\s*", "") + .replaceAll("\\s*\\,\\s*", ",") + .replaceAll("\\s*\\)\\s*", ""); + } + + private static String[] getParamsFromHint(String hint, String prefix) { + String param = getParamFromHint(hint, prefix); + return param != null ? param.split(",") : null; + } + + private static Integer[] parseParamsAsInts(String hintAsString, String startWith) { + String[] number = getParamsFromHint(hintAsString, startWith); + if (number == null) { + return new Integer[0]; + } + // todo: check if numbers etc.. + Integer[] params = new Integer[number.length]; + for (int i = 0; i < params.length; i++) { + params[i] = Integer.parseInt(number[i]); + } + return params; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/HintType.java b/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/HintType.java index 7d3444c36c..0134ef0874 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/HintType.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/domain/hints/HintType.java @@ -3,31 +3,28 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.domain.hints; -/** - * Created by Eliran on 29/8/2015. - */ +/** Created by Eliran on 29/8/2015. 
*/ public enum HintType { - HASH_WITH_TERMS_FILTER, - JOIN_LIMIT, - USE_NESTED_LOOPS, - NL_MULTISEARCH_SIZE, - USE_SCROLL, - IGNORE_UNAVAILABLE, - DOCS_WITH_AGGREGATION, - ROUTINGS, - SHARD_SIZE, - HIGHLIGHT, - MINUS_FETCH_AND_RESULT_LIMITS, - MINUS_USE_TERMS_OPTIMIZATION, - COLLAPSE, - POST_FILTER, - JOIN_ALGORITHM_BLOCK_SIZE, - JOIN_ALGORITHM_USE_LEGACY, - JOIN_SCROLL_PAGE_SIZE, - JOIN_CIRCUIT_BREAK_LIMIT, - JOIN_BACK_OFF_RETRY_INTERVALS, - JOIN_TIME_OUT + HASH_WITH_TERMS_FILTER, + JOIN_LIMIT, + USE_NESTED_LOOPS, + NL_MULTISEARCH_SIZE, + USE_SCROLL, + IGNORE_UNAVAILABLE, + DOCS_WITH_AGGREGATION, + ROUTINGS, + SHARD_SIZE, + HIGHLIGHT, + MINUS_FETCH_AND_RESULT_LIMITS, + MINUS_USE_TERMS_OPTIMIZATION, + COLLAPSE, + POST_FILTER, + JOIN_ALGORITHM_BLOCK_SIZE, + JOIN_ALGORITHM_USE_LEGACY, + JOIN_SCROLL_PAGE_SIZE, + JOIN_CIRCUIT_BREAK_LIMIT, + JOIN_BACK_OFF_RETRY_INTERVALS, + JOIN_TIME_OUT } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/LocalClusterState.java b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/LocalClusterState.java index 37d9322b46..cc91fb8b39 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/LocalClusterState.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/LocalClusterState.java @@ -3,11 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.esdomain; -import static org.opensearch.common.settings.Settings.EMPTY; - import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import java.io.IOException; @@ -25,7 +22,6 @@ import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.collect.Tuple; import org.opensearch.common.settings.Setting; import org.opensearch.index.IndexNotFoundException; import org.opensearch.sql.common.setting.Settings; @@ -33,188 +29,181 @@ import 
org.opensearch.sql.opensearch.setting.OpenSearchSettings; /** - * Local cluster state information which may be stale but help avoid blocking operation in NIO thread. - *

- * 1) Why extending TransportAction doesn't work here? - * TransportAction enforce implementation to be performed remotely but local cluster state read is expected here. - *

- * 2) Why injection by AbstractModule doesn't work here? - * Because this state needs to be used across the plugin, ex. in rewriter, pretty formatter etc. + * Local cluster state information which may be stale but help avoid blocking operation in NIO + * thread. + * + *

1) Why extending TransportAction doesn't work here? TransportAction enforce implementation to + * be performed remotely but local cluster state read is expected here. + * + *

2) Why injection by AbstractModule doesn't work here? Because this state needs to be used + * across the plugin, ex. in rewriter, pretty formatter etc. */ public class LocalClusterState { - private static final Logger LOG = LogManager.getLogger(); + private static final Logger LOG = LogManager.getLogger(); - private static final Function> ALL_FIELDS = (anyIndex -> (anyField -> true)); + private static final Function> ALL_FIELDS = + (anyIndex -> (anyField -> true)); - /** - * Singleton instance - */ - private static LocalClusterState INSTANCE; + /** Singleton instance */ + private static LocalClusterState INSTANCE; - /** - * Current cluster state on local node - */ - private ClusterService clusterService; + /** Current cluster state on local node */ + private ClusterService clusterService; - private OpenSearchSettings pluginSettings; + private OpenSearchSettings pluginSettings; - /** - * Index name expression resolver to get concrete index name - */ - private IndexNameExpressionResolver resolver; + /** Index name expression resolver to get concrete index name */ + private IndexNameExpressionResolver resolver; - /** - * Thread-safe mapping cache to save the computation of sourceAsMap() which is not lightweight as thought - * Array cannot be used as key because hashCode() always return reference address, so either use wrapper or List. - */ - private final Cache, IndexMappings> cache; + /** + * Thread-safe mapping cache to save the computation of sourceAsMap() which is not lightweight as + * thought Array cannot be used as key because hashCode() always return reference address, so + * either use wrapper or List. + */ + private final Cache, IndexMappings> cache; - /** - * Latest setting value for each registered key. Thread-safe is required. 
- */ - private final Map latestSettings = new ConcurrentHashMap<>(); - - public static synchronized LocalClusterState state() { - if (INSTANCE == null) { - INSTANCE = new LocalClusterState(); - } - return INSTANCE; - } + /** Latest setting value for each registered key. Thread-safe is required. */ + private final Map latestSettings = new ConcurrentHashMap<>(); - /** - * Give testing code a chance to inject mock object - */ - public static synchronized void state(LocalClusterState instance) { - INSTANCE = instance; + public static synchronized LocalClusterState state() { + if (INSTANCE == null) { + INSTANCE = new LocalClusterState(); } - - public void setClusterService(ClusterService clusterService) { - this.clusterService = clusterService; - - clusterService.addListener(event -> { - if (event.metadataChanged()) { - // State in cluster service is already changed to event.state() before listener fired - if (LOG.isDebugEnabled()) { - LOG.debug("Metadata in cluster state changed: {}", - new IndexMappings(clusterService.state().metadata())); - } - cache.invalidateAll(); + return INSTANCE; + } + + /** Give testing code a chance to inject mock object */ + public static synchronized void state(LocalClusterState instance) { + INSTANCE = instance; + } + + public void setClusterService(ClusterService clusterService) { + this.clusterService = clusterService; + + clusterService.addListener( + event -> { + if (event.metadataChanged()) { + // State in cluster service is already changed to event.state() before listener fired + if (LOG.isDebugEnabled()) { + LOG.debug( + "Metadata in cluster state changed: {}", + new IndexMappings(clusterService.state().metadata())); } + cache.invalidateAll(); + } }); - } - - public void setPluginSettings(OpenSearchSettings settings) { - this.pluginSettings = settings; - for (Setting setting: settings.getSettings()) { - clusterService.getClusterSettings().addSettingsUpdateConsumer( - setting, - newVal -> { - if (LOG.isDebugEnabled()) { - 
LOG.debug("The value of setting [{}] changed to [{}]", setting.getKey(), newVal); - } - latestSettings.put(setting.getKey(), newVal); + } + + public void setPluginSettings(OpenSearchSettings settings) { + this.pluginSettings = settings; + for (Setting setting : settings.getSettings()) { + clusterService + .getClusterSettings() + .addSettingsUpdateConsumer( + setting, + newVal -> { + if (LOG.isDebugEnabled()) { + LOG.debug("The value of setting [{}] changed to [{}]", setting.getKey(), newVal); } - ); - } - - } - - public void setResolver(IndexNameExpressionResolver resolver) { - this.resolver = resolver; - } - - private LocalClusterState() { - cache = CacheBuilder.newBuilder().maximumSize(100).build(); - } - - /** - * Get plugin setting value by key. Return default value if not configured explicitly. - * @param key setting key registered during plugin bootstrap. - * @return setting value or default. - */ - @SuppressWarnings("unchecked") - public T getSettingValue(Settings.Key key) { - Objects.requireNonNull(pluginSettings, "SQL plugin setting is null"); - return (T) latestSettings.getOrDefault(key.getKeyValue(), - pluginSettings.getSettingValue(key)); - } - - /** - * Get field mappings by index expressions. All types and fields are included in response. - */ - public IndexMappings getFieldMappings(String[] indices) { - return getFieldMappings(indices, ALL_FIELDS); + latestSettings.put(setting.getKey(), newVal); + }); } - - /** - * Get field mappings by index expressions, type and field filter. Because IndexMetaData/MappingMetaData - * is hard to convert to FieldMappingMetaData, custom mapping domain objects are being used here. In future, - * it should be moved to domain model layer for all OpenSearch specific knowledge. - *

- * Note that cluster state may be change inside OpenSearch so it's possible to read different state in 2 accesses - * to ClusterService.state() here. - * - * @param indices index name expression - * @param fieldFilter field filter predicate - * @return index mapping(s) - */ - private IndexMappings getFieldMappings(String[] indices, Function> fieldFilter) { - Objects.requireNonNull(clusterService, "Cluster service is null"); - Objects.requireNonNull(resolver, "Index name expression resolver is null"); - - try { - ClusterState state = clusterService.state(); - String[] concreteIndices = resolveIndexExpression(state, indices); - - IndexMappings mappings; - if (fieldFilter == ALL_FIELDS) { - mappings = findMappingsInCache(state, concreteIndices); - } else { - mappings = findMappings(state, concreteIndices, fieldFilter); - } - - LOG.debug("Found mappings: {}", mappings); - return mappings; - } catch (IndexNotFoundException e) { - throw e; - } catch (Exception e) { - throw new IllegalStateException( - "Failed to read mapping in cluster state for indices=" - + Arrays.toString(indices) , e); - } + } + + public void setResolver(IndexNameExpressionResolver resolver) { + this.resolver = resolver; + } + + private LocalClusterState() { + cache = CacheBuilder.newBuilder().maximumSize(100).build(); + } + + /** + * Get plugin setting value by key. Return default value if not configured explicitly. + * + * @param key setting key registered during plugin bootstrap. + * @return setting value or default. + */ + @SuppressWarnings("unchecked") + public T getSettingValue(Settings.Key key) { + Objects.requireNonNull(pluginSettings, "SQL plugin setting is null"); + return (T) latestSettings.getOrDefault(key.getKeyValue(), pluginSettings.getSettingValue(key)); + } + + /** Get field mappings by index expressions. All types and fields are included in response. 
*/ + public IndexMappings getFieldMappings(String[] indices) { + return getFieldMappings(indices, ALL_FIELDS); + } + + /** + * Get field mappings by index expressions, type and field filter. Because + * IndexMetaData/MappingMetaData is hard to convert to FieldMappingMetaData, custom mapping domain + * objects are being used here. In future, it should be moved to domain model layer for all + * OpenSearch specific knowledge. + * + *

Note that cluster state may be change inside OpenSearch so it's possible to read different + * state in 2 accesses to ClusterService.state() here. + * + * @param indices index name expression + * @param fieldFilter field filter predicate + * @return index mapping(s) + */ + private IndexMappings getFieldMappings( + String[] indices, Function> fieldFilter) { + Objects.requireNonNull(clusterService, "Cluster service is null"); + Objects.requireNonNull(resolver, "Index name expression resolver is null"); + + try { + ClusterState state = clusterService.state(); + String[] concreteIndices = resolveIndexExpression(state, indices); + + IndexMappings mappings; + if (fieldFilter == ALL_FIELDS) { + mappings = findMappingsInCache(state, concreteIndices); + } else { + mappings = findMappings(state, concreteIndices, fieldFilter); + } + + LOG.debug("Found mappings: {}", mappings); + return mappings; + } catch (IndexNotFoundException e) { + throw e; + } catch (Exception e) { + throw new IllegalStateException( + "Failed to read mapping in cluster state for indices=" + Arrays.toString(indices), e); } + } - private String[] resolveIndexExpression(ClusterState state, String[] indices) { - String[] concreteIndices = resolver.concreteIndexNames(state, IndicesOptions.strictExpandOpen(), true, indices); + private String[] resolveIndexExpression(ClusterState state, String[] indices) { + String[] concreteIndices = + resolver.concreteIndexNames(state, IndicesOptions.strictExpandOpen(), true, indices); - if (LOG.isDebugEnabled()) { - LOG.debug("Resolved index expression {} to concrete index names {}", - Arrays.toString(indices), Arrays.toString(concreteIndices)); - } - return concreteIndices; + if (LOG.isDebugEnabled()) { + LOG.debug( + "Resolved index expression {} to concrete index names {}", + Arrays.toString(indices), + Arrays.toString(concreteIndices)); } - - private IndexMappings findMappings(ClusterState state, String[] indices, - Function> fieldFilter) throws IOException { - 
LOG.debug("Cache didn't help. Load and parse mapping in cluster state"); - return new IndexMappings( - state.metadata().findMappings(indices, fieldFilter) - ); - } - - private IndexMappings findMappingsInCache(ClusterState state, String[] indices) - throws ExecutionException { - LOG.debug("Looking for mapping in cache: {}", cache.asMap()); - return cache.get(sortToList(indices), - () -> findMappings(state, indices, ALL_FIELDS) - ); - } - - private List sortToList(T[] array) { - // Mostly array has single element - Arrays.sort(array); - return Arrays.asList(array); - } - + return concreteIndices; + } + + private IndexMappings findMappings( + ClusterState state, String[] indices, Function> fieldFilter) + throws IOException { + LOG.debug("Cache didn't help. Load and parse mapping in cluster state"); + return new IndexMappings(state.metadata().findMappings(indices, fieldFilter)); + } + + private IndexMappings findMappingsInCache(ClusterState state, String[] indices) + throws ExecutionException { + LOG.debug("Looking for mapping in cache: {}", cache.asMap()); + return cache.get(sortToList(indices), () -> findMappings(state, indices, ALL_FIELDS)); + } + + private List sortToList(T[] array) { + // Mostly array has single element + Arrays.sort(array); + return Arrays.asList(array); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/OpenSearchClient.java b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/OpenSearchClient.java index a823947466..fd02486fae 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/OpenSearchClient.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/OpenSearchClient.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.esdomain; import java.util.ArrayList; @@ -19,47 +18,57 @@ public class OpenSearchClient { - private static final Logger LOG = LogManager.getLogger(); - private static final int[] retryIntervals = new int[]{4, 12, 20, 20}; - 
private final Client client; + private static final Logger LOG = LogManager.getLogger(); + private static final int[] retryIntervals = new int[] {4, 12, 20, 20}; + private final Client client; - public OpenSearchClient(Client client) { - this.client = client; - } + public OpenSearchClient(Client client) { + this.client = client; + } - public MultiSearchResponse.Item[] multiSearch(MultiSearchRequest multiSearchRequest) { - MultiSearchResponse.Item[] responses = new MultiSearchResponse.Item[multiSearchRequest.requests().size()]; - multiSearchRetry(responses, multiSearchRequest, - IntStream.range(0, multiSearchRequest.requests().size()).boxed().collect(Collectors.toList()), 0); + public MultiSearchResponse.Item[] multiSearch(MultiSearchRequest multiSearchRequest) { + MultiSearchResponse.Item[] responses = + new MultiSearchResponse.Item[multiSearchRequest.requests().size()]; + multiSearchRetry( + responses, + multiSearchRequest, + IntStream.range(0, multiSearchRequest.requests().size()) + .boxed() + .collect(Collectors.toList()), + 0); - return responses; - } + return responses; + } - private void multiSearchRetry(MultiSearchResponse.Item[] responses, MultiSearchRequest multiSearchRequest, - List indices, int retry) { - MultiSearchRequest multiSearchRequestRetry = new MultiSearchRequest(); - for (int i : indices) { - multiSearchRequestRetry.add(multiSearchRequest.requests().get(i)); - } - MultiSearchResponse.Item[] res = client.multiSearch(multiSearchRequestRetry).actionGet().getResponses(); - List indicesFailure = new ArrayList<>(); - //Could get EsRejectedExecutionException and OpenSearchException as getCause - for (int i = 0; i < res.length; i++) { - if (res[i].isFailure()) { - indicesFailure.add(indices.get(i)); - if (retry == 3) { - responses[indices.get(i)] = res[i]; - } - } else { - responses[indices.get(i)] = res[i]; - } - } - if (!indicesFailure.isEmpty()) { - LOG.info("OpenSearch multisearch has failures on retry {}", retry); - if (retry < 3) { - 
BackOffRetryStrategy.backOffSleep(retryIntervals[retry]); - multiSearchRetry(responses, multiSearchRequest, indicesFailure, retry + 1); - } + private void multiSearchRetry( + MultiSearchResponse.Item[] responses, + MultiSearchRequest multiSearchRequest, + List indices, + int retry) { + MultiSearchRequest multiSearchRequestRetry = new MultiSearchRequest(); + for (int i : indices) { + multiSearchRequestRetry.add(multiSearchRequest.requests().get(i)); + } + MultiSearchResponse.Item[] res = + client.multiSearch(multiSearchRequestRetry).actionGet().getResponses(); + List indicesFailure = new ArrayList<>(); + // Could get EsRejectedExecutionException and OpenSearchException as getCause + for (int i = 0; i < res.length; i++) { + if (res[i].isFailure()) { + indicesFailure.add(indices.get(i)); + if (retry == 3) { + responses[indices.get(i)] = res[i]; } + } else { + responses[indices.get(i)] = res[i]; + } + } + if (!indicesFailure.isEmpty()) { + LOG.info("OpenSearch multisearch has failures on retry {}", retry); + if (retry < 3) { + BackOffRetryStrategy.backOffSleep(retryIntervals[retry]); + multiSearchRetry(responses, multiSearchRequest, indicesFailure, retry + 1); + } } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMapping.java b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMapping.java index bc6c26a6d6..89f8f9ac89 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMapping.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMapping.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.esdomain.mapping; import static java.util.Collections.emptyMap; @@ -16,121 +15,119 @@ /** * Field mapping that parses native OpenSearch mapping. - *

- * NOTE that approaches in this class are NOT reliable because of the OpenSearch mapping query API used. - * We should deprecate this in future and parse field mapping in more solid way. + * + *

NOTE that approaches in this class are NOT reliable because of the OpenSearch mapping query + * API used. We should deprecate this in future and parse field mapping in more solid way. */ public class FieldMapping { - /** - * Name of the Field to be parsed - */ - private final String fieldName; - - /** - * Native mapping information returned from OpenSearch - */ - private final Map typeMappings; - - /** - * Maps a field name to Field object that specified in query explicitly - */ - private final Map specifiedFieldsByName; - - public FieldMapping(String fieldName) { - this(fieldName, emptyMap(), emptyMap()); - } - - public FieldMapping(String fieldName, - Map typeMappings, - Map specifiedFieldByNames) { - - this.fieldName = fieldName; - this.typeMappings = typeMappings; - this.specifiedFieldsByName = specifiedFieldByNames; - } - - /** - * Is field specified explicitly in query - * - * @return true if specified - */ - public boolean isSpecified() { - return specifiedFieldsByName.containsKey(fieldName); - } - - /** - * Verify if property field matches wildcard pattern specified in query - * - * @return true if matched - */ - public boolean isWildcardSpecified() { - return specifiedFieldsByName.containsKey(path() + ".*"); - } - - /** - * Is field a property field, which means either object field or nested field. 
- * - * @return true for property field - */ - public boolean isPropertyField() { - int numOfDots = StringUtils.countMatches(fieldName, '.'); - return numOfDots > 1 || (numOfDots == 1 && !isMultiField()); - } - - /** - * Is field a/in multi-field, for example, field "a.keyword" in field "a" - * - * @return true for multi field - */ - public boolean isMultiField() { - return fieldName.endsWith(".keyword"); + /** Name of the Field to be parsed */ + private final String fieldName; + + /** Native mapping information returned from OpenSearch */ + private final Map typeMappings; + + /** Maps a field name to Field object that specified in query explicitly */ + private final Map specifiedFieldsByName; + + public FieldMapping(String fieldName) { + this(fieldName, emptyMap(), emptyMap()); + } + + public FieldMapping( + String fieldName, + Map typeMappings, + Map specifiedFieldByNames) { + + this.fieldName = fieldName; + this.typeMappings = typeMappings; + this.specifiedFieldsByName = specifiedFieldByNames; + } + + /** + * Is field specified explicitly in query + * + * @return true if specified + */ + public boolean isSpecified() { + return specifiedFieldsByName.containsKey(fieldName); + } + + /** + * Verify if property field matches wildcard pattern specified in query + * + * @return true if matched + */ + public boolean isWildcardSpecified() { + return specifiedFieldsByName.containsKey(path() + ".*"); + } + + /** + * Is field a property field, which means either object field or nested field. + * + * @return true for property field + */ + public boolean isPropertyField() { + int numOfDots = StringUtils.countMatches(fieldName, '.'); + return numOfDots > 1 || (numOfDots == 1 && !isMultiField()); + } + + /** + * Is field a/in multi-field, for example, field "a.keyword" in field "a" + * + * @return true for multi field + */ + public boolean isMultiField() { + return fieldName.endsWith(".keyword"); + } + + /** + * Is field meta field, such as _id, _index, _source etc. 
+ * + * @return true for meta field + */ + public boolean isMetaField() { + return fieldName.startsWith("_"); + } + + /** + * Path of property field, for example "employee" in "employee.manager" + * + * @return path of property field + */ + public String path() { + int lastDot = fieldName.lastIndexOf("."); + if (lastDot == -1) { + throw new IllegalStateException( + "path() is being invoked on the wrong field [" + fieldName + "]"); } - - /** - * Is field meta field, such as _id, _index, _source etc. - * - * @return true for meta field - */ - public boolean isMetaField() { - return fieldName.startsWith("_"); - } - - /** - * Path of property field, for example "employee" in "employee.manager" - * - * @return path of property field - */ - public String path() { - int lastDot = fieldName.lastIndexOf("."); - if (lastDot == -1) { - throw new IllegalStateException("path() is being invoked on the wrong field [" + fieldName + "]"); - } - return fieldName.substring(0, lastDot); - } - - /** - * Find field type in OpenSearch Get Field Mapping API response. Note that Get Field Mapping API does NOT return - * the type for object or nested field. In this case, object type is used as default under the assumption - * that the field queried here must exist (which is true if semantic analyzer is enabled). - * - * @return field type if found in mapping, otherwise "object" type returned - */ - @SuppressWarnings("unchecked") - public String type() { - FieldMappingMetadata metaData = typeMappings.get(fieldName); - if (metaData == null) { - return DescribeResultSet.DEFAULT_OBJECT_DATATYPE; - } - - Map source = metaData.sourceAsMap(); - String[] fieldPath = fieldName.split("\\."); - - // For object/nested field, fieldName is full path though only innermost field name present in mapping - // For example, fieldName='employee.location.city', metaData='{"city":{"type":"text"}}' - String innermostFieldName = (fieldPath.length == 1) ? 
fieldName : fieldPath[fieldPath.length - 1]; - Map fieldMapping = (Map) source.get(innermostFieldName); - return (String) fieldMapping.get("type"); + return fieldName.substring(0, lastDot); + } + + /** + * Find field type in OpenSearch Get Field Mapping API response. Note that Get Field Mapping API + * does NOT return the type for object or nested field. In this case, object type is used as + * default under the assumption that the field queried here must exist (which is true if semantic + * analyzer is enabled). + * + * @return field type if found in mapping, otherwise "object" type returned + */ + @SuppressWarnings("unchecked") + public String type() { + FieldMappingMetadata metaData = typeMappings.get(fieldName); + if (metaData == null) { + return DescribeResultSet.DEFAULT_OBJECT_DATATYPE; } + Map source = metaData.sourceAsMap(); + String[] fieldPath = fieldName.split("\\."); + + // For object/nested field, fieldName is full path though only innermost field name present in + // mapping + // For example, fieldName='employee.location.city', metaData='{"city":{"type":"text"}}' + String innermostFieldName = + (fieldPath.length == 1) ? fieldName : fieldPath[fieldPath.length - 1]; + Map fieldMapping = (Map) source.get(innermostFieldName); + return (String) fieldMapping.get("type"); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappings.java b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappings.java index 6f73da62e4..bff494ed0e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappings.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappings.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.esdomain.mapping; import java.util.HashMap; @@ -16,132 +15,106 @@ /** * Field mappings in a specific type. - *

- * Sample: - * fieldMappings: { - * 'properties': { - * 'balance': { - * 'type': long - * }, - * 'age': { - * 'type': integer - * }, - * 'state': { - * 'type': text - * } - * 'name': { - * 'type': text - * 'fields': { - * 'keyword': { - * 'type': keyword, - * 'ignore_above': 256 - * } - * } - * } - * } - * } + * + *

Sample: fieldMappings: { 'properties': { 'balance': { 'type': long }, 'age': { 'type': integer + * }, 'state': { 'type': text } 'name': { 'type': text 'fields': { 'keyword': { 'type': keyword, + * 'ignore_above': 256 } } } } } */ @SuppressWarnings("unchecked") public class FieldMappings implements Mappings> { - private static final String PROPERTIES = "properties"; - - /** - * Mapping from field name to its type - */ - private final Map fieldMappings; - - public FieldMappings(MappingMetadata mappings) { - fieldMappings = mappings.sourceAsMap(); - } + private static final String PROPERTIES = "properties"; - public FieldMappings(Map> mapping) { - Map finalMapping = new HashMap<>(); - finalMapping.put(PROPERTIES, mapping); - fieldMappings = finalMapping; - } - - @Override - public boolean has(String path) { - return mapping(path) != null; - } + /** Mapping from field name to its type */ + private final Map fieldMappings; - /** - * Different from default implementation that search mapping for path is required - */ - @Override - public Map mapping(String path) { - Map mapping = fieldMappings; - for (String name : path.split("\\.")) { - if (mapping == null || !mapping.containsKey(PROPERTIES)) { - return null; - } - - mapping = (Map) - ((Map) mapping.get(PROPERTIES)).get(name); - } - return mapping; - } + public FieldMappings(MappingMetadata mappings) { + fieldMappings = mappings.sourceAsMap(); + } - @Override - public Map> data() { - // Is this assumption true? Is it possible mapping of field is NOT a Map? 
- return (Map>) fieldMappings.get(PROPERTIES); - } + public FieldMappings(Map> mapping) { + Map finalMapping = new HashMap<>(); + finalMapping.put(PROPERTIES, mapping); + fieldMappings = finalMapping; + } - public void flat(BiConsumer func) { - flatMappings(data(), Optional.empty(), func); - } + @Override + public boolean has(String path) { + return mapping(path) != null; + } - @SuppressWarnings("unchecked") - private void flatMappings(Map> mappings, - Optional path, - BiConsumer func) { - mappings.forEach( - (fieldName, mapping) -> { - String fullFieldName = path.map(s -> s + "." + fieldName).orElse(fieldName); - String type = (String) mapping.getOrDefault("type", "object"); - func.accept(fullFieldName, type); - - if (mapping.containsKey("fields")) { - ((Map>) mapping.get("fields")).forEach( - (innerFieldName, innerMapping) -> - func.accept(fullFieldName + "." + innerFieldName, - (String) innerMapping.getOrDefault("type", "object")) - ); - } - - if (mapping.containsKey("properties")) { - flatMappings( - (Map>) mapping.get("properties"), - Optional.of(fullFieldName), - func - ); - } - } - ); - } + /** Different from default implementation that search mapping for path is required */ + @Override + public Map mapping(String path) { + Map mapping = fieldMappings; + for (String name : path.split("\\.")) { + if (mapping == null || !mapping.containsKey(PROPERTIES)) { + return null; + } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - FieldMappings that = (FieldMappings) o; - return Objects.equals(fieldMappings, that.fieldMappings); + mapping = (Map) ((Map) mapping.get(PROPERTIES)).get(name); } - - @Override - public int hashCode() { - return Objects.hash(fieldMappings); + return mapping; + } + + @Override + public Map> data() { + // Is this assumption true? Is it possible mapping of field is NOT a Map? 
+ return (Map>) fieldMappings.get(PROPERTIES); + } + + public void flat(BiConsumer func) { + flatMappings(data(), Optional.empty(), func); + } + + @SuppressWarnings("unchecked") + private void flatMappings( + Map> mappings, + Optional path, + BiConsumer func) { + mappings.forEach( + (fieldName, mapping) -> { + String fullFieldName = path.map(s -> s + "." + fieldName).orElse(fieldName); + String type = (String) mapping.getOrDefault("type", "object"); + func.accept(fullFieldName, type); + + if (mapping.containsKey("fields")) { + ((Map>) mapping.get("fields")) + .forEach( + (innerFieldName, innerMapping) -> + func.accept( + fullFieldName + "." + innerFieldName, + (String) innerMapping.getOrDefault("type", "object"))); + } + + if (mapping.containsKey("properties")) { + flatMappings( + (Map>) mapping.get("properties"), + Optional.of(fullFieldName), + func); + } + }); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; } - - @Override - public String toString() { - return "FieldMappings" + new JSONObject(fieldMappings).toString(2); + if (o == null || getClass() != o.getClass()) { + return false; } - + FieldMappings that = (FieldMappings) o; + return Objects.equals(fieldMappings, that.fieldMappings); + } + + @Override + public int hashCode() { + return Objects.hash(fieldMappings); + } + + @Override + public String toString() { + return "FieldMappings" + new JSONObject(fieldMappings).toString(2); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/IndexMappings.java b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/IndexMappings.java index 3b89eef02f..dd09713d64 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/IndexMappings.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/IndexMappings.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.esdomain.mapping; import static 
java.util.Collections.emptyMap; @@ -15,68 +14,63 @@ /** * Index mappings in the cluster. - *

- * Sample: - * indexMappings: { - * 'accounts': typeMappings1, - * 'logs': typeMappings2 - * } - *

- * Difference between response of getMapping/clusterState and getFieldMapping: - *

- * 1) MappingMetadata: - * ((Map) ((Map) (mapping.get("bank").get("account").sourceAsMap().get("properties"))).get("balance")).get("type") - *

- * 2) FieldMetadata: - * ((Map) client.admin().indices().getFieldMappings(request).actionGet().mappings().get("bank") + * + *

Sample: indexMappings: { 'accounts': typeMappings1, 'logs': typeMappings2 } + * + *

Difference between response of getMapping/clusterState and getFieldMapping: + * + *

1) MappingMetadata: ((Map) ((Map) + * (mapping.get("bank").get("account").sourceAsMap().get("properties"))).get("balance")).get("type") + * + *

2) FieldMetadata: ((Map) + * client.admin().indices().getFieldMappings(request).actionGet().mappings().get("bank") * .get("account").get("balance").sourceAsMap().get("balance")).get("type") */ public class IndexMappings implements Mappings { - public static final IndexMappings EMPTY = new IndexMappings(); + public static final IndexMappings EMPTY = new IndexMappings(); - /** - * Mapping from Index name to mappings of all fields in it - */ - private final Map indexMappings; + /** Mapping from Index name to mappings of all fields in it */ + private final Map indexMappings; - public IndexMappings() { - this.indexMappings = emptyMap(); - } + public IndexMappings() { + this.indexMappings = emptyMap(); + } - public IndexMappings(Metadata metaData) { - this.indexMappings = buildMappings(metaData.indices(), - indexMetaData -> new FieldMappings(indexMetaData.mapping())); - } + public IndexMappings(Metadata metaData) { + this.indexMappings = + buildMappings( + metaData.indices(), indexMetaData -> new FieldMappings(indexMetaData.mapping())); + } - public IndexMappings(Map mappings) { - this.indexMappings = buildMappings(mappings, FieldMappings::new); - } + public IndexMappings(Map mappings) { + this.indexMappings = buildMappings(mappings, FieldMappings::new); + } - @Override - public Map data() { - return indexMappings; - } + @Override + public Map data() { + return indexMappings; + } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - IndexMappings that = (IndexMappings) o; - return Objects.equals(indexMappings, that.indexMappings); + @Override + public boolean equals(Object o) { + if (this == o) { + return true; } - - @Override - public int hashCode() { - return Objects.hash(indexMappings); + if (o == null || getClass() != o.getClass()) { + return false; } + IndexMappings that = (IndexMappings) o; + return Objects.equals(indexMappings, that.indexMappings); + } - 
@Override - public String toString() { - return "IndexMappings{" + indexMappings + '}'; - } + @Override + public int hashCode() { + return Objects.hash(indexMappings); + } + + @Override + public String toString() { + return "IndexMappings{" + indexMappings + '}'; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/Mappings.java b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/Mappings.java index 03bfcaf030..3cf02b55d8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/Mappings.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/esdomain/mapping/Mappings.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.esdomain.mapping; import java.util.Collection; @@ -12,43 +11,43 @@ import java.util.stream.Collectors; /** - * Mappings interface to provide default implementation (minimal set of Map methods) for subclass in hierarchy. + * Mappings interface to provide default implementation (minimal set of Map methods) for subclass in + * hierarchy. 
* * @param Type of nested mapping */ public interface Mappings { - default boolean has(String name) { - return data().containsKey(name); - } + default boolean has(String name) { + return data().containsKey(name); + } - default Collection allNames() { - return data().keySet(); - } + default Collection allNames() { + return data().keySet(); + } - default T mapping(String name) { - return data().get(name); - } + default T mapping(String name) { + return data().get(name); + } - default T firstMapping() { - return allMappings().iterator().next(); - } + default T firstMapping() { + return allMappings().iterator().next(); + } - default Collection allMappings() { - return data().values(); - } + default Collection allMappings() { + return data().values(); + } - default boolean isEmpty() { - return data().isEmpty(); - } + default boolean isEmpty() { + return data().isEmpty(); + } - Map data(); + Map data(); - /** - * Build a map from an existing map by applying provided function to each value. - */ - default Map buildMappings(Map mappings, Function func) { - return mappings.entrySet().stream().collect( + /** Build a map from an existing map by applying provided function to each value. 
*/ + default Map buildMappings(Map mappings, Function func) { + return mappings.entrySet().stream() + .collect( Collectors.toUnmodifiableMap(Map.Entry::getKey, func.compose(Map.Entry::getValue))); - } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/exception/SQLFeatureDisabledException.java b/legacy/src/main/java/org/opensearch/sql/legacy/exception/SQLFeatureDisabledException.java index 52cdda3cdd..4578cd6c93 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/exception/SQLFeatureDisabledException.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/exception/SQLFeatureDisabledException.java @@ -3,15 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.exception; public class SQLFeatureDisabledException extends Exception { - private static final long serialVersionUID = 1L; - - public SQLFeatureDisabledException(String message) { - super(message); - } + private static final long serialVersionUID = 1L; + public SQLFeatureDisabledException(String message) { + super(message); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/exception/SqlFeatureNotImplementedException.java b/legacy/src/main/java/org/opensearch/sql/legacy/exception/SqlFeatureNotImplementedException.java index 9225986132..43ad6d97b5 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/exception/SqlFeatureNotImplementedException.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/exception/SqlFeatureNotImplementedException.java @@ -3,21 +3,20 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.exception; /** - * Intended for cases when we knowingly omitted some case, letting users know that we didn't implemented feature, but - * it may be implemented in future. + * Intended for cases when we knowingly omitted some case, letting users know that we didn't + * implemented feature, but it may be implemented in future. 
*/ public class SqlFeatureNotImplementedException extends RuntimeException { - private static final long serialVersionUID = 1; + private static final long serialVersionUID = 1; - public SqlFeatureNotImplementedException(String message) { - super(message); - } + public SqlFeatureNotImplementedException(String message) { + super(message); + } - public SqlFeatureNotImplementedException(String message, Throwable cause) { - super(message, cause); - } + public SqlFeatureNotImplementedException(String message, Throwable cause) { + super(message, cause); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/exception/SqlParseException.java b/legacy/src/main/java/org/opensearch/sql/legacy/exception/SqlParseException.java index c93ad2a2fa..a09ddc97d1 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/exception/SqlParseException.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/exception/SqlParseException.java @@ -3,16 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.exception; public class SqlParseException extends Exception { - public SqlParseException(String message) { - super(message); - } - - - private static final long serialVersionUID = 1L; + public SqlParseException(String message) { + super(message); + } + private static final long serialVersionUID = 1L; } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/ActionRequestRestExecutorFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/ActionRequestRestExecutorFactory.java index d56ff231e0..c58bba9e26 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/ActionRequestRestExecutorFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/ActionRequestRestExecutorFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor; import org.opensearch.sql.legacy.executor.csv.CSVResultRestExecutor; @@ -12,41 +11,37 @@ import 
org.opensearch.sql.legacy.query.join.OpenSearchJoinQueryAction; import org.opensearch.sql.legacy.query.multi.MultiQueryAction; -/** - * Created by Eliran on 26/12/2015. - */ +/** Created by Eliran on 26/12/2015. */ public class ActionRequestRestExecutorFactory { - /** - * Create executor based on the format and wrap with AsyncRestExecutor - * to async blocking execute() call if necessary. - * - * @param format format of response - * @param queryAction query action - * @return executor - */ - public static RestExecutor createExecutor(Format format, QueryAction queryAction) { - switch (format) { - case CSV: - return new AsyncRestExecutor(new CSVResultRestExecutor()); - case JSON: - return new AsyncRestExecutor( - new ElasticDefaultRestExecutor(queryAction), - action -> isJoin(action) || isUnionMinus(action) - ); - case JDBC: - case RAW: - case TABLE: - default: - return new AsyncRestExecutor(new PrettyFormatRestExecutor(format.getFormatName())); - } + /** + * Create executor based on the format and wrap with AsyncRestExecutor to async blocking execute() + * call if necessary. 
+ * + * @param format format of response + * @param queryAction query action + * @return executor + */ + public static RestExecutor createExecutor(Format format, QueryAction queryAction) { + switch (format) { + case CSV: + return new AsyncRestExecutor(new CSVResultRestExecutor()); + case JSON: + return new AsyncRestExecutor( + new ElasticDefaultRestExecutor(queryAction), + action -> isJoin(action) || isUnionMinus(action)); + case JDBC: + case RAW: + case TABLE: + default: + return new AsyncRestExecutor(new PrettyFormatRestExecutor(format.getFormatName())); } + } - private static boolean isJoin(QueryAction queryAction) { - return queryAction instanceof OpenSearchJoinQueryAction; - } - - private static boolean isUnionMinus(QueryAction queryAction) { - return queryAction instanceof MultiQueryAction; - } + private static boolean isJoin(QueryAction queryAction) { + return queryAction instanceof OpenSearchJoinQueryAction; + } + private static boolean isUnionMinus(QueryAction queryAction) { + return queryAction instanceof MultiQueryAction; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/AsyncRestExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/AsyncRestExecutor.java index 1df0036bab..4fdf6391bd 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/AsyncRestExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/AsyncRestExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor; import java.io.IOException; @@ -15,9 +14,9 @@ import org.opensearch.OpenSearchException; import org.opensearch.client.Client; import org.opensearch.common.unit.TimeValue; +import org.opensearch.core.rest.RestStatus; import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestChannel; -import org.opensearch.core.rest.RestStatus; import org.opensearch.sql.common.setting.Settings; import org.opensearch.sql.common.utils.QueryContext; import 
org.opensearch.sql.legacy.esdomain.LocalClusterState; @@ -29,135 +28,141 @@ import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.Transports; -/** - * A RestExecutor wrapper to execute request asynchronously to avoid blocking transport thread. - */ +/** A RestExecutor wrapper to execute request asynchronously to avoid blocking transport thread. */ public class AsyncRestExecutor implements RestExecutor { - /** - * Custom thread pool name managed by OpenSearch - */ - public static final String SQL_WORKER_THREAD_POOL_NAME = "sql-worker"; - - private static final Logger LOG = LogManager.getLogger(AsyncRestExecutor.class); - - /** - * Treat all actions as blocking which means async all actions, - * ex. execute() in csv executor or pretty format executor - */ - private static final Predicate ALL_ACTION_IS_BLOCKING = anyAction -> true; - - /** - * Delegated rest executor to async - */ - private final RestExecutor executor; - - /** - * Request type that expect to async to avoid blocking - */ - private final Predicate isBlocking; - - - AsyncRestExecutor(RestExecutor executor) { - this(executor, ALL_ACTION_IS_BLOCKING); - } - - AsyncRestExecutor(RestExecutor executor, Predicate isBlocking) { - this.executor = executor; - this.isBlocking = isBlocking; + /** Custom thread pool name managed by OpenSearch */ + public static final String SQL_WORKER_THREAD_POOL_NAME = "sql-worker"; + + private static final Logger LOG = LogManager.getLogger(AsyncRestExecutor.class); + + /** + * Treat all actions as blocking which means async all actions, ex. 
execute() in csv executor or + * pretty format executor + */ + private static final Predicate ALL_ACTION_IS_BLOCKING = anyAction -> true; + + /** Delegated rest executor to async */ + private final RestExecutor executor; + + /** Request type that expect to async to avoid blocking */ + private final Predicate isBlocking; + + AsyncRestExecutor(RestExecutor executor) { + this(executor, ALL_ACTION_IS_BLOCKING); + } + + AsyncRestExecutor(RestExecutor executor, Predicate isBlocking) { + this.executor = executor; + this.isBlocking = isBlocking; + } + + @Override + public void execute( + Client client, Map params, QueryAction queryAction, RestChannel channel) + throws Exception { + if (isBlockingAction(queryAction) && isRunningInTransportThread()) { + if (LOG.isDebugEnabled()) { + LOG.debug( + "[{}] Async blocking query action [{}] for executor [{}] in current thread [{}]", + QueryContext.getRequestId(), + name(executor), + name(queryAction), + Thread.currentThread().getName()); + } + async(client, params, queryAction, channel); + } else { + if (LOG.isDebugEnabled()) { + LOG.debug( + "[{}] Continue running query action [{}] for executor [{}] in current thread [{}]", + QueryContext.getRequestId(), + name(executor), + name(queryAction), + Thread.currentThread().getName()); + } + doExecuteWithTimeMeasured(client, params, queryAction, channel); } - - @Override - public void execute(Client client, Map params, QueryAction queryAction, RestChannel channel) - throws Exception { - if (isBlockingAction(queryAction) && isRunningInTransportThread()) { - if (LOG.isDebugEnabled()) { - LOG.debug("[{}] Async blocking query action [{}] for executor [{}] in current thread [{}]", - QueryContext.getRequestId(), name(executor), name(queryAction), Thread.currentThread().getName()); - } - async(client, params, queryAction, channel); - } else { - if (LOG.isDebugEnabled()) { - LOG.debug("[{}] Continue running query action [{}] for executor [{}] in current thread [{}]", - 
QueryContext.getRequestId(), name(executor), name(queryAction), Thread.currentThread().getName()); - } + } + + @Override + public String execute(Client client, Map params, QueryAction queryAction) + throws Exception { + // Result is always required and no easy way to async it here. + return executor.execute(client, params, queryAction); + } + + private boolean isBlockingAction(QueryAction queryAction) { + return isBlocking.test(queryAction); + } + + private boolean isRunningInTransportThread() { + return Transports.isTransportThread(Thread.currentThread()); + } + + /** Run given task in thread pool asynchronously */ + private void async( + Client client, Map params, QueryAction queryAction, RestChannel channel) { + + ThreadPool threadPool = client.threadPool(); + Runnable runnable = + () -> { + try { doExecuteWithTimeMeasured(client, params, queryAction, channel); - } - } - - @Override - public String execute(Client client, Map params, QueryAction queryAction) throws Exception { - // Result is always required and no easy way to async it here. 
- return executor.execute(client, params, queryAction); - } - - private boolean isBlockingAction(QueryAction queryAction) { - return isBlocking.test(queryAction); - } - - private boolean isRunningInTransportThread() { - return Transports.isTransportThread(Thread.currentThread()); - } - - /** - * Run given task in thread pool asynchronously - */ - private void async(Client client, Map params, QueryAction queryAction, RestChannel channel) { - - ThreadPool threadPool = client.threadPool(); - Runnable runnable = () -> { - try { - doExecuteWithTimeMeasured(client, params, queryAction, channel); - } catch (IOException | SqlParseException | OpenSearchException e) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - LOG.warn("[{}] [MCB] async task got an IO/SQL exception: {}", QueryContext.getRequestId(), - e.getMessage()); - channel.sendResponse(new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); - } catch (IllegalStateException e) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - LOG.warn("[{}] [MCB] async task got a runtime exception: {}", QueryContext.getRequestId(), - e.getMessage()); - channel.sendResponse(new BytesRestResponse(RestStatus.INSUFFICIENT_STORAGE, - "Memory circuit is broken.")); - } catch (Throwable t) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - LOG.warn("[{}] [MCB] async task got an unknown throwable: {}", QueryContext.getRequestId(), - t.getMessage()); - channel.sendResponse(new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, - String.valueOf(t.getMessage()))); - } finally { - BackOffRetryStrategy.releaseMem(executor); - } + } catch (IOException | SqlParseException | OpenSearchException e) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + LOG.warn( + "[{}] [MCB] async task got an IO/SQL exception: {}", + QueryContext.getRequestId(), + e.getMessage()); + 
channel.sendResponse( + new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); + } catch (IllegalStateException e) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + LOG.warn( + "[{}] [MCB] async task got a runtime exception: {}", + QueryContext.getRequestId(), + e.getMessage()); + channel.sendResponse( + new BytesRestResponse( + RestStatus.INSUFFICIENT_STORAGE, "Memory circuit is broken.")); + } catch (Throwable t) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + LOG.warn( + "[{}] [MCB] async task got an unknown throwable: {}", + QueryContext.getRequestId(), + t.getMessage()); + channel.sendResponse( + new BytesRestResponse( + RestStatus.INTERNAL_SERVER_ERROR, String.valueOf(t.getMessage()))); + } finally { + BackOffRetryStrategy.releaseMem(executor); + } }; - // Preserve context of calling thread to ensure headers of requests are forwarded when running blocking actions - threadPool.schedule( - QueryContext.withCurrentContext(runnable), - new TimeValue(0L), - SQL_WORKER_THREAD_POOL_NAME - ); + // Preserve context of calling thread to ensure headers of requests are forwarded when running + // blocking actions + threadPool.schedule( + QueryContext.withCurrentContext(runnable), new TimeValue(0L), SQL_WORKER_THREAD_POOL_NAME); + } + + /** Time the real execution of Executor and log slow query for troubleshooting */ + private void doExecuteWithTimeMeasured( + Client client, Map params, QueryAction action, RestChannel channel) + throws Exception { + long startTime = System.nanoTime(); + try { + executor.execute(client, params, action, channel); + } finally { + Duration elapsed = Duration.ofNanos(System.nanoTime() - startTime); + int slowLogThreshold = LocalClusterState.state().getSettingValue(Settings.Key.SQL_SLOWLOG); + if (elapsed.getSeconds() >= slowLogThreshold) { + LOG.warn( + "[{}] Slow query: elapsed={} (ms)", QueryContext.getRequestId(), elapsed.toMillis()); 
+ } } + } - /** - * Time the real execution of Executor and log slow query for troubleshooting - */ - private void doExecuteWithTimeMeasured(Client client, - Map params, - QueryAction action, - RestChannel channel) throws Exception { - long startTime = System.nanoTime(); - try { - executor.execute(client, params, action, channel); - } finally { - Duration elapsed = Duration.ofNanos(System.nanoTime() - startTime); - int slowLogThreshold = LocalClusterState.state().getSettingValue(Settings.Key.SQL_SLOWLOG); - if (elapsed.getSeconds() >= slowLogThreshold) { - LOG.warn("[{}] Slow query: elapsed={} (ms)", QueryContext.getRequestId(), elapsed.toMillis()); - } - } - } - - private String name(Object object) { - return object.getClass().getSimpleName(); - } + private String name(Object object) { + return object.getClass().getSimpleName(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticDefaultRestExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticDefaultRestExecutor.java index 7ba5f384c0..54c4dd5abb 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticDefaultRestExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticDefaultRestExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor; import com.google.common.collect.Maps; @@ -23,7 +22,6 @@ import org.opensearch.index.reindex.DeleteByQueryRequest; import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestChannel; -import org.opensearch.core.rest.RestStatus; import org.opensearch.rest.action.RestStatusToXContentListener; import org.opensearch.search.SearchHits; import org.opensearch.sql.legacy.exception.SqlParseException; @@ -36,90 +34,94 @@ import org.opensearch.sql.legacy.query.join.JoinRequestBuilder; import org.opensearch.sql.legacy.query.multi.MultiQueryRequestBuilder; - public class ElasticDefaultRestExecutor implements RestExecutor 
{ - /** - * Request builder to generate OpenSearch DSL - */ - private final SqlElasticRequestBuilder requestBuilder; + /** Request builder to generate OpenSearch DSL */ + private final SqlElasticRequestBuilder requestBuilder; - private static final Logger LOG = LogManager.getLogger(ElasticDefaultRestExecutor.class); + private static final Logger LOG = LogManager.getLogger(ElasticDefaultRestExecutor.class); - public ElasticDefaultRestExecutor(QueryAction queryAction) { - // Put explain() here to make it run in NIO thread - try { - this.requestBuilder = queryAction.explain(); - } catch (SqlParseException e) { - throw new IllegalStateException("Failed to explain query action", e); - } + public ElasticDefaultRestExecutor(QueryAction queryAction) { + // Put explain() here to make it run in NIO thread + try { + this.requestBuilder = queryAction.explain(); + } catch (SqlParseException e) { + throw new IllegalStateException("Failed to explain query action", e); } + } - /** - * Execute the ActionRequest and returns the REST response using the channel. - */ - @Override - public void execute(Client client, Map params, QueryAction queryAction, RestChannel channel) - throws Exception { - ActionRequest request = requestBuilder.request(); + /** Execute the ActionRequest and returns the REST response using the channel. 
*/ + @Override + public void execute( + Client client, Map params, QueryAction queryAction, RestChannel channel) + throws Exception { + ActionRequest request = requestBuilder.request(); - if (requestBuilder instanceof JoinRequestBuilder) { - ElasticJoinExecutor executor = ElasticJoinExecutor.createJoinExecutor(client, requestBuilder); - executor.run(); - executor.sendResponse(channel); - } else if (requestBuilder instanceof MultiQueryRequestBuilder) { - ElasticHitsExecutor executor = MultiRequestExecutorFactory.createExecutor(client, - (MultiQueryRequestBuilder) requestBuilder); - executor.run(); - sendDefaultResponse(executor.getHits(), channel); - } else if (request instanceof SearchRequest) { - client.search((SearchRequest) request, new RestStatusToXContentListener<>(channel)); - } else if (request instanceof DeleteByQueryRequest) { - requestBuilder.getBuilder().execute( - new BulkIndexByScrollResponseContentListener(channel, Maps.newHashMap())); - } else if (request instanceof GetIndexRequest) { - requestBuilder.getBuilder().execute(new GetIndexRequestRestListener(channel, (GetIndexRequest) request)); - } else if (request instanceof SearchScrollRequest) { - client.searchScroll((SearchScrollRequest) request, new RestStatusToXContentListener<>(channel)); - } else { - throw new Exception(String.format("Unsupported ActionRequest provided: %s", request.getClass().getName())); - } + if (requestBuilder instanceof JoinRequestBuilder) { + ElasticJoinExecutor executor = ElasticJoinExecutor.createJoinExecutor(client, requestBuilder); + executor.run(); + executor.sendResponse(channel); + } else if (requestBuilder instanceof MultiQueryRequestBuilder) { + ElasticHitsExecutor executor = + MultiRequestExecutorFactory.createExecutor( + client, (MultiQueryRequestBuilder) requestBuilder); + executor.run(); + sendDefaultResponse(executor.getHits(), channel); + } else if (request instanceof SearchRequest) { + client.search((SearchRequest) request, new 
RestStatusToXContentListener<>(channel)); + } else if (request instanceof DeleteByQueryRequest) { + requestBuilder + .getBuilder() + .execute(new BulkIndexByScrollResponseContentListener(channel, Maps.newHashMap())); + } else if (request instanceof GetIndexRequest) { + requestBuilder + .getBuilder() + .execute(new GetIndexRequestRestListener(channel, (GetIndexRequest) request)); + } else if (request instanceof SearchScrollRequest) { + client.searchScroll( + (SearchScrollRequest) request, new RestStatusToXContentListener<>(channel)); + } else { + throw new Exception( + String.format("Unsupported ActionRequest provided: %s", request.getClass().getName())); } + } - @Override - public String execute(Client client, Map params, QueryAction queryAction) throws Exception { - ActionRequest request = requestBuilder.request(); - - if (requestBuilder instanceof JoinRequestBuilder) { - ElasticJoinExecutor executor = ElasticJoinExecutor.createJoinExecutor(client, requestBuilder); - executor.run(); - return ElasticUtils.hitsAsStringResult(executor.getHits(), new MetaSearchResult()); - } else if (requestBuilder instanceof MultiQueryRequestBuilder) { - ElasticHitsExecutor executor = MultiRequestExecutorFactory.createExecutor(client, - (MultiQueryRequestBuilder) requestBuilder); - executor.run(); - return ElasticUtils.hitsAsStringResult(executor.getHits(), new MetaSearchResult()); - } else if (request instanceof SearchRequest) { - ActionFuture future = client.search((SearchRequest) request); - SearchResponse response = future.actionGet(); - return response.toString(); - } else if (request instanceof DeleteByQueryRequest) { - return requestBuilder.get().toString(); - } else if (request instanceof GetIndexRequest) { - return requestBuilder.getBuilder().execute().actionGet().toString(); - } else { - throw new Exception(String.format("Unsupported ActionRequest provided: %s", request.getClass().getName())); - } + @Override + public String execute(Client client, Map params, QueryAction 
queryAction) + throws Exception { + ActionRequest request = requestBuilder.request(); + if (requestBuilder instanceof JoinRequestBuilder) { + ElasticJoinExecutor executor = ElasticJoinExecutor.createJoinExecutor(client, requestBuilder); + executor.run(); + return ElasticUtils.hitsAsStringResult(executor.getHits(), new MetaSearchResult()); + } else if (requestBuilder instanceof MultiQueryRequestBuilder) { + ElasticHitsExecutor executor = + MultiRequestExecutorFactory.createExecutor( + client, (MultiQueryRequestBuilder) requestBuilder); + executor.run(); + return ElasticUtils.hitsAsStringResult(executor.getHits(), new MetaSearchResult()); + } else if (request instanceof SearchRequest) { + ActionFuture future = client.search((SearchRequest) request); + SearchResponse response = future.actionGet(); + return response.toString(); + } else if (request instanceof DeleteByQueryRequest) { + return requestBuilder.get().toString(); + } else if (request instanceof GetIndexRequest) { + return requestBuilder.getBuilder().execute().actionGet().toString(); + } else { + throw new Exception( + String.format("Unsupported ActionRequest provided: %s", request.getClass().getName())); } + } - private void sendDefaultResponse(SearchHits hits, RestChannel channel) { - try { - String json = ElasticUtils.hitsAsStringResult(hits, new MetaSearchResult()); - BytesRestResponse bytesRestResponse = new BytesRestResponse(RestStatus.OK, json); - channel.sendResponse(bytesRestResponse); - } catch (IOException e) { - e.printStackTrace(); - } + private void sendDefaultResponse(SearchHits hits, RestChannel channel) { + try { + String json = ElasticUtils.hitsAsStringResult(hits, new MetaSearchResult()); + BytesRestResponse bytesRestResponse = new BytesRestResponse(RestStatus.OK, json); + channel.sendResponse(bytesRestResponse); + } catch (IOException e) { + e.printStackTrace(); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticHitsExecutor.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticHitsExecutor.java index c48eb673bd..62a6d63ef7 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticHitsExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticHitsExecutor.java @@ -3,18 +3,15 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor; import java.io.IOException; import org.opensearch.search.SearchHits; import org.opensearch.sql.legacy.exception.SqlParseException; -/** - * Created by Eliran on 21/8/2016. - */ +/** Created by Eliran on 21/8/2016. */ public interface ElasticHitsExecutor { - void run() throws IOException, SqlParseException; + void run() throws IOException, SqlParseException; - SearchHits getHits(); + SearchHits getHits(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticResultHandler.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticResultHandler.java index ff241fce77..6f753a5e7c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticResultHandler.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/ElasticResultHandler.java @@ -3,38 +3,34 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor; import java.util.Map; import org.opensearch.search.SearchHit; -/** - * Created by Eliran on 3/10/2015. - */ +/** Created by Eliran on 3/10/2015. 
*/ public class ElasticResultHandler { - public static Object getFieldValue(SearchHit hit, String field) { - return deepSearchInMap(hit.getSourceAsMap(), field); - } + public static Object getFieldValue(SearchHit hit, String field) { + return deepSearchInMap(hit.getSourceAsMap(), field); + } - private static Object deepSearchInMap(Map fieldsMap, String name) { - if (name.contains(".")) { - String[] path = name.split("\\."); - Map currentObject = fieldsMap; - for (int i = 0; i < path.length - 1; i++) { - Object valueFromCurrentMap = currentObject.get(path[i]); - if (valueFromCurrentMap == null) { - return null; - } - if (!Map.class.isAssignableFrom(valueFromCurrentMap.getClass())) { - return null; - } - currentObject = (Map) valueFromCurrentMap; - } - return currentObject.get(path[path.length - 1]); + private static Object deepSearchInMap(Map fieldsMap, String name) { + if (name.contains(".")) { + String[] path = name.split("\\."); + Map currentObject = fieldsMap; + for (int i = 0; i < path.length - 1; i++) { + Object valueFromCurrentMap = currentObject.get(path[i]); + if (valueFromCurrentMap == null) { + return null; } - - return fieldsMap.get(name); + if (!Map.class.isAssignableFrom(valueFromCurrentMap.getClass())) { + return null; + } + currentObject = (Map) valueFromCurrentMap; + } + return currentObject.get(path[path.length - 1]); } + return fieldsMap.get(name); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/Format.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/Format.java index 454babd2e9..c47092f10b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/Format.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/Format.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor; import com.google.common.collect.ImmutableMap; @@ -14,25 +13,25 @@ @RequiredArgsConstructor public enum Format { - JDBC("jdbc"), - JSON("json"), - CSV("csv"), - 
RAW("raw"), - TABLE("table"); + JDBC("jdbc"), + JSON("json"), + CSV("csv"), + RAW("raw"), + TABLE("table"); - @Getter - private final String formatName; + @Getter private final String formatName; - private static final Map ALL_FORMATS; - static { - ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); - for (Format format : Format.values()) { - builder.put(format.formatName, format); - } - ALL_FORMATS = builder.build(); - } + private static final Map ALL_FORMATS; - public static Optional of(String formatName) { - return Optional.ofNullable(ALL_FORMATS.getOrDefault(formatName, null)); + static { + ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); + for (Format format : Format.values()) { + builder.put(format.formatName, format); } + ALL_FORMATS = builder.build(); + } + + public static Optional of(String formatName) { + return Optional.ofNullable(ALL_FORMATS.getOrDefault(formatName, null)); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/GetIndexRequestRestListener.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/GetIndexRequestRestListener.java index 591319c74c..58808ee8f3 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/GetIndexRequestRestListener.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/GetIndexRequestRestListener.java @@ -3,100 +3,97 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor; import java.io.IOException; import java.util.List; - import org.opensearch.action.admin.indices.get.GetIndexRequest; import org.opensearch.action.admin.indices.get.GetIndexResponse; import org.opensearch.cluster.metadata.AliasMetadata; import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.common.settings.Settings; +import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.rest.BytesRestResponse; import 
org.opensearch.rest.RestChannel; import org.opensearch.rest.RestResponse; -import org.opensearch.core.rest.RestStatus; import org.opensearch.rest.action.RestBuilderListener; import org.opensearch.sql.legacy.antlr.semantic.SemanticAnalysisException; -/** - * Created by Eliran on 6/10/2015. - */ +/** Created by Eliran on 6/10/2015. */ public class GetIndexRequestRestListener extends RestBuilderListener { - private GetIndexRequest getIndexRequest; - - public GetIndexRequestRestListener(RestChannel channel, GetIndexRequest getIndexRequest) { - super(channel); - this.getIndexRequest = getIndexRequest; - } - - @Override - public RestResponse buildResponse(GetIndexResponse getIndexResponse, XContentBuilder builder) throws Exception { - GetIndexRequest.Feature[] features = getIndexRequest.features(); - String[] indices = getIndexResponse.indices(); - - builder.startObject(); - for (String index : indices) { - builder.startObject(index); - for (GetIndexRequest.Feature feature : features) { - switch (feature) { - case ALIASES: - writeAliases(getIndexResponse.aliases().get(index), builder, channel.request()); - break; - case MAPPINGS: - writeMappings(getIndexResponse.mappings().get(index), builder, channel.request()); - break; - case SETTINGS: - writeSettings(getIndexResponse.settings().get(index), builder, channel.request()); - break; - default: - throw new SemanticAnalysisException("Unsupported feature: " + feature); - } - } - builder.endObject(); - + private GetIndexRequest getIndexRequest; + + public GetIndexRequestRestListener(RestChannel channel, GetIndexRequest getIndexRequest) { + super(channel); + this.getIndexRequest = getIndexRequest; + } + + @Override + public RestResponse buildResponse(GetIndexResponse getIndexResponse, XContentBuilder builder) + throws Exception { + GetIndexRequest.Feature[] features = getIndexRequest.features(); + String[] indices = getIndexResponse.indices(); + + builder.startObject(); + for (String index : indices) { + 
builder.startObject(index); + for (GetIndexRequest.Feature feature : features) { + switch (feature) { + case ALIASES: + writeAliases(getIndexResponse.aliases().get(index), builder, channel.request()); + break; + case MAPPINGS: + writeMappings(getIndexResponse.mappings().get(index), builder, channel.request()); + break; + case SETTINGS: + writeSettings(getIndexResponse.settings().get(index), builder, channel.request()); + break; + default: + throw new SemanticAnalysisException("Unsupported feature: " + feature); } - builder.endObject(); - - return new BytesRestResponse(RestStatus.OK, builder); - } - - private void writeAliases(List aliases, XContentBuilder builder, ToXContent.Params params) - throws IOException { - builder.startObject(Fields.ALIASES); - if (aliases != null) { - for (AliasMetadata alias : aliases) { - AliasMetadata.Builder.toXContent(alias, builder, params); - } - } - builder.endObject(); - } - - private void writeSettings(Settings settings, XContentBuilder builder, ToXContent.Params params) - throws IOException { - builder.startObject(Fields.SETTINGS); - settings.toXContent(builder, params); - builder.endObject(); + } + builder.endObject(); } - - private void writeMappings(MappingMetadata mappingMetadata, - XContentBuilder builder, ToXContent.Params params) throws IOException { - if ( mappingMetadata != null) { - builder.field(Fields.MAPPINGS); - builder.map(mappingMetadata.getSourceAsMap()); - } + builder.endObject(); + + return new BytesRestResponse(RestStatus.OK, builder); + } + + private void writeAliases( + List aliases, XContentBuilder builder, ToXContent.Params params) + throws IOException { + builder.startObject(Fields.ALIASES); + if (aliases != null) { + for (AliasMetadata alias : aliases) { + AliasMetadata.Builder.toXContent(alias, builder, params); + } } - - - static class Fields { - static final String ALIASES = "aliases"; - static final String MAPPINGS = "mappings"; - static final String SETTINGS = "settings"; - static final String 
WARMERS = "warmers"; + builder.endObject(); + } + + private void writeSettings(Settings settings, XContentBuilder builder, ToXContent.Params params) + throws IOException { + builder.startObject(Fields.SETTINGS); + settings.toXContent(builder, params); + builder.endObject(); + } + + private void writeMappings( + MappingMetadata mappingMetadata, XContentBuilder builder, ToXContent.Params params) + throws IOException { + if (mappingMetadata != null) { + builder.field(Fields.MAPPINGS); + builder.map(mappingMetadata.getSourceAsMap()); } + } + + static class Fields { + static final String ALIASES = "aliases"; + static final String MAPPINGS = "mappings"; + static final String SETTINGS = "settings"; + static final String WARMERS = "warmers"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/QueryActionElasticExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/QueryActionElasticExecutor.java index bcb25fd39a..2e45fb45b7 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/QueryActionElasticExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/QueryActionElasticExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor; import java.io.IOException; @@ -31,81 +30,85 @@ import org.opensearch.sql.legacy.query.multi.MultiQueryAction; import org.opensearch.sql.legacy.query.multi.MultiQueryRequestBuilder; -/** - * Created by Eliran on 3/10/2015. - */ +/** Created by Eliran on 3/10/2015. 
*/ public class QueryActionElasticExecutor { - public static SearchHits executeSearchAction(DefaultQueryAction searchQueryAction) throws SqlParseException { - SqlOpenSearchRequestBuilder builder = searchQueryAction.explain(); - return ((SearchResponse) builder.get()).getHits(); - } + public static SearchHits executeSearchAction(DefaultQueryAction searchQueryAction) + throws SqlParseException { + SqlOpenSearchRequestBuilder builder = searchQueryAction.explain(); + return ((SearchResponse) builder.get()).getHits(); + } - public static SearchHits executeJoinSearchAction(Client client, OpenSearchJoinQueryAction joinQueryAction) - throws IOException, SqlParseException { - SqlElasticRequestBuilder joinRequestBuilder = joinQueryAction.explain(); - ElasticJoinExecutor executor = ElasticJoinExecutor.createJoinExecutor(client, joinRequestBuilder); - executor.run(); - return executor.getHits(); - } + public static SearchHits executeJoinSearchAction( + Client client, OpenSearchJoinQueryAction joinQueryAction) + throws IOException, SqlParseException { + SqlElasticRequestBuilder joinRequestBuilder = joinQueryAction.explain(); + ElasticJoinExecutor executor = + ElasticJoinExecutor.createJoinExecutor(client, joinRequestBuilder); + executor.run(); + return executor.getHits(); + } - public static Aggregations executeAggregationAction(AggregationQueryAction aggregationQueryAction) - throws SqlParseException { - SqlOpenSearchRequestBuilder select = aggregationQueryAction.explain(); - return ((SearchResponse) select.get()).getAggregations(); - } + public static Aggregations executeAggregationAction(AggregationQueryAction aggregationQueryAction) + throws SqlParseException { + SqlOpenSearchRequestBuilder select = aggregationQueryAction.explain(); + return ((SearchResponse) select.get()).getAggregations(); + } - public static List executeQueryPlanQueryAction(QueryPlanQueryAction queryPlanQueryAction) { - QueryPlanRequestBuilder select = (QueryPlanRequestBuilder) 
queryPlanQueryAction.explain(); - return select.execute(); - } + public static List executeQueryPlanQueryAction( + QueryPlanQueryAction queryPlanQueryAction) { + QueryPlanRequestBuilder select = (QueryPlanRequestBuilder) queryPlanQueryAction.explain(); + return select.execute(); + } - public static ActionResponse executeShowQueryAction(ShowQueryAction showQueryAction) { - return showQueryAction.explain().get(); - } + public static ActionResponse executeShowQueryAction(ShowQueryAction showQueryAction) { + return showQueryAction.explain().get(); + } - public static ActionResponse executeDescribeQueryAction(DescribeQueryAction describeQueryAction) { - return describeQueryAction.explain().get(); - } + public static ActionResponse executeDescribeQueryAction(DescribeQueryAction describeQueryAction) { + return describeQueryAction.explain().get(); + } - public static ActionResponse executeDeleteAction(DeleteQueryAction deleteQueryAction) throws SqlParseException { - return deleteQueryAction.explain().get(); - } + public static ActionResponse executeDeleteAction(DeleteQueryAction deleteQueryAction) + throws SqlParseException { + return deleteQueryAction.explain().get(); + } - public static SearchHits executeMultiQueryAction(Client client, MultiQueryAction queryAction) - throws SqlParseException, IOException { - SqlElasticRequestBuilder multiRequestBuilder = queryAction.explain(); - ElasticHitsExecutor executor = MultiRequestExecutorFactory.createExecutor(client, - (MultiQueryRequestBuilder) multiRequestBuilder); - executor.run(); - return executor.getHits(); - } + public static SearchHits executeMultiQueryAction(Client client, MultiQueryAction queryAction) + throws SqlParseException, IOException { + SqlElasticRequestBuilder multiRequestBuilder = queryAction.explain(); + ElasticHitsExecutor executor = + MultiRequestExecutorFactory.createExecutor( + client, (MultiQueryRequestBuilder) multiRequestBuilder); + executor.run(); + return executor.getHits(); + } - public static 
Object executeAnyAction(Client client, QueryAction queryAction) - throws SqlParseException, IOException { - if (queryAction instanceof DefaultQueryAction) { - return executeSearchAction((DefaultQueryAction) queryAction); - } - if (queryAction instanceof AggregationQueryAction) { - return executeAggregationAction((AggregationQueryAction) queryAction); - } - if (queryAction instanceof QueryPlanQueryAction) { - return executeQueryPlanQueryAction((QueryPlanQueryAction) queryAction); - } - if (queryAction instanceof ShowQueryAction) { - return executeShowQueryAction((ShowQueryAction) queryAction); - } - if (queryAction instanceof DescribeQueryAction) { - return executeDescribeQueryAction((DescribeQueryAction) queryAction); - } - if (queryAction instanceof OpenSearchJoinQueryAction) { - return executeJoinSearchAction(client, (OpenSearchJoinQueryAction) queryAction); - } - if (queryAction instanceof MultiQueryAction) { - return executeMultiQueryAction(client, (MultiQueryAction) queryAction); - } - if (queryAction instanceof DeleteQueryAction) { - return executeDeleteAction((DeleteQueryAction) queryAction); - } - return null; + public static Object executeAnyAction(Client client, QueryAction queryAction) + throws SqlParseException, IOException { + if (queryAction instanceof DefaultQueryAction) { + return executeSearchAction((DefaultQueryAction) queryAction); + } + if (queryAction instanceof AggregationQueryAction) { + return executeAggregationAction((AggregationQueryAction) queryAction); + } + if (queryAction instanceof QueryPlanQueryAction) { + return executeQueryPlanQueryAction((QueryPlanQueryAction) queryAction); + } + if (queryAction instanceof ShowQueryAction) { + return executeShowQueryAction((ShowQueryAction) queryAction); + } + if (queryAction instanceof DescribeQueryAction) { + return executeDescribeQueryAction((DescribeQueryAction) queryAction); + } + if (queryAction instanceof OpenSearchJoinQueryAction) { + return executeJoinSearchAction(client, 
(OpenSearchJoinQueryAction) queryAction); + } + if (queryAction instanceof MultiQueryAction) { + return executeMultiQueryAction(client, (MultiQueryAction) queryAction); + } + if (queryAction instanceof DeleteQueryAction) { + return executeDeleteAction((DeleteQueryAction) queryAction); } + return null; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/RestExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/RestExecutor.java index e0124fb8be..8a0ab65970 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/RestExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/RestExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor; import java.util.Map; @@ -11,12 +10,12 @@ import org.opensearch.rest.RestChannel; import org.opensearch.sql.legacy.query.QueryAction; -/** - * Created by Eliran on 26/12/2015. - */ +/** Created by Eliran on 26/12/2015. */ public interface RestExecutor { - void execute(Client client, Map params, QueryAction queryAction, RestChannel channel) - throws Exception; + void execute( + Client client, Map params, QueryAction queryAction, RestChannel channel) + throws Exception; - String execute(Client client, Map params, QueryAction queryAction) throws Exception; + String execute(Client client, Map params, QueryAction queryAction) + throws Exception; } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/adapter/QueryPlanQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/adapter/QueryPlanQueryAction.java index 091abca554..b0179d3d8d 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/adapter/QueryPlanQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/adapter/QueryPlanQueryAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.adapter; import com.google.common.base.Strings; @@ 
-14,27 +13,31 @@ import org.opensearch.sql.legacy.query.SqlElasticRequestBuilder; /** - * The definition of QueryPlan of QueryAction which works as the adapter to the current QueryAction framework. + * The definition of QueryPlan of QueryAction which works as the adapter to the current QueryAction + * framework. */ public class QueryPlanQueryAction extends QueryAction { - private final QueryPlanRequestBuilder requestBuilder; + private final QueryPlanRequestBuilder requestBuilder; - public QueryPlanQueryAction(QueryPlanRequestBuilder requestBuilder) { - super(null, null); - this.requestBuilder = requestBuilder; - } + public QueryPlanQueryAction(QueryPlanRequestBuilder requestBuilder) { + super(null, null); + this.requestBuilder = requestBuilder; + } - @Override - public SqlElasticRequestBuilder explain() { - return requestBuilder; - } + @Override + public SqlElasticRequestBuilder explain() { + return requestBuilder; + } - @Override - public Optional> getFieldNames() { - List fieldNames = ((QueryPlanRequestBuilder) requestBuilder).outputColumns() - .stream() - .map(node -> Strings.isNullOrEmpty(node.getAlias()) ? node.getName() : node.getAlias()) + @Override + public Optional> getFieldNames() { + List fieldNames = + ((QueryPlanRequestBuilder) requestBuilder) + .outputColumns().stream() + .map( + node -> + Strings.isNullOrEmpty(node.getAlias()) ? 
node.getName() : node.getAlias()) .collect(Collectors.toList()); - return Optional.of(fieldNames); - } + return Optional.of(fieldNames); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/adapter/QueryPlanRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/adapter/QueryPlanRequestBuilder.java index ef0bc85bc1..3933df9bbb 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/adapter/QueryPlanRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/adapter/QueryPlanRequestBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.adapter; import java.util.List; @@ -16,38 +15,36 @@ import org.opensearch.sql.legacy.query.planner.core.BindingTupleQueryPlanner; import org.opensearch.sql.legacy.query.planner.core.ColumnNode; -/** - * The definition of QueryPlan SqlElasticRequestBuilder. - */ +/** The definition of QueryPlan SqlElasticRequestBuilder. */ @RequiredArgsConstructor public class QueryPlanRequestBuilder implements SqlElasticRequestBuilder { - private final BindingTupleQueryPlanner queryPlanner; - - public List execute() { - return queryPlanner.execute(); - } - - public List outputColumns() { - return queryPlanner.getColumnNodes(); - } - - @Override - public String explain() { - return queryPlanner.explain(); - } - - @Override - public ActionRequest request() { - throw new RuntimeException("unsupported operation"); - } - - @Override - public ActionResponse get() { - throw new RuntimeException("unsupported operation"); - } - - @Override - public ActionRequestBuilder getBuilder() { - throw new RuntimeException("unsupported operation"); - } + private final BindingTupleQueryPlanner queryPlanner; + + public List execute() { + return queryPlanner.execute(); + } + + public List outputColumns() { + return queryPlanner.getColumnNodes(); + } + + @Override + public String explain() { + return queryPlanner.explain(); + } + + 
@Override + public ActionRequest request() { + throw new RuntimeException("unsupported operation"); + } + + @Override + public ActionResponse get() { + throw new RuntimeException("unsupported operation"); + } + + @Override + public ActionRequestBuilder getBuilder() { + throw new RuntimeException("unsupported operation"); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResult.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResult.java index 680c0c8e85..8df3dd6b83 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResult.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResult.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.csv; import com.google.common.collect.ImmutableSet; @@ -12,86 +11,83 @@ import java.util.Set; import java.util.stream.Collectors; -/** - * Created by Eliran on 27/12/2015. - */ +/** Created by Eliran on 27/12/2015. */ public class CSVResult { - private static final Set SENSITIVE_CHAR = ImmutableSet.of("=", "+", "-", "@"); - - private final List headers; - private final List lines; - - /** - * Skip sanitizing if string line provided. This constructor is basically used by - * assertion in test code. - */ - public CSVResult(List headers, List lines) { - this.headers = headers; - this.lines = lines; + private static final Set SENSITIVE_CHAR = ImmutableSet.of("=", "+", "-", "@"); + + private final List headers; + private final List lines; + + /** + * Skip sanitizing if string line provided. This constructor is basically used by assertion in + * test code. 
+ */ + public CSVResult(List headers, List lines) { + this.headers = headers; + this.lines = lines; + } + + /** + * Sanitize both headers and data lines by: 1) First prepend single quote if first char is + * sensitive (= - + @) 2) Second double quote entire cell if any comma found + */ + public CSVResult(String separator, List headers, List> lines) { + this.headers = sanitizeHeaders(separator, headers); + this.lines = sanitizeLines(separator, lines); + } + + /** + * Return CSV header names which are sanitized because OpenSearch allows special character present + * in field name too. + * + * @return CSV header name list after sanitized + */ + public List getHeaders() { + return headers; + } + + /** + * Return CSV lines in which each cell is sanitized to avoid CSV injection. + * + * @return CSV lines after sanitized + */ + public List getLines() { + return lines; + } + + private List sanitizeHeaders(String separator, List headers) { + return headers.stream() + .map(this::sanitizeCell) + .map(cell -> quoteIfRequired(separator, cell)) + .collect(Collectors.toList()); + } + + private List sanitizeLines(String separator, List> lines) { + List result = new ArrayList<>(); + for (List line : lines) { + result.add( + line.stream() + .map(this::sanitizeCell) + .map(cell -> quoteIfRequired(separator, cell)) + .collect(Collectors.joining(separator))); } + return result; + } - /** - * Sanitize both headers and data lines by: - * 1) First prepend single quote if first char is sensitive (= - + @) - * 2) Second double quote entire cell if any comma found - */ - public CSVResult(String separator, List headers, List> lines) { - this.headers = sanitizeHeaders(separator, headers); - this.lines = sanitizeLines(separator, lines); + private String sanitizeCell(String cell) { + if (isStartWithSensitiveChar(cell)) { + return "'" + cell; } + return cell; + } - /** - * Return CSV header names which are sanitized because OpenSearch allows - * special character present in field name too. 
- * @return CSV header name list after sanitized - */ - public List getHeaders() { - return headers; - } - - /** - * Return CSV lines in which each cell is sanitized to avoid CSV injection. - * @return CSV lines after sanitized - */ - public List getLines() { - return lines; - } - - private List sanitizeHeaders(String separator, List headers) { - return headers.stream(). - map(this::sanitizeCell). - map(cell -> quoteIfRequired(separator, cell)). - collect(Collectors.toList()); - } - - private List sanitizeLines(String separator, List> lines) { - List result = new ArrayList<>(); - for (List line : lines) { - result.add(line.stream(). - map(this::sanitizeCell). - map(cell -> quoteIfRequired(separator, cell)). - collect(Collectors.joining(separator))); - } - return result; - } - - private String sanitizeCell(String cell) { - if (isStartWithSensitiveChar(cell)) { - return "'" + cell; - } - return cell; - } - - private String quoteIfRequired(String separator, String cell) { - final String quote = "\""; - return cell.contains(separator) - ? quote + cell.replaceAll("\"", "\"\"") + quote : cell; - } - - private boolean isStartWithSensitiveChar(String cell) { - return SENSITIVE_CHAR.stream(). - anyMatch(cell::startsWith); - } + private String quoteIfRequired(String separator, String cell) { + final String quote = "\""; + return cell.contains(separator) ? 
quote + cell.replaceAll("\"", "\"\"") + quote : cell; + } + private boolean isStartWithSensitiveChar(String cell) { + return SENSITIVE_CHAR.stream().anyMatch(cell::startsWith); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultRestExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultRestExecutor.java index ae7623e3a2..a69ff31a49 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultRestExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultRestExecutor.java @@ -3,75 +3,78 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.csv; import com.google.common.base.Joiner; import java.util.List; import java.util.Map; import org.opensearch.client.Client; +import org.opensearch.core.rest.RestStatus; import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestChannel; -import org.opensearch.core.rest.RestStatus; import org.opensearch.sql.legacy.executor.QueryActionElasticExecutor; import org.opensearch.sql.legacy.executor.RestExecutor; import org.opensearch.sql.legacy.query.QueryAction; import org.opensearch.sql.legacy.query.join.BackOffRetryStrategy; -/** - * Created by Eliran on 26/12/2015. - */ +/** Created by Eliran on 26/12/2015. 
*/ public class CSVResultRestExecutor implements RestExecutor { - @Override - public void execute(final Client client, final Map params, final QueryAction queryAction, - final RestChannel channel) throws Exception { + @Override + public void execute( + final Client client, + final Map params, + final QueryAction queryAction, + final RestChannel channel) + throws Exception { - final String csvString = execute(client, params, queryAction); - final BytesRestResponse bytesRestResponse = new BytesRestResponse(RestStatus.OK, csvString); + final String csvString = execute(client, params, queryAction); + final BytesRestResponse bytesRestResponse = new BytesRestResponse(RestStatus.OK, csvString); - if (!BackOffRetryStrategy.isHealthy(2 * bytesRestResponse.content().length(), this)) { - throw new IllegalStateException( - "[CSVResultRestExecutor] Memory could be insufficient when sendResponse()."); - } - - channel.sendResponse(bytesRestResponse); + if (!BackOffRetryStrategy.isHealthy(2 * bytesRestResponse.content().length(), this)) { + throw new IllegalStateException( + "[CSVResultRestExecutor] Memory could be insufficient when sendResponse()."); } - @Override - public String execute(final Client client, final Map params, final QueryAction queryAction) - throws Exception { + channel.sendResponse(bytesRestResponse); + } - final Object queryResult = QueryActionElasticExecutor.executeAnyAction(client, queryAction); + @Override + public String execute( + final Client client, final Map params, final QueryAction queryAction) + throws Exception { - final String separator = params.getOrDefault("separator", ","); - final String newLine = params.getOrDefault("newLine", "\n"); + final Object queryResult = QueryActionElasticExecutor.executeAnyAction(client, queryAction); - final boolean flat = getBooleanOrDefault(params, "flat", false); - final boolean includeScore = getBooleanOrDefault(params, "_score", false); - final boolean includeId = getBooleanOrDefault(params, "_id", false); + 
final String separator = params.getOrDefault("separator", ","); + final String newLine = params.getOrDefault("newLine", "\n"); - final List fieldNames = queryAction.getFieldNames().orElse(null); - final CSVResult result = new CSVResultsExtractor(includeScore, includeId) - .extractResults(queryResult, flat, separator, fieldNames); + final boolean flat = getBooleanOrDefault(params, "flat", false); + final boolean includeScore = getBooleanOrDefault(params, "_score", false); + final boolean includeId = getBooleanOrDefault(params, "_id", false); - return buildString(separator, result, newLine); - } + final List fieldNames = queryAction.getFieldNames().orElse(null); + final CSVResult result = + new CSVResultsExtractor(includeScore, includeId) + .extractResults(queryResult, flat, separator, fieldNames); - private boolean getBooleanOrDefault(Map params, String param, boolean defaultValue) { - boolean flat = defaultValue; - if (params.containsKey(param)) { - flat = Boolean.parseBoolean(params.get(param)); - } - return flat; - } + return buildString(separator, result, newLine); + } - private String buildString(String separator, CSVResult result, String newLine) { - StringBuilder csv = new StringBuilder(); - csv.append(Joiner.on(separator).join(result.getHeaders())); - csv.append(newLine); - csv.append(Joiner.on(newLine).join(result.getLines())); - return csv.toString(); + private boolean getBooleanOrDefault( + Map params, String param, boolean defaultValue) { + boolean flat = defaultValue; + if (params.containsKey(param)) { + flat = Boolean.parseBoolean(params.get(param)); } - + return flat; + } + + private String buildString(String separator, CSVResult result, String newLine) { + StringBuilder csv = new StringBuilder(); + csv.append(Joiner.on(separator).join(result.getHeaders())); + csv.append(newLine); + csv.append(Joiner.on(newLine).join(result.getLines())); + return csv.toString(); + } } diff --git 
a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultsExtractor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultsExtractor.java index 70cdd91452..5a3b3bc498 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultsExtractor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CSVResultsExtractor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.csv; import java.util.ArrayList; @@ -14,6 +13,7 @@ import java.util.Set; import java.util.stream.Collectors; import org.opensearch.common.document.DocumentField; +import org.opensearch.geo.search.aggregations.metrics.GeoBounds; import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; import org.opensearch.search.aggregations.Aggregation; @@ -21,7 +21,6 @@ import org.opensearch.search.aggregations.bucket.MultiBucketsAggregation; import org.opensearch.search.aggregations.bucket.SingleBucketAggregation; import org.opensearch.search.aggregations.metrics.ExtendedStats; -import org.opensearch.geo.search.aggregations.metrics.GeoBounds; import org.opensearch.search.aggregations.metrics.NumericMetricsAggregation; import org.opensearch.search.aggregations.metrics.Percentile; import org.opensearch.search.aggregations.metrics.Percentiles; @@ -31,320 +30,332 @@ import org.opensearch.sql.legacy.expression.model.ExprValue; import org.opensearch.sql.legacy.utils.Util; -/** - * Created by Eliran on 27/12/2015. - */ +/** Created by Eliran on 27/12/2015. 
*/ public class CSVResultsExtractor { - private final boolean includeScore; - private final boolean includeId; - private int currentLineIndex; - - public CSVResultsExtractor(boolean includeScore, boolean includeId) { - this.includeScore = includeScore; - this.includeId = includeId; - this.currentLineIndex = 0; + private final boolean includeScore; + private final boolean includeId; + private int currentLineIndex; + + public CSVResultsExtractor(boolean includeScore, boolean includeId) { + this.includeScore = includeScore; + this.includeId = includeId; + this.currentLineIndex = 0; + } + + public CSVResult extractResults( + Object queryResult, boolean flat, String separator, final List fieldNames) + throws CsvExtractorException { + + if (queryResult instanceof SearchHits) { + SearchHit[] hits = ((SearchHits) queryResult).getHits(); + List> docsAsMap = new ArrayList<>(); + List headers = createHeadersAndFillDocsMap(flat, hits, docsAsMap, fieldNames); + List> csvLines = createCSVLinesFromDocs(flat, separator, docsAsMap, headers); + return new CSVResult(separator, headers, csvLines); } - - public CSVResult extractResults(Object queryResult, boolean flat, String separator, - final List fieldNames) throws CsvExtractorException { - - if (queryResult instanceof SearchHits) { - SearchHit[] hits = ((SearchHits) queryResult).getHits(); - List> docsAsMap = new ArrayList<>(); - List headers = createHeadersAndFillDocsMap(flat, hits, docsAsMap, fieldNames); - List> csvLines = createCSVLinesFromDocs(flat, separator, docsAsMap, headers); - return new CSVResult(separator, headers, csvLines); - } - if (queryResult instanceof Aggregations) { - List headers = new ArrayList<>(); - List> lines = new ArrayList<>(); - lines.add(new ArrayList()); - handleAggregations((Aggregations) queryResult, headers, lines); - return new CSVResult(separator, headers, lines); - } - // Handle List result. 
- if (queryResult instanceof List) { - List bindingTuples = (List) queryResult; - List> csvLines = bindingTuples.stream().map(tuple -> { - Map bindingMap = tuple.getBindingMap(); - List rowValues = new ArrayList<>(); - for (String fieldName : fieldNames) { - if (bindingMap.containsKey(fieldName)) { + if (queryResult instanceof Aggregations) { + List headers = new ArrayList<>(); + List> lines = new ArrayList<>(); + lines.add(new ArrayList()); + handleAggregations((Aggregations) queryResult, headers, lines); + return new CSVResult(separator, headers, lines); + } + // Handle List result. + if (queryResult instanceof List) { + List bindingTuples = (List) queryResult; + List> csvLines = + bindingTuples.stream() + .map( + tuple -> { + Map bindingMap = tuple.getBindingMap(); + List rowValues = new ArrayList<>(); + for (String fieldName : fieldNames) { + if (bindingMap.containsKey(fieldName)) { rowValues.add(String.valueOf(bindingMap.get(fieldName).value())); - } else { + } else { rowValues.add(""); + } } - } - return rowValues; - }).collect(Collectors.toList()); + return rowValues; + }) + .collect(Collectors.toList()); - return new CSVResult(separator, fieldNames, csvLines); - } - return null; + return new CSVResult(separator, fieldNames, csvLines); } - - private void handleAggregations(Aggregations aggregations, List headers, List> lines) - throws CsvExtractorException { - if (allNumericAggregations(aggregations)) { - lines.get(this.currentLineIndex) - .addAll(fillHeaderAndCreateLineForNumericAggregations(aggregations, headers)); - return; - } - //aggregations with size one only supported when not metrics. 
- List aggregationList = aggregations.asList(); - if (aggregationList.size() > 1) { - throw new CsvExtractorException( - "currently support only one aggregation at same level (Except for numeric metrics)"); - } - Aggregation aggregation = aggregationList.get(0); - //we want to skip singleBucketAggregations (nested,reverse_nested,filters) - if (aggregation instanceof SingleBucketAggregation) { - Aggregations singleBucketAggs = ((SingleBucketAggregation) aggregation).getAggregations(); - handleAggregations(singleBucketAggs, headers, lines); - return; - } - if (aggregation instanceof NumericMetricsAggregation) { - handleNumericMetricAggregation(headers, lines.get(currentLineIndex), aggregation); - return; - } - if (aggregation instanceof GeoBounds) { - handleGeoBoundsAggregation(headers, lines, (GeoBounds) aggregation); - return; - } - if (aggregation instanceof TopHits) { - //todo: handle this . it returns hits... maby back to normal? - //todo: read about this usages - // TopHits topHitsAggregation = (TopHits) aggregation; - } - if (aggregation instanceof MultiBucketsAggregation) { - MultiBucketsAggregation bucketsAggregation = (MultiBucketsAggregation) aggregation; - String name = bucketsAggregation.getName(); - //checking because it can comes from sub aggregation again - if (!headers.contains(name)) { - headers.add(name); - } - Collection buckets = bucketsAggregation.getBuckets(); - - //clone current line. 
- List currentLine = lines.get(this.currentLineIndex); - List clonedLine = new ArrayList<>(currentLine); - - //call handle_Agg with current_line++ - boolean firstLine = true; - for (MultiBucketsAggregation.Bucket bucket : buckets) { - //each bucket need to add new line with current line copied => except for first line - String key = bucket.getKeyAsString(); - if (firstLine) { - firstLine = false; - } else { - currentLineIndex++; - currentLine = new ArrayList(clonedLine); - lines.add(currentLine); - } - currentLine.add(key); - handleAggregations(bucket.getAggregations(), headers, lines); - - } - } + return null; + } + + private void handleAggregations( + Aggregations aggregations, List headers, List> lines) + throws CsvExtractorException { + if (allNumericAggregations(aggregations)) { + lines + .get(this.currentLineIndex) + .addAll(fillHeaderAndCreateLineForNumericAggregations(aggregations, headers)); + return; } - - private void handleGeoBoundsAggregation(List headers, List> lines, - GeoBounds geoBoundsAggregation) { - String geoBoundAggName = geoBoundsAggregation.getName(); - headers.add(geoBoundAggName + ".topLeft.lon"); - headers.add(geoBoundAggName + ".topLeft.lat"); - headers.add(geoBoundAggName + ".bottomRight.lon"); - headers.add(geoBoundAggName + ".bottomRight.lat"); - List line = lines.get(this.currentLineIndex); - line.add(String.valueOf(geoBoundsAggregation.topLeft().getLon())); - line.add(String.valueOf(geoBoundsAggregation.topLeft().getLat())); - line.add(String.valueOf(geoBoundsAggregation.bottomRight().getLon())); - line.add(String.valueOf(geoBoundsAggregation.bottomRight().getLat())); - lines.add(line); + // aggregations with size one only supported when not metrics. 
+ List aggregationList = aggregations.asList(); + if (aggregationList.size() > 1) { + throw new CsvExtractorException( + "currently support only one aggregation at same level (Except for numeric metrics)"); } - - private List fillHeaderAndCreateLineForNumericAggregations(Aggregations aggregations, List header) - throws CsvExtractorException { - List line = new ArrayList<>(); - List aggregationList = aggregations.asList(); - for (Aggregation aggregation : aggregationList) { - handleNumericMetricAggregation(header, line, aggregation); - } - return line; + Aggregation aggregation = aggregationList.get(0); + // we want to skip singleBucketAggregations (nested,reverse_nested,filters) + if (aggregation instanceof SingleBucketAggregation) { + Aggregations singleBucketAggs = ((SingleBucketAggregation) aggregation).getAggregations(); + handleAggregations(singleBucketAggs, headers, lines); + return; } - - private void handleNumericMetricAggregation(List header, List line, Aggregation aggregation) - throws CsvExtractorException { - final String name = aggregation.getName(); - - if (aggregation instanceof NumericMetricsAggregation.SingleValue) { - if (!header.contains(name)) { - header.add(name); - } - NumericMetricsAggregation.SingleValue agg = (NumericMetricsAggregation.SingleValue) aggregation; - line.add(!Double.isInfinite(agg.value()) ? agg.getValueAsString() : "null"); - } else if (aggregation instanceof NumericMetricsAggregation.MultiValue) { - //todo:Numeric MultiValue - Stats,ExtendedStats,Percentile... 
- if (aggregation instanceof Stats) { - String[] statsHeaders = new String[]{"count", "sum", "avg", "min", "max"}; - boolean isExtendedStats = aggregation instanceof ExtendedStats; - if (isExtendedStats) { - String[] extendedHeaders = new String[]{"sumOfSquares", "variance", "stdDeviation"}; - statsHeaders = Util.concatStringsArrays(statsHeaders, extendedHeaders); - } - mergeHeadersWithPrefix(header, name, statsHeaders); - Stats stats = (Stats) aggregation; - line.add(String.valueOf(stats.getCount())); - line.add(stats.getSumAsString()); - line.add(stats.getAvgAsString()); - line.add(stats.getMinAsString()); - line.add(stats.getMaxAsString()); - if (isExtendedStats) { - ExtendedStats extendedStats = (ExtendedStats) aggregation; - line.add(extendedStats.getSumOfSquaresAsString()); - line.add(extendedStats.getVarianceAsString()); - line.add(extendedStats.getStdDeviationAsString()); - } - } else if (aggregation instanceof Percentiles) { - - final List percentileHeaders = new ArrayList<>(7); - final Percentiles percentiles = (Percentiles) aggregation; - - for (final Percentile p : percentiles) { - percentileHeaders.add(String.valueOf(p.getPercent())); - line.add(percentiles.percentileAsString(p.getPercent())); - } - mergeHeadersWithPrefix(header, name, percentileHeaders.toArray(new String[0])); - } else { - throw new CsvExtractorException( - "unknown NumericMetricsAggregation.MultiValue:" + aggregation.getClass()); - } - + if (aggregation instanceof NumericMetricsAggregation) { + handleNumericMetricAggregation(headers, lines.get(currentLineIndex), aggregation); + return; + } + if (aggregation instanceof GeoBounds) { + handleGeoBoundsAggregation(headers, lines, (GeoBounds) aggregation); + return; + } + if (aggregation instanceof TopHits) { + // todo: handle this . it returns hits... maby back to normal? 
+ // todo: read about this usages + // TopHits topHitsAggregation = (TopHits) aggregation; + } + if (aggregation instanceof MultiBucketsAggregation) { + MultiBucketsAggregation bucketsAggregation = (MultiBucketsAggregation) aggregation; + String name = bucketsAggregation.getName(); + // checking because it can comes from sub aggregation again + if (!headers.contains(name)) { + headers.add(name); + } + Collection buckets = + bucketsAggregation.getBuckets(); + + // clone current line. + List currentLine = lines.get(this.currentLineIndex); + List clonedLine = new ArrayList<>(currentLine); + + // call handle_Agg with current_line++ + boolean firstLine = true; + for (MultiBucketsAggregation.Bucket bucket : buckets) { + // each bucket need to add new line with current line copied => except for first line + String key = bucket.getKeyAsString(); + if (firstLine) { + firstLine = false; } else { - throw new CsvExtractorException("unknown NumericMetricsAggregation" + aggregation.getClass()); + currentLineIndex++; + currentLine = new ArrayList(clonedLine); + lines.add(currentLine); } + currentLine.add(key); + handleAggregations(bucket.getAggregations(), headers, lines); + } } - - private void mergeHeadersWithPrefix(List header, String prefix, String[] newHeaders) { - for (int i = 0; i < newHeaders.length; i++) { - String newHeader = newHeaders[i]; - if (prefix != null && !prefix.equals("")) { - newHeader = prefix + "." 
+ newHeader; - } - if (!header.contains(newHeader)) { - header.add(newHeader); - } - } + } + + private void handleGeoBoundsAggregation( + List headers, List> lines, GeoBounds geoBoundsAggregation) { + String geoBoundAggName = geoBoundsAggregation.getName(); + headers.add(geoBoundAggName + ".topLeft.lon"); + headers.add(geoBoundAggName + ".topLeft.lat"); + headers.add(geoBoundAggName + ".bottomRight.lon"); + headers.add(geoBoundAggName + ".bottomRight.lat"); + List line = lines.get(this.currentLineIndex); + line.add(String.valueOf(geoBoundsAggregation.topLeft().getLon())); + line.add(String.valueOf(geoBoundsAggregation.topLeft().getLat())); + line.add(String.valueOf(geoBoundsAggregation.bottomRight().getLon())); + line.add(String.valueOf(geoBoundsAggregation.bottomRight().getLat())); + lines.add(line); + } + + private List fillHeaderAndCreateLineForNumericAggregations( + Aggregations aggregations, List header) throws CsvExtractorException { + List line = new ArrayList<>(); + List aggregationList = aggregations.asList(); + for (Aggregation aggregation : aggregationList) { + handleNumericMetricAggregation(header, line, aggregation); } - - private boolean allNumericAggregations(Aggregations aggregations) { - List aggregationList = aggregations.asList(); - for (Aggregation aggregation : aggregationList) { - if (!(aggregation instanceof NumericMetricsAggregation)) { - return false; - } + return line; + } + + private void handleNumericMetricAggregation( + List header, List line, Aggregation aggregation) + throws CsvExtractorException { + final String name = aggregation.getName(); + + if (aggregation instanceof NumericMetricsAggregation.SingleValue) { + if (!header.contains(name)) { + header.add(name); + } + NumericMetricsAggregation.SingleValue agg = + (NumericMetricsAggregation.SingleValue) aggregation; + line.add(!Double.isInfinite(agg.value()) ? 
agg.getValueAsString() : "null"); + } else if (aggregation instanceof NumericMetricsAggregation.MultiValue) { + // todo:Numeric MultiValue - Stats,ExtendedStats,Percentile... + if (aggregation instanceof Stats) { + String[] statsHeaders = new String[] {"count", "sum", "avg", "min", "max"}; + boolean isExtendedStats = aggregation instanceof ExtendedStats; + if (isExtendedStats) { + String[] extendedHeaders = new String[] {"sumOfSquares", "variance", "stdDeviation"}; + statsHeaders = Util.concatStringsArrays(statsHeaders, extendedHeaders); } - return true; - } + mergeHeadersWithPrefix(header, name, statsHeaders); + Stats stats = (Stats) aggregation; + line.add(String.valueOf(stats.getCount())); + line.add(stats.getSumAsString()); + line.add(stats.getAvgAsString()); + line.add(stats.getMinAsString()); + line.add(stats.getMaxAsString()); + if (isExtendedStats) { + ExtendedStats extendedStats = (ExtendedStats) aggregation; + line.add(extendedStats.getSumOfSquaresAsString()); + line.add(extendedStats.getVarianceAsString()); + line.add(extendedStats.getStdDeviationAsString()); + } + } else if (aggregation instanceof Percentiles) { + + final List percentileHeaders = new ArrayList<>(7); + final Percentiles percentiles = (Percentiles) aggregation; - private Aggregation skipAggregations(Aggregation firstAggregation) { - while (firstAggregation instanceof SingleBucketAggregation) { - firstAggregation = getFirstAggregation(((SingleBucketAggregation) firstAggregation).getAggregations()); + for (final Percentile p : percentiles) { + percentileHeaders.add(String.valueOf(p.getPercent())); + line.add(percentiles.percentileAsString(p.getPercent())); } - return firstAggregation; + mergeHeadersWithPrefix(header, name, percentileHeaders.toArray(new String[0])); + } else { + throw new CsvExtractorException( + "unknown NumericMetricsAggregation.MultiValue:" + aggregation.getClass()); + } + + } else { + throw new CsvExtractorException("unknown NumericMetricsAggregation" + 
aggregation.getClass()); } - - private Aggregation getFirstAggregation(Aggregations aggregations) { - return aggregations.asList().get(0); + } + + private void mergeHeadersWithPrefix(List header, String prefix, String[] newHeaders) { + for (int i = 0; i < newHeaders.length; i++) { + String newHeader = newHeaders[i]; + if (prefix != null && !prefix.equals("")) { + newHeader = prefix + "." + newHeader; + } + if (!header.contains(newHeader)) { + header.add(newHeader); + } + } + } + + private boolean allNumericAggregations(Aggregations aggregations) { + List aggregationList = aggregations.asList(); + for (Aggregation aggregation : aggregationList) { + if (!(aggregation instanceof NumericMetricsAggregation)) { + return false; + } } + return true; + } - private List> createCSVLinesFromDocs(boolean flat, String separator, - List> docsAsMap, - List headers) { - List> csvLines = new ArrayList<>(); - for (Map doc : docsAsMap) { - List line = new ArrayList<>(); - for (String header : headers) { - line.add(findFieldValue(header, doc, flat, separator)); - } - csvLines.add(line); - } - return csvLines; + private Aggregation skipAggregations(Aggregation firstAggregation) { + while (firstAggregation instanceof SingleBucketAggregation) { + firstAggregation = + getFirstAggregation(((SingleBucketAggregation) firstAggregation).getAggregations()); + } + return firstAggregation; + } + + private Aggregation getFirstAggregation(Aggregations aggregations) { + return aggregations.asList().get(0); + } + + private List> createCSVLinesFromDocs( + boolean flat, String separator, List> docsAsMap, List headers) { + List> csvLines = new ArrayList<>(); + for (Map doc : docsAsMap) { + List line = new ArrayList<>(); + for (String header : headers) { + line.add(findFieldValue(header, doc, flat, separator)); + } + csvLines.add(line); + } + return csvLines; + } + + private List createHeadersAndFillDocsMap( + final boolean flat, + final SearchHit[] hits, + final List> docsAsMap, + final List fieldNames) 
{ + final Set csvHeaders = new LinkedHashSet<>(); + if (fieldNames != null) { + csvHeaders.addAll(fieldNames); } - private List createHeadersAndFillDocsMap(final boolean flat, final SearchHit[] hits, - final List> docsAsMap, - final List fieldNames) { - final Set csvHeaders = new LinkedHashSet<>(); - if (fieldNames != null) { - csvHeaders.addAll(fieldNames); - } + for (final SearchHit hit : hits) { + final Map doc = hit.getSourceAsMap(); + final Map fields = hit.getFields(); + for (final DocumentField searchHitField : fields.values()) { + doc.put(searchHitField.getName(), searchHitField.getValue()); + } + + if (this.includeId) { + doc.put("_id", hit.getId()); + } + if (this.includeScore) { + doc.put("_score", hit.getScore()); + } + + // select function as field is a special case where each hit has non-null field (function) + // and sourceAsMap is all columns in index (the same as 'SELECT *') + if (fields.isEmpty()) { + mergeHeaders(csvHeaders, doc, flat); + } + docsAsMap.add(doc); + } - for (final SearchHit hit : hits) { - final Map doc = hit.getSourceAsMap(); - final Map fields = hit.getFields(); - for (final DocumentField searchHitField : fields.values()) { - doc.put(searchHitField.getName(), searchHitField.getValue()); - } - - if (this.includeId) { - doc.put("_id", hit.getId()); - } - if (this.includeScore) { - doc.put("_score", hit.getScore()); - } - - // select function as field is a special case where each hit has non-null field (function) - // and sourceAsMap is all columns in index (the same as 'SELECT *') - if (fields.isEmpty()) { - mergeHeaders(csvHeaders, doc, flat); - } - docsAsMap.add(doc); - } + return new ArrayList<>(csvHeaders); + } - return new ArrayList<>(csvHeaders); - } + private String findFieldValue( + String header, Map doc, boolean flat, String separator) { + if (flat && header.contains(".")) { + String[] split = header.split("\\."); + Object innerDoc = doc; - private String findFieldValue(String header, Map doc, boolean flat, String 
separator) { - if (flat && header.contains(".")) { - String[] split = header.split("\\."); - Object innerDoc = doc; - - for (String innerField : split) { - if (!(innerDoc instanceof Map)) { - return ""; - } - innerDoc = ((Map) innerDoc).get(innerField); - if (innerDoc == null) { - return ""; - } - } - return innerDoc.toString(); - } else { - if (doc.containsKey(header)) { - return String.valueOf(doc.get(header)); - } + for (String innerField : split) { + if (!(innerDoc instanceof Map)) { + return ""; } - return ""; - } - - private void mergeHeaders(Set headers, Map doc, boolean flat) { - if (!flat) { - headers.addAll(doc.keySet()); - return; + innerDoc = ((Map) innerDoc).get(innerField); + if (innerDoc == null) { + return ""; } - mergeFieldNamesRecursive(headers, doc, ""); + } + return innerDoc.toString(); + } else { + if (doc.containsKey(header)) { + return String.valueOf(doc.get(header)); + } } + return ""; + } - private void mergeFieldNamesRecursive(Set headers, Map doc, String prefix) { - for (Map.Entry field : doc.entrySet()) { - Object value = field.getValue(); - if (value instanceof Map) { - mergeFieldNamesRecursive(headers, (Map) value, prefix + field.getKey() + "."); - } else { - headers.add(prefix + field.getKey()); - } - } + private void mergeHeaders(Set headers, Map doc, boolean flat) { + if (!flat) { + headers.addAll(doc.keySet()); + return; + } + mergeFieldNamesRecursive(headers, doc, ""); + } + + private void mergeFieldNamesRecursive( + Set headers, Map doc, String prefix) { + for (Map.Entry field : doc.entrySet()) { + Object value = field.getValue(); + if (value instanceof Map) { + mergeFieldNamesRecursive( + headers, (Map) value, prefix + field.getKey() + "."); + } else { + headers.add(prefix + field.getKey()); + } } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CsvExtractorException.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CsvExtractorException.java index 7e0f8e8ff9..cb289e4625 100644 --- 
a/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CsvExtractorException.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/csv/CsvExtractorException.java @@ -3,14 +3,11 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.csv; -/** - * Created by Eliran on 29/12/2015. - */ +/** Created by Eliran on 29/12/2015. */ public class CsvExtractorException extends Exception { - public CsvExtractorException(String message) { - super(message); - } + public CsvExtractorException(String message) { + super(message); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorActionRequestRestExecutorFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorActionRequestRestExecutorFactory.java index 7c8ed62a07..b4add64f9c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorActionRequestRestExecutorFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorActionRequestRestExecutorFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.cursor; import org.opensearch.rest.RestRequest; @@ -11,16 +10,17 @@ public class CursorActionRequestRestExecutorFactory { - public static CursorAsyncRestExecutor createExecutor(RestRequest request, String cursorId, Format format) { + public static CursorAsyncRestExecutor createExecutor( + RestRequest request, String cursorId, Format format) { - if (isCursorCloseRequest(request)) { - return new CursorAsyncRestExecutor(new CursorCloseExecutor(cursorId)); - } else { - return new CursorAsyncRestExecutor(new CursorResultExecutor(cursorId, format)); - } + if (isCursorCloseRequest(request)) { + return new CursorAsyncRestExecutor(new CursorCloseExecutor(cursorId)); + } else { + return new CursorAsyncRestExecutor(new CursorResultExecutor(cursorId, format)); } + } - private static boolean isCursorCloseRequest(final RestRequest 
request) { - return request.path().endsWith("/_sql/close"); - } + private static boolean isCursorCloseRequest(final RestRequest request) { + return request.path().endsWith("/_sql/close"); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorAsyncRestExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorAsyncRestExecutor.java index 92703dde2a..ffcf2adbf3 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorAsyncRestExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorAsyncRestExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.cursor; import java.io.IOException; @@ -13,9 +12,9 @@ import org.apache.logging.log4j.Logger; import org.opensearch.client.Client; import org.opensearch.common.unit.TimeValue; +import org.opensearch.core.rest.RestStatus; import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestChannel; -import org.opensearch.core.rest.RestStatus; import org.opensearch.sql.common.setting.Settings; import org.opensearch.sql.common.utils.QueryContext; import org.opensearch.sql.legacy.esdomain.LocalClusterState; @@ -25,84 +24,83 @@ import org.opensearch.threadpool.ThreadPool; public class CursorAsyncRestExecutor { - /** - * Custom thread pool name managed by OpenSearch - */ - public static final String SQL_WORKER_THREAD_POOL_NAME = "sql-worker"; + /** Custom thread pool name managed by OpenSearch */ + public static final String SQL_WORKER_THREAD_POOL_NAME = "sql-worker"; - private static final Logger LOG = LogManager.getLogger(CursorAsyncRestExecutor.class); + private static final Logger LOG = LogManager.getLogger(CursorAsyncRestExecutor.class); - /** - * Delegated rest executor to async - */ - private final CursorRestExecutor executor; + /** Delegated rest executor to async */ + private final CursorRestExecutor executor; + 
CursorAsyncRestExecutor(CursorRestExecutor executor) { + this.executor = executor; + } - CursorAsyncRestExecutor(CursorRestExecutor executor) { - this.executor = executor; - } + public void execute(Client client, Map params, RestChannel channel) { + async(client, params, channel); + } - public void execute(Client client, Map params, RestChannel channel) { - async(client, params, channel); - } - - /** - * Run given task in thread pool asynchronously - */ - private void async(Client client, Map params, RestChannel channel) { + /** Run given task in thread pool asynchronously */ + private void async(Client client, Map params, RestChannel channel) { - ThreadPool threadPool = client.threadPool(); - Runnable runnable = () -> { - try { - doExecuteWithTimeMeasured(client, params, channel); - } catch (IOException e) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - LOG.warn("[{}] [MCB] async task got an IO/SQL exception: {}", QueryContext.getRequestId(), - e.getMessage()); - e.printStackTrace(); - channel.sendResponse(new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); - } catch (IllegalStateException e) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - LOG.warn("[{}] [MCB] async task got a runtime exception: {}", QueryContext.getRequestId(), - e.getMessage()); - e.printStackTrace(); - channel.sendResponse(new BytesRestResponse(RestStatus.INSUFFICIENT_STORAGE, - "Memory circuit is broken.")); - } catch (Throwable t) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - LOG.warn("[{}] [MCB] async task got an unknown throwable: {}", QueryContext.getRequestId(), - t.getMessage()); - t.printStackTrace(); - channel.sendResponse(new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, - String.valueOf(t.getMessage()))); - } finally { - BackOffRetryStrategy.releaseMem(executor); - } + ThreadPool threadPool = client.threadPool(); + 
Runnable runnable = + () -> { + try { + doExecuteWithTimeMeasured(client, params, channel); + } catch (IOException e) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + LOG.warn( + "[{}] [MCB] async task got an IO/SQL exception: {}", + QueryContext.getRequestId(), + e.getMessage()); + e.printStackTrace(); + channel.sendResponse( + new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); + } catch (IllegalStateException e) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + LOG.warn( + "[{}] [MCB] async task got a runtime exception: {}", + QueryContext.getRequestId(), + e.getMessage()); + e.printStackTrace(); + channel.sendResponse( + new BytesRestResponse( + RestStatus.INSUFFICIENT_STORAGE, "Memory circuit is broken.")); + } catch (Throwable t) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + LOG.warn( + "[{}] [MCB] async task got an unknown throwable: {}", + QueryContext.getRequestId(), + t.getMessage()); + t.printStackTrace(); + channel.sendResponse( + new BytesRestResponse( + RestStatus.INTERNAL_SERVER_ERROR, String.valueOf(t.getMessage()))); + } finally { + BackOffRetryStrategy.releaseMem(executor); + } }; - // Preserve context of calling thread to ensure headers of requests are forwarded when running blocking actions - threadPool.schedule( - QueryContext.withCurrentContext(runnable), - new TimeValue(0L), - SQL_WORKER_THREAD_POOL_NAME - ); - } + // Preserve context of calling thread to ensure headers of requests are forwarded when running + // blocking actions + threadPool.schedule( + QueryContext.withCurrentContext(runnable), new TimeValue(0L), SQL_WORKER_THREAD_POOL_NAME); + } - /** - * Time the real execution of Executor and log slow query for troubleshooting - */ - private void doExecuteWithTimeMeasured(Client client, - Map params, - RestChannel channel) throws Exception { - long startTime = System.nanoTime(); - try 
{ - executor.execute(client, params, channel); - } finally { - Duration elapsed = Duration.ofNanos(System.nanoTime() - startTime); - int slowLogThreshold = LocalClusterState.state().getSettingValue(Settings.Key.SQL_SLOWLOG); - if (elapsed.getSeconds() >= slowLogThreshold) { - LOG.warn("[{}] Slow query: elapsed={} (ms)", QueryContext.getRequestId(), elapsed.toMillis()); - } - } + /** Time the real execution of Executor and log slow query for troubleshooting */ + private void doExecuteWithTimeMeasured( + Client client, Map params, RestChannel channel) throws Exception { + long startTime = System.nanoTime(); + try { + executor.execute(client, params, channel); + } finally { + Duration elapsed = Duration.ofNanos(System.nanoTime() - startTime); + int slowLogThreshold = LocalClusterState.state().getSettingValue(Settings.Key.SQL_SLOWLOG); + if (elapsed.getSeconds() >= slowLogThreshold) { + LOG.warn( + "[{}] Slow query: elapsed={} (ms)", QueryContext.getRequestId(), elapsed.toMillis()); + } } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorCloseExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorCloseExecutor.java index 98e89c12e4..7282eaed4c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorCloseExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorCloseExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.cursor; import static org.opensearch.core.rest.RestStatus.OK; @@ -25,66 +24,69 @@ public class CursorCloseExecutor implements CursorRestExecutor { - private static final Logger LOG = LogManager.getLogger(CursorCloseExecutor.class); - - private static final String SUCCEEDED_TRUE = "{\"succeeded\":true}"; - private static final String SUCCEEDED_FALSE = "{\"succeeded\":false}"; - - private String cursorId; - - public CursorCloseExecutor(String cursorId) { - this.cursorId = 
cursorId; - } - - public void execute(Client client, Map params, RestChannel channel) throws Exception { - try { - String formattedResponse = execute(client, params); - channel.sendResponse(new BytesRestResponse(OK, "application/json; charset=UTF-8", formattedResponse)); - } catch (IllegalArgumentException | JSONException e) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CUS).increment(); - LOG.error("Error parsing the cursor", e); - channel.sendResponse(new BytesRestResponse(channel, e)); - } catch (OpenSearchException e) { - int status = (e.status().getStatus()); - if (status > 399 && status < 500) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CUS).increment(); - } else if (status > 499) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - } - LOG.error("Error completing cursor request", e); - channel.sendResponse(new BytesRestResponse(channel, e)); - } + private static final Logger LOG = LogManager.getLogger(CursorCloseExecutor.class); + + private static final String SUCCEEDED_TRUE = "{\"succeeded\":true}"; + private static final String SUCCEEDED_FALSE = "{\"succeeded\":false}"; + + private String cursorId; + + public CursorCloseExecutor(String cursorId) { + this.cursorId = cursorId; + } + + public void execute(Client client, Map params, RestChannel channel) + throws Exception { + try { + String formattedResponse = execute(client, params); + channel.sendResponse( + new BytesRestResponse(OK, "application/json; charset=UTF-8", formattedResponse)); + } catch (IllegalArgumentException | JSONException e) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CUS).increment(); + LOG.error("Error parsing the cursor", e); + channel.sendResponse(new BytesRestResponse(channel, e)); + } catch (OpenSearchException e) { + int status = (e.status().getStatus()); + if (status > 399 && status < 500) { + 
Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CUS).increment(); + } else if (status > 499) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + } + LOG.error("Error completing cursor request", e); + channel.sendResponse(new BytesRestResponse(channel, e)); } + } - public String execute(Client client, Map params) throws Exception { - String[] splittedCursor = cursorId.split(":"); - - if (splittedCursor.length!=2) { - throw new VerificationException("Not able to parse invalid cursor"); - } - - String type = splittedCursor[0]; - CursorType cursorType = CursorType.getById(type); - - switch(cursorType) { - case DEFAULT: - DefaultCursor defaultCursor = DefaultCursor.from(splittedCursor[1]); - return handleDefaultCursorCloseRequest(client, defaultCursor); - case AGGREGATION: - case JOIN: - default: throw new VerificationException("Unsupported cursor type [" + type + "]"); - } + public String execute(Client client, Map params) throws Exception { + String[] splittedCursor = cursorId.split(":"); + if (splittedCursor.length != 2) { + throw new VerificationException("Not able to parse invalid cursor"); } - private String handleDefaultCursorCloseRequest(Client client, DefaultCursor cursor) { - String scrollId = cursor.getScrollId(); - ClearScrollResponse clearScrollResponse = client.prepareClearScroll().addScrollId(scrollId).get(); - if (clearScrollResponse.isSucceeded()) { - return SUCCEEDED_TRUE; - } else { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - return SUCCEEDED_FALSE; - } + String type = splittedCursor[0]; + CursorType cursorType = CursorType.getById(type); + + switch (cursorType) { + case DEFAULT: + DefaultCursor defaultCursor = DefaultCursor.from(splittedCursor[1]); + return handleDefaultCursorCloseRequest(client, defaultCursor); + case AGGREGATION: + case JOIN: + default: + throw new VerificationException("Unsupported cursor type [" + type + "]"); + } + } + + 
private String handleDefaultCursorCloseRequest(Client client, DefaultCursor cursor) { + String scrollId = cursor.getScrollId(); + ClearScrollResponse clearScrollResponse = + client.prepareClearScroll().addScrollId(scrollId).get(); + if (clearScrollResponse.isSucceeded()) { + return SUCCEEDED_TRUE; + } else { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + return SUCCEEDED_FALSE; } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorRestExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorRestExecutor.java index 5f294f8e32..4c4b854379 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorRestExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorRestExecutor.java @@ -3,21 +3,16 @@ * SPDX-License-Identifier: Apache-2.0 */ - - package org.opensearch.sql.legacy.executor.cursor; import java.util.Map; import org.opensearch.client.Client; import org.opensearch.rest.RestChannel; -/** - * Interface to execute cursor request. - */ +/** Interface to execute cursor request. 
*/ public interface CursorRestExecutor { - void execute(Client client, Map params, RestChannel channel) - throws Exception; + void execute(Client client, Map params, RestChannel channel) throws Exception; - String execute(Client client, Map params) throws Exception; + String execute(Client client, Map params) throws Exception; } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorResultExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorResultExecutor.java index 9753f8049c..854a40b4dd 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorResultExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/cursor/CursorResultExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.cursor; import static org.opensearch.core.rest.RestStatus.OK; @@ -34,99 +33,104 @@ public class CursorResultExecutor implements CursorRestExecutor { - private String cursorId; - private Format format; + private String cursorId; + private Format format; + + private static final Logger LOG = LogManager.getLogger(CursorResultExecutor.class); + + public CursorResultExecutor(String cursorId, Format format) { + this.cursorId = cursorId; + this.format = format; + } + + public void execute(Client client, Map params, RestChannel channel) + throws Exception { + try { + String formattedResponse = execute(client, params); + channel.sendResponse( + new BytesRestResponse(OK, "application/json; charset=UTF-8", formattedResponse)); + } catch (IllegalArgumentException | JSONException e) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CUS).increment(); + LOG.error("Error parsing the cursor", e); + channel.sendResponse(new BytesRestResponse(channel, e)); + } catch (OpenSearchException e) { + int status = (e.status().getStatus()); + if (status > 399 && status < 500) { + 
Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CUS).increment(); + } else if (status > 499) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + } + LOG.error("Error completing cursor request", e); + channel.sendResponse(new BytesRestResponse(channel, e)); + } + } - private static final Logger LOG = LogManager.getLogger(CursorResultExecutor.class); + public String execute(Client client, Map params) throws Exception { + /** + * All cursor's are of the form : The serialized form before + * encoding is upto Cursor implementation + */ + String[] splittedCursor = cursorId.split(":", 2); - public CursorResultExecutor(String cursorId, Format format) { - this.cursorId = cursorId; - this.format = format; + if (splittedCursor.length != 2) { + throw new VerificationException("Not able to parse invalid cursor"); } - public void execute(Client client, Map params, RestChannel channel) throws Exception { - try { - String formattedResponse = execute(client, params); - channel.sendResponse(new BytesRestResponse(OK, "application/json; charset=UTF-8", formattedResponse)); - } catch (IllegalArgumentException | JSONException e) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CUS).increment(); - LOG.error("Error parsing the cursor", e); - channel.sendResponse(new BytesRestResponse(channel, e)); - } catch (OpenSearchException e) { - int status = (e.status().getStatus()); - if (status > 399 && status < 500) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CUS).increment(); - } else if (status > 499) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - } - LOG.error("Error completing cursor request", e); - channel.sendResponse(new BytesRestResponse(channel, e)); - } + String type = splittedCursor[0]; + CursorType cursorType = CursorType.getById(type); + + switch (cursorType) { + case DEFAULT: + DefaultCursor defaultCursor = 
DefaultCursor.from(splittedCursor[1]); + return handleDefaultCursorRequest(client, defaultCursor); + case AGGREGATION: + case JOIN: + default: + throw new VerificationException("Unsupported cursor type [" + type + "]"); } - - public String execute(Client client, Map params) throws Exception { - /** - * All cursor's are of the form : - * The serialized form before encoding is upto Cursor implementation - */ - String[] splittedCursor = cursorId.split(":", 2); - - if (splittedCursor.length!=2) { - throw new VerificationException("Not able to parse invalid cursor"); - } - - String type = splittedCursor[0]; - CursorType cursorType = CursorType.getById(type); - - switch(cursorType) { - case DEFAULT: - DefaultCursor defaultCursor = DefaultCursor.from(splittedCursor[1]); - return handleDefaultCursorRequest(client, defaultCursor); - case AGGREGATION: - case JOIN: - default: throw new VerificationException("Unsupported cursor type [" + type + "]"); - } + } + + private String handleDefaultCursorRequest(Client client, DefaultCursor cursor) { + String previousScrollId = cursor.getScrollId(); + LocalClusterState clusterState = LocalClusterState.state(); + TimeValue scrollTimeout = clusterState.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE); + SearchResponse scrollResponse = + client.prepareSearchScroll(previousScrollId).setScroll(scrollTimeout).get(); + SearchHits searchHits = scrollResponse.getHits(); + SearchHit[] searchHitArray = searchHits.getHits(); + String newScrollId = scrollResponse.getScrollId(); + + int rowsLeft = (int) cursor.getRowsLeft(); + int fetch = cursor.getFetchSize(); + + if (rowsLeft < fetch && rowsLeft < searchHitArray.length) { + /** + * This condition implies we are on the last page, and we might need to truncate the result + * from SearchHit[] Avoid truncating in following two scenarios 1. number of rows to be sent + * equals fetchSize 2. size of SearchHit[] is already less that rows that needs to be sent + * + *

Else truncate to desired number of rows + */ + SearchHit[] newSearchHits = Arrays.copyOf(searchHitArray, rowsLeft); + searchHits = + new SearchHits(newSearchHits, searchHits.getTotalHits(), searchHits.getMaxScore()); } - private String handleDefaultCursorRequest(Client client, DefaultCursor cursor) { - String previousScrollId = cursor.getScrollId(); - LocalClusterState clusterState = LocalClusterState.state(); - TimeValue scrollTimeout = clusterState.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE); - SearchResponse scrollResponse = client.prepareSearchScroll(previousScrollId).setScroll(scrollTimeout).get(); - SearchHits searchHits = scrollResponse.getHits(); - SearchHit[] searchHitArray = searchHits.getHits(); - String newScrollId = scrollResponse.getScrollId(); - - int rowsLeft = (int) cursor.getRowsLeft(); - int fetch = cursor.getFetchSize(); - - if (rowsLeft < fetch && rowsLeft < searchHitArray.length) { - /** - * This condition implies we are on the last page, and we might need to truncate the result from SearchHit[] - * Avoid truncating in following two scenarios - * 1. number of rows to be sent equals fetchSize - * 2. 
size of SearchHit[] is already less that rows that needs to be sent - * - * Else truncate to desired number of rows - */ - SearchHit[] newSearchHits = Arrays.copyOf(searchHitArray, rowsLeft); - searchHits = new SearchHits(newSearchHits, searchHits.getTotalHits(), searchHits.getMaxScore()); - } - - rowsLeft = rowsLeft - fetch; - - if (rowsLeft <=0) { - /** Clear the scroll context on last page */ - ClearScrollResponse clearScrollResponse = client.prepareClearScroll().addScrollId(newScrollId).get(); - if (!clearScrollResponse.isSucceeded()) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - LOG.info("Error closing the cursor context {} ", newScrollId); - } - } - - cursor.setRowsLeft(rowsLeft); - cursor.setScrollId(newScrollId); - Protocol protocol = new Protocol(client, searchHits, format.name().toLowerCase(), cursor); - return protocol.cursorFormat(); + rowsLeft = rowsLeft - fetch; + + if (rowsLeft <= 0) { + /** Clear the scroll context on last page */ + ClearScrollResponse clearScrollResponse = + client.prepareClearScroll().addScrollId(newScrollId).get(); + if (!clearScrollResponse.isSucceeded()) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + LOG.info("Error closing the cursor context {} ", newScrollId); + } } + + cursor.setRowsLeft(rowsLeft); + cursor.setScrollId(newScrollId); + Protocol protocol = new Protocol(client, searchHits, format.name().toLowerCase(), cursor); + return protocol.cursorFormat(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/BindingTupleResultSet.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/BindingTupleResultSet.java index d9eb463572..872442f04f 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/BindingTupleResultSet.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/BindingTupleResultSet.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ 
- package org.opensearch.sql.legacy.executor.format; import static org.opensearch.sql.legacy.executor.format.DateFieldFormatter.FORMAT_JDBC; @@ -18,43 +17,44 @@ import org.opensearch.sql.legacy.expression.model.ExprValue; import org.opensearch.sql.legacy.query.planner.core.ColumnNode; -/** - * The definition of BindingTuple ResultSet. - */ +/** The definition of BindingTuple ResultSet. */ public class BindingTupleResultSet extends ResultSet { - public BindingTupleResultSet(List columnNodes, List bindingTuples) { - this.schema = buildSchema(columnNodes); - this.dataRows = buildDataRows(columnNodes, bindingTuples); - } - - @VisibleForTesting - public static Schema buildSchema(List columnNodes) { - List columnList = columnNodes.stream() - .map(node -> new Schema.Column( - node.getName(), - node.getAlias(), - node.getType())) - .collect(Collectors.toList()); - return new Schema(columnList); - } - - @VisibleForTesting - public static DataRows buildDataRows(List columnNodes, List bindingTuples) { - List rowList = bindingTuples.stream().map(tuple -> { - Map bindingMap = tuple.getBindingMap(); - Map rowMap = new HashMap<>(); - for (ColumnNode column : columnNodes) { - String columnName = column.columnName(); - Object value = bindingMap.get(columnName).value(); - if (column.getType() == Schema.Type.DATE) { - value = DateFormat.getFormattedDate(new Date((Long) value), FORMAT_JDBC); - } - rowMap.put(columnName, value); - } - return new DataRows.Row(rowMap); - }).collect(Collectors.toList()); - - return new DataRows(bindingTuples.size(), bindingTuples.size(), rowList); - } + public BindingTupleResultSet(List columnNodes, List bindingTuples) { + this.schema = buildSchema(columnNodes); + this.dataRows = buildDataRows(columnNodes, bindingTuples); + } + + @VisibleForTesting + public static Schema buildSchema(List columnNodes) { + List columnList = + columnNodes.stream() + .map(node -> new Schema.Column(node.getName(), node.getAlias(), node.getType())) + 
.collect(Collectors.toList()); + return new Schema(columnList); + } + + @VisibleForTesting + public static DataRows buildDataRows( + List columnNodes, List bindingTuples) { + List rowList = + bindingTuples.stream() + .map( + tuple -> { + Map bindingMap = tuple.getBindingMap(); + Map rowMap = new HashMap<>(); + for (ColumnNode column : columnNodes) { + String columnName = column.columnName(); + Object value = bindingMap.get(columnName).value(); + if (column.getType() == Schema.Type.DATE) { + value = DateFormat.getFormattedDate(new Date((Long) value), FORMAT_JDBC); + } + rowMap.put(columnName, value); + } + return new DataRows.Row(rowMap); + }) + .collect(Collectors.toList()); + + return new DataRows(bindingTuples.size(), bindingTuples.size(), rowList); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DataRows.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DataRows.java index 541d3200a5..fc153afae8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DataRows.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DataRows.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import java.util.Iterator; @@ -12,76 +11,76 @@ public class DataRows implements Iterable { - private long size; - private long totalHits; - private List rows; - - public DataRows(long size, long totalHits, List rows) { - this.size = size; - this.totalHits = totalHits; - this.rows = rows; + private long size; + private long totalHits; + private List rows; + + public DataRows(long size, long totalHits, List rows) { + this.size = size; + this.totalHits = totalHits; + this.rows = rows; + } + + public DataRows(List rows) { + this.size = rows.size(); + this.totalHits = rows.size(); + this.rows = rows; + } + + public long getSize() { + return size; + } + + public long getTotalHits() { + return totalHits; + } + + // Iterator method for DataRows + 
@Override + public Iterator iterator() { + return new Iterator() { + private final Iterator iter = rows.iterator(); + + @Override + public boolean hasNext() { + return iter.hasNext(); + } + + @Override + public Row next() { + return iter.next(); + } + + @Override + public void remove() { + throw new UnsupportedOperationException("No changes allowed to DataRows rows"); + } + }; + } + + // Inner class for Row object + public static class Row { + + private Map data; + + public Row(Map data) { + this.data = data; } - public DataRows(List rows) { - this.size = rows.size(); - this.totalHits = rows.size(); - this.rows = rows; + public Map getContents() { + return data; } - public long getSize() { - return size; + public boolean hasField(String field) { + return data.containsKey(field); } - public long getTotalHits() { - return totalHits; + public Object getData(String field) { + return data.get(field); } - // Iterator method for DataRows - @Override - public Iterator iterator() { - return new Iterator() { - private final Iterator iter = rows.iterator(); - - @Override - public boolean hasNext() { - return iter.hasNext(); - } - - @Override - public Row next() { - return iter.next(); - } - - @Override - public void remove() { - throw new UnsupportedOperationException("No changes allowed to DataRows rows"); - } - }; - } - - // Inner class for Row object - public static class Row { - - private Map data; - - public Row(Map data) { - this.data = data; - } - - public Map getContents() { - return data; - } - - public boolean hasField(String field) { - return data.containsKey(field); - } - - public Object getData(String field) { - return data.get(field); - } - - public Object getDataOrDefault(String field, Object defaultValue) { - return data.getOrDefault(field, defaultValue); - } + public Object getDataOrDefault(String field, Object defaultValue) { + return data.getOrDefault(field, defaultValue); } + } } diff --git 
a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DateFieldFormatter.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DateFieldFormatter.java index aa803975df..dc239abd84 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DateFieldFormatter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DateFieldFormatter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import com.google.common.annotations.VisibleForTesting; @@ -23,163 +22,169 @@ import org.opensearch.sql.legacy.esdomain.LocalClusterState; import org.opensearch.sql.legacy.esdomain.mapping.FieldMappings; -/** - * Formatter to transform date fields into a consistent format for consumption by clients. - */ +/** Formatter to transform date fields into a consistent format for consumption by clients. */ public class DateFieldFormatter { - private static final Logger LOG = LogManager.getLogger(DateFieldFormatter.class); - public static final String FORMAT_JDBC = "yyyy-MM-dd HH:mm:ss.SSS"; - private static final String FORMAT_DELIMITER = "\\|\\|"; - - private static final String FORMAT_DOT_DATE_AND_TIME = "yyyy-MM-dd'T'HH:mm:ss.SSSZ"; - private static final String FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_LOGS_EXCEPTION = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"; - private static final String FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_FLIGHTS_EXCEPTION = "yyyy-MM-dd'T'HH:mm:ss"; - private static final String - FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_FLIGHTS_EXCEPTION_NO_TIME = "yyyy-MM-dd'T'"; - private static final String FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_ECOMMERCE_EXCEPTION = "yyyy-MM-dd'T'HH:mm:ssXXX"; - private static final String FORMAT_DOT_DATE = DateFormat.getFormatString("date"); - - private final Map> dateFieldFormatMap; - private final Map fieldAliasMap; - private Set dateColumns; - - public DateFieldFormatter(String indexName, List columns, Map 
fieldAliasMap) { - this.dateFieldFormatMap = getDateFieldFormatMap(indexName); - this.dateColumns = getDateColumns(columns); - this.fieldAliasMap = fieldAliasMap; + private static final Logger LOG = LogManager.getLogger(DateFieldFormatter.class); + public static final String FORMAT_JDBC = "yyyy-MM-dd HH:mm:ss.SSS"; + private static final String FORMAT_DELIMITER = "\\|\\|"; + + private static final String FORMAT_DOT_DATE_AND_TIME = "yyyy-MM-dd'T'HH:mm:ss.SSSZ"; + private static final String FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_LOGS_EXCEPTION = + "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"; + private static final String FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_FLIGHTS_EXCEPTION = + "yyyy-MM-dd'T'HH:mm:ss"; + private static final String + FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_FLIGHTS_EXCEPTION_NO_TIME = "yyyy-MM-dd'T'"; + private static final String FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_ECOMMERCE_EXCEPTION = + "yyyy-MM-dd'T'HH:mm:ssXXX"; + private static final String FORMAT_DOT_DATE = DateFormat.getFormatString("date"); + + private final Map> dateFieldFormatMap; + private final Map fieldAliasMap; + private Set dateColumns; + + public DateFieldFormatter( + String indexName, List columns, Map fieldAliasMap) { + this.dateFieldFormatMap = getDateFieldFormatMap(indexName); + this.dateColumns = getDateColumns(columns); + this.fieldAliasMap = fieldAliasMap; + } + + @VisibleForTesting + protected DateFieldFormatter( + Map> dateFieldFormatMap, + List columns, + Map fieldAliasMap) { + this.dateFieldFormatMap = dateFieldFormatMap; + this.dateColumns = getDateColumns(columns); + this.fieldAliasMap = fieldAliasMap; + } + + /** + * Apply the JDBC date format ({@code yyyy-MM-dd HH:mm:ss.SSS}) to date values in the current row. + * + * @param rowSource The row in which to format the date values. 
+ */ + public void applyJDBCDateFormat(Map rowSource) { + for (String columnName : dateColumns) { + Object columnOriginalDate = rowSource.get(columnName); + if (columnOriginalDate == null) { + // Don't try to parse null date values + continue; + } + + List formats = getFormatsForColumn(columnName); + if (formats == null) { + LOG.warn( + "Could not determine date formats for column {}; returning original value", columnName); + continue; + } + + Date date = parseDateString(formats, columnOriginalDate.toString()); + if (date != null) { + rowSource.put(columnName, DateFormat.getFormattedDate(date, FORMAT_JDBC)); + break; + } else { + LOG.warn("Could not parse date value; returning original value"); + } } - - @VisibleForTesting - protected DateFieldFormatter(Map> dateFieldFormatMap, - List columns, - Map fieldAliasMap) { - this.dateFieldFormatMap = dateFieldFormatMap; - this.dateColumns = getDateColumns(columns); - this.fieldAliasMap = fieldAliasMap; + } + + private List getFormatsForColumn(String columnName) { + // Handle special cases for column names + if (fieldAliasMap.get(columnName) != null) { + // Column was aliased, and we need to find the base name for the column + columnName = fieldAliasMap.get(columnName); + } else if (columnName.split("\\.").length == 2) { + // Column is part of a join, and is qualified by the table alias + columnName = columnName.split("\\.")[1]; } - - /** - * Apply the JDBC date format ({@code yyyy-MM-dd HH:mm:ss.SSS}) to date values in the current row. - * - * @param rowSource The row in which to format the date values. 
- */ - public void applyJDBCDateFormat(Map rowSource) { - for (String columnName : dateColumns) { - Object columnOriginalDate = rowSource.get(columnName); - if (columnOriginalDate == null) { - // Don't try to parse null date values - continue; - } - - List formats = getFormatsForColumn(columnName); - if (formats == null) { - LOG.warn("Could not determine date formats for column {}; returning original value", columnName); - continue; - } - - Date date = parseDateString(formats, columnOriginalDate.toString()); - if (date != null) { - rowSource.put(columnName, DateFormat.getFormattedDate(date, FORMAT_JDBC)); - break; - } else { - LOG.warn("Could not parse date value; returning original value"); - } + return dateFieldFormatMap.get(columnName); + } + + private Set getDateColumns(List columns) { + return columns.stream() + .filter(column -> column.getType().equals(Schema.Type.DATE.nameLowerCase())) + .map(Schema.Column::getName) + .collect(Collectors.toSet()); + } + + private Map> getDateFieldFormatMap(String indexName) { + LocalClusterState state = LocalClusterState.state(); + Map> formatMap = new HashMap<>(); + + String[] indices = indexName.split("\\|"); + Collection typeProperties = state.getFieldMappings(indices).allMappings(); + + for (FieldMappings fieldMappings : typeProperties) { + for (Map.Entry> field : fieldMappings.data().entrySet()) { + String fieldName = field.getKey(); + Map properties = field.getValue(); + + if (properties.containsKey("format")) { + formatMap.put(fieldName, getFormatsFromProperties(properties.get("format").toString())); + } else { + // Give all field types a format, since operations such as casts + // can change the output type for a field to `date`. 
+ formatMap.put(fieldName, getFormatsFromProperties("date_optional_time")); } + } } - private List getFormatsForColumn(String columnName) { - // Handle special cases for column names - if (fieldAliasMap.get(columnName) != null) { - // Column was aliased, and we need to find the base name for the column - columnName = fieldAliasMap.get(columnName); - } else if (columnName.split("\\.").length == 2) { - // Column is part of a join, and is qualified by the table alias - columnName = columnName.split("\\.")[1]; - } - return dateFieldFormatMap.get(columnName); - } - - private Set getDateColumns(List columns) { - return columns.stream() - .filter(column -> column.getType().equals(Schema.Type.DATE.nameLowerCase())) - .map(Schema.Column::getName) - .collect(Collectors.toSet()); - } - - private Map> getDateFieldFormatMap(String indexName) { - LocalClusterState state = LocalClusterState.state(); - Map> formatMap = new HashMap<>(); - - String[] indices = indexName.split("\\|"); - Collection typeProperties = state.getFieldMappings(indices) - .allMappings(); - - for (FieldMappings fieldMappings: typeProperties) { - for (Map.Entry> field : fieldMappings.data().entrySet()) { - String fieldName = field.getKey(); - Map properties = field.getValue(); - - if (properties.containsKey("format")) { - formatMap.put(fieldName, getFormatsFromProperties(properties.get("format").toString())); - } else { - // Give all field types a format, since operations such as casts - // can change the output type for a field to `date`. 
- formatMap.put(fieldName, getFormatsFromProperties("date_optional_time")); - } + return formatMap; + } + + private List getFormatsFromProperties(String formatProperty) { + String[] formats = formatProperty.split(FORMAT_DELIMITER); + return Arrays.asList(formats); + } + + private Date parseDateString(List formats, String columnOriginalDate) { + TimeZone originalDefaultTimeZone = TimeZone.getDefault(); + Date parsedDate = null; + + // Apache Commons DateUtils uses the default TimeZone for the JVM when parsing. + // However, since all dates on OpenSearch are stored as UTC, we need to + // parse these values using the UTC timezone. + TimeZone.setDefault(TimeZone.getTimeZone("UTC")); + for (String columnFormat : formats) { + try { + switch (columnFormat) { + case "date_optional_time": + case "strict_date_optional_time": + parsedDate = + DateUtils.parseDate( + columnOriginalDate, + FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_LOGS_EXCEPTION, + FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_FLIGHTS_EXCEPTION, + FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_FLIGHTS_EXCEPTION_NO_TIME, + FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_ECOMMERCE_EXCEPTION, + FORMAT_DOT_DATE_AND_TIME, + FORMAT_DOT_DATE); + break; + case "epoch_millis": + parsedDate = new Date(Long.parseLong(columnOriginalDate)); + break; + case "epoch_second": + parsedDate = new Date(Long.parseLong(columnOriginalDate) * 1000); + break; + default: + String formatString = DateFormat.getFormatString(columnFormat); + if (formatString == null) { + // Custom format; take as-is + formatString = columnFormat; } + parsedDate = DateUtils.parseDate(columnOriginalDate, formatString); } - - return formatMap; - } - - private List getFormatsFromProperties(String formatProperty) { - String[] formats = formatProperty.split(FORMAT_DELIMITER); - return Arrays.asList(formats); + } catch (ParseException | NumberFormatException e) { + LOG.warn( + String.format( + "Could not parse date string %s as %s", columnOriginalDate, 
columnFormat)); + } } + // Reset default timezone after parsing + TimeZone.setDefault(originalDefaultTimeZone); - private Date parseDateString(List formats, String columnOriginalDate) { - TimeZone originalDefaultTimeZone = TimeZone.getDefault(); - Date parsedDate = null; - - // Apache Commons DateUtils uses the default TimeZone for the JVM when parsing. - // However, since all dates on OpenSearch are stored as UTC, we need to - // parse these values using the UTC timezone. - TimeZone.setDefault(TimeZone.getTimeZone("UTC")); - for (String columnFormat : formats) { - try { - switch (columnFormat) { - case "date_optional_time": - case "strict_date_optional_time": - parsedDate = DateUtils.parseDate( - columnOriginalDate, - FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_LOGS_EXCEPTION, - FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_FLIGHTS_EXCEPTION, - FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_FLIGHTS_EXCEPTION_NO_TIME, - FORMAT_DOT_OPENSEARCH_DASHBOARDS_SAMPLE_DATA_ECOMMERCE_EXCEPTION, - FORMAT_DOT_DATE_AND_TIME, - FORMAT_DOT_DATE); - break; - case "epoch_millis": - parsedDate = new Date(Long.parseLong(columnOriginalDate)); - break; - case "epoch_second": - parsedDate = new Date(Long.parseLong(columnOriginalDate) * 1000); - break; - default: - String formatString = DateFormat.getFormatString(columnFormat); - if (formatString == null) { - // Custom format; take as-is - formatString = columnFormat; - } - parsedDate = DateUtils.parseDate(columnOriginalDate, formatString); - } - } catch (ParseException | NumberFormatException e) { - LOG.warn(String.format("Could not parse date string %s as %s", columnOriginalDate, columnFormat)); - } - } - // Reset default timezone after parsing - TimeZone.setDefault(originalDefaultTimeZone); - - return parsedDate; - } + return parsedDate; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DateFormat.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DateFormat.java index 
40151c9413..fc9237918c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DateFormat.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DateFormat.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import java.time.Instant; @@ -15,112 +14,121 @@ public class DateFormat { - private static Map formatMap = new HashMap<>(); - - static { - // Special cases that are parsed separately - formatMap.put("date_optional_time", ""); - formatMap.put("strict_date_optional_time", ""); - formatMap.put("epoch_millis", ""); - formatMap.put("epoch_second", ""); - - formatMap.put("basic_date", Date.BASIC_DATE); - formatMap.put("basic_date_time", Date.BASIC_DATE + Time.T + Time.BASIC_TIME + Time.MILLIS + Time.TZ); - formatMap.put("basic_date_time_no_millis", Date.BASIC_DATE + Time.T + Time.BASIC_TIME + Time.TZ); - - formatMap.put("basic_ordinal_date", Date.BASIC_ORDINAL_DATE); - formatMap.put("basic_ordinal_date_time", - Date.BASIC_ORDINAL_DATE + Time.T + Time.BASIC_TIME + Time.MILLIS + Time.TZ); - formatMap.put("basic_ordinal_date_time_no_millis", Date.BASIC_ORDINAL_DATE+ Time.T + Time.BASIC_TIME + Time.TZ); - - formatMap.put("basic_time", Time.BASIC_TIME + Time.MILLIS + Time.TZ); - formatMap.put("basic_time_no_millis", Time.BASIC_TIME + Time.TZ); - - formatMap.put("basic_t_time", Time.T + Time.BASIC_TIME + Time.MILLIS + Time.TZ); - formatMap.put("basic_t_time_no_millis", Time.T + Time.BASIC_TIME + Time.TZ); - - formatMap.put("basic_week_date", Date.BASIC_WEEK_DATE); - formatMap.put("basic_week_date_time", Date.BASIC_WEEK_DATE + Time.T + Time.BASIC_TIME + Time.MILLIS + Time.TZ); - formatMap.put("basic_week_date_time_no_millis", Date.BASIC_WEEK_DATE + Time.T + Time.BASIC_TIME + Time.TZ); - - formatMap.put("date", Date.DATE); - formatMap.put("date_hour", Date.DATE + Time.T + Time.HOUR); - formatMap.put("date_hour_minute", Date.DATE + Time.T + Time.HOUR_MINUTE); - 
formatMap.put("date_hour_minute_second", Date.DATE + Time.T + Time.TIME); - formatMap.put("date_hour_minute_second_fraction", Date.DATE + Time.T + Time.TIME + Time.MILLIS); - formatMap.put("date_hour_minute_second_millis", Date.DATE + Time.T + Time.TIME + Time.MILLIS); - formatMap.put("date_time", Date.DATE + Time.T + Time.TIME + Time.MILLIS + Time.TZZ); - formatMap.put("date_time_no_millis", Date.DATE + Time.T + Time.TIME + Time.TZZ); - - formatMap.put("hour", Time.HOUR); - formatMap.put("hour_minute", Time.HOUR_MINUTE); - formatMap.put("hour_minute_second", Time.TIME); - formatMap.put("hour_minute_second_fraction", Time.TIME + Time.MILLIS); - formatMap.put("hour_minute_second_millis", Time.TIME + Time.MILLIS); - - formatMap.put("ordinal_date", Date.ORDINAL_DATE); - formatMap.put("ordinal_date_time", Date.ORDINAL_DATE + Time.T + Time.TIME + Time.MILLIS + Time.TZZ); - formatMap.put("ordinal_date_time_no_millis", Date.ORDINAL_DATE + Time.T + Time.TIME + Time.TZZ); - - formatMap.put("time", Time.TIME + Time.MILLIS + Time.TZZ); - formatMap.put("time_no_millis", Time.TIME + Time.TZZ); - - formatMap.put("t_time", Time.T + Time.TIME + Time.MILLIS + Time.TZZ); - formatMap.put("t_time_no_millis", Time.T + Time.TIME + Time.TZZ); - - formatMap.put("week_date", Date.WEEK_DATE); - formatMap.put("week_date_time", Date.WEEK_DATE + Time.T + Time.TIME + Time.MILLIS + Time.TZZ); - formatMap.put("week_date_time_no_millis", Date.WEEK_DATE + Time.T + Time.TIME + Time.TZZ); - - // Note: input mapping is "weekyear", but output value is "week_year" - formatMap.put("week_year", Date.WEEKYEAR); - formatMap.put("weekyear_week", Date.WEEKYEAR_WEEK); - formatMap.put("weekyear_week_day", Date.WEEK_DATE); - - formatMap.put("year", Date.YEAR); - formatMap.put("year_month", Date.YEAR_MONTH); - formatMap.put("year_month_day", Date.DATE); - } - - private DateFormat() { - } - - public static String getFormatString(String formatName) { - return formatMap.get(formatName); - } - - public static String 
getFormattedDate(java.util.Date date, String dateFormat) { - Instant instant = date.toInstant(); - ZonedDateTime zdt = ZonedDateTime.ofInstant(instant, ZoneId.of("Etc/UTC")); - return zdt.format(DateTimeFormatter.ofPattern(dateFormat)); - } - - private static class Date { - static String BASIC_DATE = "yyyyMMdd"; - static String BASIC_ORDINAL_DATE = "yyyyDDD"; - static String BASIC_WEEK_DATE = "YYYY'W'wwu"; - - static String DATE = "yyyy-MM-dd"; - static String ORDINAL_DATE = "yyyy-DDD"; - - static String YEAR = "yyyy"; - static String YEAR_MONTH = "yyyy-MM"; - - static String WEEK_DATE = "YYYY-'W'ww-u"; - static String WEEKYEAR = "YYYY"; - static String WEEKYEAR_WEEK = "YYYY-'W'ww"; - } - - private static class Time { - static String T = "'T'"; - static String BASIC_TIME = "HHmmss"; - static String TIME = "HH:mm:ss"; - - static String HOUR = "HH"; - static String HOUR_MINUTE = "HH:mm"; - - static String MILLIS = ".SSS"; - static String TZ = "Z"; - static String TZZ = "XX"; - } + private static Map formatMap = new HashMap<>(); + + static { + // Special cases that are parsed separately + formatMap.put("date_optional_time", ""); + formatMap.put("strict_date_optional_time", ""); + formatMap.put("epoch_millis", ""); + formatMap.put("epoch_second", ""); + + formatMap.put("basic_date", Date.BASIC_DATE); + formatMap.put( + "basic_date_time", Date.BASIC_DATE + Time.T + Time.BASIC_TIME + Time.MILLIS + Time.TZ); + formatMap.put( + "basic_date_time_no_millis", Date.BASIC_DATE + Time.T + Time.BASIC_TIME + Time.TZ); + + formatMap.put("basic_ordinal_date", Date.BASIC_ORDINAL_DATE); + formatMap.put( + "basic_ordinal_date_time", + Date.BASIC_ORDINAL_DATE + Time.T + Time.BASIC_TIME + Time.MILLIS + Time.TZ); + formatMap.put( + "basic_ordinal_date_time_no_millis", + Date.BASIC_ORDINAL_DATE + Time.T + Time.BASIC_TIME + Time.TZ); + + formatMap.put("basic_time", Time.BASIC_TIME + Time.MILLIS + Time.TZ); + formatMap.put("basic_time_no_millis", Time.BASIC_TIME + Time.TZ); + + 
formatMap.put("basic_t_time", Time.T + Time.BASIC_TIME + Time.MILLIS + Time.TZ); + formatMap.put("basic_t_time_no_millis", Time.T + Time.BASIC_TIME + Time.TZ); + + formatMap.put("basic_week_date", Date.BASIC_WEEK_DATE); + formatMap.put( + "basic_week_date_time", + Date.BASIC_WEEK_DATE + Time.T + Time.BASIC_TIME + Time.MILLIS + Time.TZ); + formatMap.put( + "basic_week_date_time_no_millis", + Date.BASIC_WEEK_DATE + Time.T + Time.BASIC_TIME + Time.TZ); + + formatMap.put("date", Date.DATE); + formatMap.put("date_hour", Date.DATE + Time.T + Time.HOUR); + formatMap.put("date_hour_minute", Date.DATE + Time.T + Time.HOUR_MINUTE); + formatMap.put("date_hour_minute_second", Date.DATE + Time.T + Time.TIME); + formatMap.put("date_hour_minute_second_fraction", Date.DATE + Time.T + Time.TIME + Time.MILLIS); + formatMap.put("date_hour_minute_second_millis", Date.DATE + Time.T + Time.TIME + Time.MILLIS); + formatMap.put("date_time", Date.DATE + Time.T + Time.TIME + Time.MILLIS + Time.TZZ); + formatMap.put("date_time_no_millis", Date.DATE + Time.T + Time.TIME + Time.TZZ); + + formatMap.put("hour", Time.HOUR); + formatMap.put("hour_minute", Time.HOUR_MINUTE); + formatMap.put("hour_minute_second", Time.TIME); + formatMap.put("hour_minute_second_fraction", Time.TIME + Time.MILLIS); + formatMap.put("hour_minute_second_millis", Time.TIME + Time.MILLIS); + + formatMap.put("ordinal_date", Date.ORDINAL_DATE); + formatMap.put( + "ordinal_date_time", Date.ORDINAL_DATE + Time.T + Time.TIME + Time.MILLIS + Time.TZZ); + formatMap.put("ordinal_date_time_no_millis", Date.ORDINAL_DATE + Time.T + Time.TIME + Time.TZZ); + + formatMap.put("time", Time.TIME + Time.MILLIS + Time.TZZ); + formatMap.put("time_no_millis", Time.TIME + Time.TZZ); + + formatMap.put("t_time", Time.T + Time.TIME + Time.MILLIS + Time.TZZ); + formatMap.put("t_time_no_millis", Time.T + Time.TIME + Time.TZZ); + + formatMap.put("week_date", Date.WEEK_DATE); + formatMap.put("week_date_time", Date.WEEK_DATE + Time.T + Time.TIME + 
Time.MILLIS + Time.TZZ); + formatMap.put("week_date_time_no_millis", Date.WEEK_DATE + Time.T + Time.TIME + Time.TZZ); + + // Note: input mapping is "weekyear", but output value is "week_year" + formatMap.put("week_year", Date.WEEKYEAR); + formatMap.put("weekyear_week", Date.WEEKYEAR_WEEK); + formatMap.put("weekyear_week_day", Date.WEEK_DATE); + + formatMap.put("year", Date.YEAR); + formatMap.put("year_month", Date.YEAR_MONTH); + formatMap.put("year_month_day", Date.DATE); + } + + private DateFormat() {} + + public static String getFormatString(String formatName) { + return formatMap.get(formatName); + } + + public static String getFormattedDate(java.util.Date date, String dateFormat) { + Instant instant = date.toInstant(); + ZonedDateTime zdt = ZonedDateTime.ofInstant(instant, ZoneId.of("Etc/UTC")); + return zdt.format(DateTimeFormatter.ofPattern(dateFormat)); + } + + private static class Date { + static String BASIC_DATE = "yyyyMMdd"; + static String BASIC_ORDINAL_DATE = "yyyyDDD"; + static String BASIC_WEEK_DATE = "YYYY'W'wwu"; + + static String DATE = "yyyy-MM-dd"; + static String ORDINAL_DATE = "yyyy-DDD"; + + static String YEAR = "yyyy"; + static String YEAR_MONTH = "yyyy-MM"; + + static String WEEK_DATE = "YYYY-'W'ww-u"; + static String WEEKYEAR = "YYYY"; + static String WEEKYEAR_WEEK = "YYYY-'W'ww"; + } + + private static class Time { + static String T = "'T'"; + static String BASIC_TIME = "HHmmss"; + static String TIME = "HH:mm:ss"; + + static String HOUR = "HH"; + static String HOUR_MINUTE = "HH:mm"; + + static String MILLIS = ".SSS"; + static String TZ = "Z"; + static String TZZ = "XX"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DeleteResultSet.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DeleteResultSet.java index ccecacc432..24afb0a7af 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DeleteResultSet.java +++ 
b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DeleteResultSet.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import java.util.Collections; @@ -14,28 +13,28 @@ import org.opensearch.sql.legacy.domain.Delete; public class DeleteResultSet extends ResultSet { - private Delete query; - private Object queryResult; - - public static final String DELETED = "deleted_rows"; - - public DeleteResultSet(Client client, Delete query, Object queryResult) { - this.client = client; - this.query = query; - this.queryResult = queryResult; - this.schema = new Schema(loadColumns()); - this.dataRows = new DataRows(loadRows()); - } - - private List loadColumns() { - return Collections.singletonList(new Schema.Column(DELETED, null, Schema.Type.LONG)); - } - - private List loadRows() { - return Collections.singletonList(new DataRows.Row(loadDeletedData())); - } - - private Map loadDeletedData(){ - return Collections.singletonMap(DELETED, ((BulkByScrollResponse) queryResult).getDeleted()); - } + private Delete query; + private Object queryResult; + + public static final String DELETED = "deleted_rows"; + + public DeleteResultSet(Client client, Delete query, Object queryResult) { + this.client = client; + this.query = query; + this.queryResult = queryResult; + this.schema = new Schema(loadColumns()); + this.dataRows = new DataRows(loadRows()); + } + + private List loadColumns() { + return Collections.singletonList(new Schema.Column(DELETED, null, Schema.Type.LONG)); + } + + private List loadRows() { + return Collections.singletonList(new DataRows.Row(loadDeletedData())); + } + + private Map loadDeletedData() { + return Collections.singletonMap(DELETED, ((BulkByScrollResponse) queryResult).getDeleted()); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DescribeResultSet.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DescribeResultSet.java index 
0cccf73268..eba6db2453 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DescribeResultSet.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/DescribeResultSet.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import java.util.ArrayList; @@ -21,145 +20,142 @@ public class DescribeResultSet extends ResultSet { - private static final int DEFAULT_NUM_PREC_RADIX = 10; - private static final String IS_AUTOINCREMENT = "NO"; - - /** - * You are not required to set the field type to object explicitly, as this is the default value. - * https://www.elastic.co/guide/en/elasticsearch/reference/current/object.html - */ - public static final String DEFAULT_OBJECT_DATATYPE = "object"; - - private IndexStatement statement; - private Object queryResult; - - public DescribeResultSet(Client client, IndexStatement statement, Object queryResult) { - this.client = client; - this.clusterName = getClusterName(); - this.statement = statement; - this.queryResult = queryResult; - - this.schema = new Schema(statement, loadColumns()); - this.dataRows = new DataRows(loadRows()); + private static final int DEFAULT_NUM_PREC_RADIX = 10; + private static final String IS_AUTOINCREMENT = "NO"; + + /** + * You are not required to set the field type to object explicitly, as this is the default value. 
+ * https://www.elastic.co/guide/en/elasticsearch/reference/current/object.html + */ + public static final String DEFAULT_OBJECT_DATATYPE = "object"; + + private IndexStatement statement; + private Object queryResult; + + public DescribeResultSet(Client client, IndexStatement statement, Object queryResult) { + this.client = client; + this.clusterName = getClusterName(); + this.statement = statement; + this.queryResult = queryResult; + + this.schema = new Schema(statement, loadColumns()); + this.dataRows = new DataRows(loadRows()); + } + + private List loadColumns() { + List columns = new ArrayList<>(); + // Unused Columns are still included in Schema to match JDBC/ODBC standard + columns.add(new Column("TABLE_CAT", null, Type.KEYWORD)); + columns.add(new Column("TABLE_SCHEM", null, Type.KEYWORD)); + columns.add(new Column("TABLE_NAME", null, Type.KEYWORD)); + columns.add(new Column("COLUMN_NAME", null, Type.KEYWORD)); + columns.add(new Column("DATA_TYPE", null, Type.INTEGER)); + columns.add(new Column("TYPE_NAME", null, Type.KEYWORD)); + columns.add(new Column("COLUMN_SIZE", null, Type.INTEGER)); + columns.add(new Column("BUFFER_LENGTH", null, Type.INTEGER)); // Not used + columns.add(new Column("DECIMAL_DIGITS", null, Type.INTEGER)); + columns.add(new Column("NUM_PREC_RADIX", null, Type.INTEGER)); + columns.add(new Column("NULLABLE", null, Type.INTEGER)); + columns.add(new Column("REMARKS", null, Type.KEYWORD)); + columns.add(new Column("COLUMN_DEF", null, Type.KEYWORD)); + columns.add(new Column("SQL_DATA_TYPE", null, Type.INTEGER)); // Not used + columns.add(new Column("SQL_DATETIME_SUB", null, Type.INTEGER)); // Not used + columns.add(new Column("CHAR_OCTET_LENGTH", null, Type.INTEGER)); + columns.add(new Column("ORDINAL_POSITION", null, Type.INTEGER)); + columns.add(new Column("IS_NULLABLE", null, Type.KEYWORD)); + columns.add(new Column("SCOPE_CATALOG", null, Type.KEYWORD)); // Not used + columns.add(new Column("SCOPE_SCHEMA", null, Type.KEYWORD)); // Not 
used + columns.add(new Column("SCOPE_TABLE", null, Type.KEYWORD)); // Not used + columns.add(new Column("SOURCE_DATA_TYPE", null, Type.SHORT)); // Not used + columns.add(new Column("IS_AUTOINCREMENT", null, Type.KEYWORD)); + columns.add(new Column("IS_GENERATEDCOLUMN", null, Type.KEYWORD)); + + return columns; + } + + private List loadRows() { + List rows = new ArrayList<>(); + GetIndexResponse indexResponse = (GetIndexResponse) queryResult; + Map indexMappings = indexResponse.getMappings(); + + // Iterate through indices in indexMappings + for (Entry indexCursor : indexMappings.entrySet()) { + String index = indexCursor.getKey(); + + if (matchesPatternIfRegex(index, statement.getIndexPattern())) { + rows.addAll(loadIndexData(index, indexCursor.getValue().getSourceAsMap())); + } } - - private List loadColumns() { - List columns = new ArrayList<>(); - // Unused Columns are still included in Schema to match JDBC/ODBC standard - columns.add(new Column("TABLE_CAT", null, Type.KEYWORD)); - columns.add(new Column("TABLE_SCHEM", null, Type.KEYWORD)); - columns.add(new Column("TABLE_NAME", null, Type.KEYWORD)); - columns.add(new Column("COLUMN_NAME", null, Type.KEYWORD)); - columns.add(new Column("DATA_TYPE", null, Type.INTEGER)); - columns.add(new Column("TYPE_NAME", null, Type.KEYWORD)); - columns.add(new Column("COLUMN_SIZE", null, Type.INTEGER)); - columns.add(new Column("BUFFER_LENGTH", null, Type.INTEGER)); // Not used - columns.add(new Column("DECIMAL_DIGITS", null, Type.INTEGER)); - columns.add(new Column("NUM_PREC_RADIX", null, Type.INTEGER)); - columns.add(new Column("NULLABLE", null, Type.INTEGER)); - columns.add(new Column("REMARKS", null, Type.KEYWORD)); - columns.add(new Column("COLUMN_DEF", null, Type.KEYWORD)); - columns.add(new Column("SQL_DATA_TYPE", null, Type.INTEGER)); // Not used - columns.add(new Column("SQL_DATETIME_SUB", null, Type.INTEGER)); // Not used - columns.add(new Column("CHAR_OCTET_LENGTH", null, Type.INTEGER)); - columns.add(new 
Column("ORDINAL_POSITION", null, Type.INTEGER)); - columns.add(new Column("IS_NULLABLE", null, Type.KEYWORD)); - columns.add(new Column("SCOPE_CATALOG", null, Type.KEYWORD)); // Not used - columns.add(new Column("SCOPE_SCHEMA", null, Type.KEYWORD)); // Not used - columns.add(new Column("SCOPE_TABLE", null, Type.KEYWORD)); // Not used - columns.add(new Column("SOURCE_DATA_TYPE", null, Type.SHORT)); // Not used - columns.add(new Column("IS_AUTOINCREMENT", null, Type.KEYWORD)); - columns.add(new Column("IS_GENERATEDCOLUMN", null, Type.KEYWORD)); - - return columns; + return rows; + } + + @SuppressWarnings("unchecked") + private List loadIndexData(String index, Map mappingMetadata) { + List rows = new ArrayList<>(); + + Map flattenedMetaData = + flattenMappingMetaData(mappingMetadata, "", new HashMap<>()); + int position = 1; // Used as an arbitrary ORDINAL_POSITION value for the time being + for (Entry entry : flattenedMetaData.entrySet()) { + String columnPattern = statement.getColumnPattern(); + + // Check to see if column name matches pattern, if given + if (columnPattern == null || matchesPattern(entry.getKey(), columnPattern)) { + rows.add(new Row(loadRowData(index, entry.getKey(), entry.getValue(), position))); + position++; + } } - private List loadRows() { - List rows = new ArrayList<>(); - GetIndexResponse indexResponse = (GetIndexResponse) queryResult; - Map indexMappings = indexResponse.getMappings(); - - // Iterate through indices in indexMappings - for (Entry indexCursor : indexMappings.entrySet()) { - String index = indexCursor.getKey(); - - if (matchesPatternIfRegex(index, statement.getIndexPattern())) { - rows.addAll(loadIndexData(index, indexCursor.getValue().getSourceAsMap())); - } - } - return rows; - } - - @SuppressWarnings("unchecked") - private List loadIndexData(String index, Map mappingMetadata) { - List rows = new ArrayList<>(); - - Map flattenedMetaData = flattenMappingMetaData(mappingMetadata, "", new HashMap<>()); - int position = 1; // 
Used as an arbitrary ORDINAL_POSITION value for the time being - for (Entry entry : flattenedMetaData.entrySet()) { - String columnPattern = statement.getColumnPattern(); - - // Check to see if column name matches pattern, if given - if (columnPattern == null || matchesPattern(entry.getKey(), columnPattern)) { - rows.add( - new Row( - loadRowData(index, entry.getKey(), entry.getValue(), position) - ) - ); - position++; - } - } - - return rows; + return rows; + } + + private Map loadRowData(String index, String column, String type, int position) { + Map data = new HashMap<>(); + data.put("TABLE_CAT", clusterName); + data.put("TABLE_NAME", index); + data.put("COLUMN_NAME", column); + data.put("TYPE_NAME", type); + data.put("NUM_PREC_RADIX", DEFAULT_NUM_PREC_RADIX); + data.put("NULLABLE", 2); // TODO Defaulting to 2, need to find a way to check this + data.put("ORDINAL_POSITION", position); // There is no deterministic position of column in table + data.put("IS_NULLABLE", ""); // TODO Defaulting to unknown, need to check this + data.put("IS_AUTOINCREMENT", IS_AUTOINCREMENT); // Defaulting to "NO" + data.put("IS_GENERATEDCOLUMN", ""); // TODO Defaulting to unknown, need to check + + return data; + } + + /** + * To not disrupt old logic, for the time being, ShowQueryAction and DescribeQueryAction are using + * the same 'GetIndexRequestBuilder' that was used in the old ShowQueryAction. Since the format of + * the resulting meta data is different, this method is being used to flatten and retrieve types. + * + *

In the future, should look for a way to generalize this since Schema is currently using + * FieldMappingMetaData whereas here we are using MappingMetaData. + */ + @SuppressWarnings("unchecked") + private Map flattenMappingMetaData( + Map mappingMetaData, String currPath, Map flattenedMapping) { + Map properties = (Map) mappingMetaData.get("properties"); + for (Entry entry : properties.entrySet()) { + Map metaData = (Map) entry.getValue(); + + String fullPath = addToPath(currPath, entry.getKey()); + flattenedMapping.put( + fullPath, (String) metaData.getOrDefault("type", DEFAULT_OBJECT_DATATYPE)); + if (metaData.containsKey("properties")) { + flattenedMapping = flattenMappingMetaData(metaData, fullPath, flattenedMapping); + } } - private Map loadRowData(String index, String column, String type, int position) { - Map data = new HashMap<>(); - data.put("TABLE_CAT", clusterName); - data.put("TABLE_NAME", index); - data.put("COLUMN_NAME", column); - data.put("TYPE_NAME", type); - data.put("NUM_PREC_RADIX", DEFAULT_NUM_PREC_RADIX); - data.put("NULLABLE", 2); // TODO Defaulting to 2, need to find a way to check this - data.put("ORDINAL_POSITION", position); // There is no deterministic position of column in table - data.put("IS_NULLABLE", ""); // TODO Defaulting to unknown, need to check this - data.put("IS_AUTOINCREMENT", IS_AUTOINCREMENT); // Defaulting to "NO" - data.put("IS_GENERATEDCOLUMN", ""); // TODO Defaulting to unknown, need to check - - return data; - } + return flattenedMapping; + } - /** - * To not disrupt old logic, for the time being, ShowQueryAction and DescribeQueryAction are using the same - * 'GetIndexRequestBuilder' that was used in the old ShowQueryAction. Since the format of the resulting meta data - * is different, this method is being used to flatten and retrieve types. - *

- * In the future, should look for a way to generalize this since Schema is currently using FieldMappingMetaData - * whereas here we are using MappingMetaData. - */ - @SuppressWarnings("unchecked") - private Map flattenMappingMetaData(Map mappingMetaData, - String currPath, - Map flattenedMapping) { - Map properties = (Map) mappingMetaData.get("properties"); - for (Entry entry : properties.entrySet()) { - Map metaData = (Map) entry.getValue(); - - String fullPath = addToPath(currPath, entry.getKey()); - flattenedMapping.put(fullPath, (String) metaData.getOrDefault("type", DEFAULT_OBJECT_DATATYPE)); - if (metaData.containsKey("properties")) { - flattenedMapping = flattenMappingMetaData(metaData, fullPath, flattenedMapping); - } - } - - return flattenedMapping; + private String addToPath(String currPath, String field) { + if (currPath.isEmpty()) { + return field; } - private String addToPath(String currPath, String field) { - if (currPath.isEmpty()) { - return field; - } - - return currPath + "." + field; - } + return currPath + "." 
+ field; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ErrorMessage.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ErrorMessage.java index 5297fa38ff..aa0d02bed8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ErrorMessage.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ErrorMessage.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import org.json.JSONObject; @@ -11,59 +10,59 @@ public class ErrorMessage { - protected E exception; + protected E exception; - private int status; - private String type; - private String reason; - private String details; + private int status; + private String type; + private String reason; + private String details; - public ErrorMessage(E exception, int status) { - this.exception = exception; - this.status = status; + public ErrorMessage(E exception, int status) { + this.exception = exception; + this.status = status; - this.type = fetchType(); - this.reason = fetchReason(); - this.details = fetchDetails(); - } + this.type = fetchType(); + this.reason = fetchReason(); + this.details = fetchDetails(); + } - private String fetchType() { - return exception.getClass().getSimpleName(); - } + private String fetchType() { + return exception.getClass().getSimpleName(); + } - protected String fetchReason() { - return status == RestStatus.BAD_REQUEST.getStatus() - ? "Invalid SQL query" - : "There was internal problem at backend"; - } + protected String fetchReason() { + return status == RestStatus.BAD_REQUEST.getStatus() + ? 
"Invalid SQL query" + : "There was internal problem at backend"; + } - protected String fetchDetails() { - // Some exception prints internal information (full class name) which is security concern - //return exception.toString(); - return emptyStringIfNull(exception.getLocalizedMessage()); - } + protected String fetchDetails() { + // Some exception prints internal information (full class name) which is security concern + // return exception.toString(); + return emptyStringIfNull(exception.getLocalizedMessage()); + } - private String emptyStringIfNull(String str) { - return str != null ? str : ""; - } + private String emptyStringIfNull(String str) { + return str != null ? str : ""; + } - @Override - public String toString() { - JSONObject output = new JSONObject(); + @Override + public String toString() { + JSONObject output = new JSONObject(); - output.put("status", status); - output.put("error", getErrorAsJson()); + output.put("status", status); + output.put("error", getErrorAsJson()); - return output.toString(2); - } + return output.toString(2); + } - private JSONObject getErrorAsJson() { - JSONObject errorJson = new JSONObject(); + private JSONObject getErrorAsJson() { + JSONObject errorJson = new JSONObject(); - errorJson.put("type", type); - errorJson.put("reason", reason); - errorJson.put("details", details); + errorJson.put("type", type); + errorJson.put("reason", reason); + errorJson.put("details", details); - return errorJson; - } + return errorJson; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ErrorMessageFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ErrorMessageFactory.java index 0e96fe9b67..ba28ee8325 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ErrorMessageFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ErrorMessageFactory.java @@ -3,42 +3,40 @@ * SPDX-License-Identifier: Apache-2.0 */ - package 
org.opensearch.sql.legacy.executor.format; import org.opensearch.OpenSearchException; public class ErrorMessageFactory { - /** - * Create error message based on the exception type - * Exceptions of OpenSearch exception type and exceptions with wrapped OpenSearch exception causes - * should create {@link OpenSearchErrorMessage} - * - * @param e exception to create error message - * @param status exception status code - * @return error message - */ - - public static ErrorMessage createErrorMessage(Exception e, int status) { - if (e instanceof OpenSearchException) { - return new OpenSearchErrorMessage((OpenSearchException) e, - ((OpenSearchException) e).status().getStatus()); - } else if (unwrapCause(e) instanceof OpenSearchException) { - OpenSearchException exception = (OpenSearchException) unwrapCause(e); - return new OpenSearchErrorMessage(exception, exception.status().getStatus()); - } - return new ErrorMessage(e, status); + /** + * Create error message based on the exception type Exceptions of OpenSearch exception type and + * exceptions with wrapped OpenSearch exception causes should create {@link + * OpenSearchErrorMessage} + * + * @param e exception to create error message + * @param status exception status code + * @return error message + */ + public static ErrorMessage createErrorMessage(Exception e, int status) { + if (e instanceof OpenSearchException) { + return new OpenSearchErrorMessage( + (OpenSearchException) e, ((OpenSearchException) e).status().getStatus()); + } else if (unwrapCause(e) instanceof OpenSearchException) { + OpenSearchException exception = (OpenSearchException) unwrapCause(e); + return new OpenSearchErrorMessage(exception, exception.status().getStatus()); } + return new ErrorMessage(e, status); + } - public static Throwable unwrapCause(Throwable t) { - Throwable result = t; - if (result instanceof OpenSearchException) { - return result; - } - if (result.getCause() == null) { - return result; - } - result = unwrapCause(result.getCause()); 
- return result; + public static Throwable unwrapCause(Throwable t) { + Throwable result = t; + if (result instanceof OpenSearchException) { + return result; + } + if (result.getCause() == null) { + return result; } + result = unwrapCause(result.getCause()); + return result; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/OpenSearchErrorMessage.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/OpenSearchErrorMessage.java index a48ab003dc..8117d241b1 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/OpenSearchErrorMessage.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/OpenSearchErrorMessage.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import org.opensearch.OpenSearchException; @@ -13,46 +12,53 @@ public class OpenSearchErrorMessage extends ErrorMessage { - OpenSearchErrorMessage(OpenSearchException exception, int status) { - super(exception, status); - } + OpenSearchErrorMessage(OpenSearchException exception, int status) { + super(exception, status); + } - @Override - protected String fetchReason() { - return "Error occurred in OpenSearch engine: " + exception.getMessage(); - } + @Override + protected String fetchReason() { + return "Error occurred in OpenSearch engine: " + exception.getMessage(); + } - /** Currently Sql-Jdbc plugin only supports string type as reason and details in the error messages */ - @Override - protected String fetchDetails() { - StringBuilder details = new StringBuilder(); - if (exception instanceof SearchPhaseExecutionException) { - details.append(fetchSearchPhaseExecutionExceptionDetails((SearchPhaseExecutionException) exception)); - } else { - details.append(defaultDetails(exception)); - } - details.append("\nFor more details, please send request for Json format to see the raw response from " - + "OpenSearch engine."); - return details.toString(); + /** + * 
Currently Sql-Jdbc plugin only supports string type as reason and details in the error messages + */ + @Override + protected String fetchDetails() { + StringBuilder details = new StringBuilder(); + if (exception instanceof SearchPhaseExecutionException) { + details.append( + fetchSearchPhaseExecutionExceptionDetails((SearchPhaseExecutionException) exception)); + } else { + details.append(defaultDetails(exception)); } + details.append( + "\nFor more details, please send request for Json format to see the raw response from " + + "OpenSearch engine."); + return details.toString(); + } - private String defaultDetails(OpenSearchException exception) { - return exception.getDetailedMessage(); - } + private String defaultDetails(OpenSearchException exception) { + return exception.getDetailedMessage(); + } - /** - * Could not deliver the exactly same error messages due to the limit of JDBC types. - * Currently our cases occurred only SearchPhaseExecutionException instances among all types of OpenSearch exceptions - * according to the survey, see all types: OpenSearchException.OpenSearchExceptionHandle. - * Either add methods of fetching details for different types, or re-make a consistent message by not giving - * detailed messages/root causes but only a suggestion message. - */ - private String fetchSearchPhaseExecutionExceptionDetails(SearchPhaseExecutionException exception) { - StringBuilder details = new StringBuilder(); - ShardSearchFailure[] shardFailures = exception.shardFailures(); - for (ShardSearchFailure failure : shardFailures) { - details.append(StringUtils.format("Shard[%d]: %s\n", failure.shardId(), failure.getCause().toString())); - } - return details.toString(); + /** + * Could not deliver the exactly same error messages due to the limit of JDBC types. 
Currently our + * cases occurred only SearchPhaseExecutionException instances among all types of OpenSearch + * exceptions according to the survey, see all types: + * OpenSearchException.OpenSearchExceptionHandle. Either add methods of fetching details for + * different types, or re-make a consistent message by not giving detailed messages/root causes + * but only a suggestion message. + */ + private String fetchSearchPhaseExecutionExceptionDetails( + SearchPhaseExecutionException exception) { + StringBuilder details = new StringBuilder(); + ShardSearchFailure[] shardFailures = exception.shardFailures(); + for (ShardSearchFailure failure : shardFailures) { + details.append( + StringUtils.format("Shard[%d]: %s\n", failure.shardId(), failure.getCause().toString())); } + return details.toString(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/PrettyFormatRestExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/PrettyFormatRestExecutor.java index 65fd6b7022..00feabf5d8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/PrettyFormatRestExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/PrettyFormatRestExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import java.util.Map; @@ -13,9 +12,9 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.client.Client; import org.opensearch.core.common.Strings; +import org.opensearch.core.rest.RestStatus; import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestChannel; -import org.opensearch.core.rest.RestStatus; import org.opensearch.sql.legacy.cursor.Cursor; import org.opensearch.sql.legacy.cursor.DefaultCursor; import org.opensearch.sql.legacy.exception.SqlParseException; @@ -27,82 +26,84 @@ public class PrettyFormatRestExecutor implements RestExecutor { - private static final Logger LOG = 
LogManager.getLogger(); - - private final String format; - - public PrettyFormatRestExecutor(String format) { - this.format = format.toLowerCase(); + private static final Logger LOG = LogManager.getLogger(); + + private final String format; + + public PrettyFormatRestExecutor(String format) { + this.format = format.toLowerCase(); + } + + /** Execute the QueryAction and return the REST response using the channel. */ + @Override + public void execute( + Client client, Map params, QueryAction queryAction, RestChannel channel) { + String formattedResponse = execute(client, params, queryAction); + BytesRestResponse bytesRestResponse; + if (format.equals("jdbc")) { + bytesRestResponse = + new BytesRestResponse( + RestStatus.OK, "application/json; charset=UTF-8", formattedResponse); + } else { + bytesRestResponse = new BytesRestResponse(RestStatus.OK, formattedResponse); } - /** - * Execute the QueryAction and return the REST response using the channel. - */ - @Override - public void execute(Client client, Map params, QueryAction queryAction, RestChannel channel) { - String formattedResponse = execute(client, params, queryAction); - BytesRestResponse bytesRestResponse; - if (format.equals("jdbc")) { - bytesRestResponse = new BytesRestResponse(RestStatus.OK, - "application/json; charset=UTF-8", - formattedResponse); - } else { - bytesRestResponse = new BytesRestResponse(RestStatus.OK, formattedResponse); - } - - if (!BackOffRetryStrategy.isHealthy(2 * bytesRestResponse.content().length(), this)) { - throw new IllegalStateException( - "[PrettyFormatRestExecutor] Memory could be insufficient when sendResponse()."); - } - - channel.sendResponse(bytesRestResponse); + if (!BackOffRetryStrategy.isHealthy(2 * bytesRestResponse.content().length(), this)) { + throw new IllegalStateException( + "[PrettyFormatRestExecutor] Memory could be insufficient when sendResponse()."); } - @Override - public String execute(Client client, Map params, QueryAction queryAction) { - Protocol 
protocol; - - try { - if (queryAction instanceof DefaultQueryAction) { - protocol = buildProtocolForDefaultQuery(client, (DefaultQueryAction) queryAction); - } else { - Object queryResult = QueryActionElasticExecutor.executeAnyAction(client, queryAction); - protocol = new Protocol(client, queryAction, queryResult, format, Cursor.NULL_CURSOR); - } - } catch (Exception e) { - if (e instanceof OpenSearchException) { - LOG.warn("An error occurred in OpenSearch engine: " - + ((OpenSearchException) e).getDetailedMessage(), e); - } else { - LOG.warn("Error happened in pretty formatter", e); - } - protocol = new Protocol(e); - } - - return protocol.format(); + channel.sendResponse(bytesRestResponse); + } + + @Override + public String execute(Client client, Map params, QueryAction queryAction) { + Protocol protocol; + + try { + if (queryAction instanceof DefaultQueryAction) { + protocol = buildProtocolForDefaultQuery(client, (DefaultQueryAction) queryAction); + } else { + Object queryResult = QueryActionElasticExecutor.executeAnyAction(client, queryAction); + protocol = new Protocol(client, queryAction, queryResult, format, Cursor.NULL_CURSOR); + } + } catch (Exception e) { + if (e instanceof OpenSearchException) { + LOG.warn( + "An error occurred in OpenSearch engine: " + + ((OpenSearchException) e).getDetailedMessage(), + e); + } else { + LOG.warn("Error happened in pretty formatter", e); + } + protocol = new Protocol(e); } - /** - * QueryActionElasticExecutor.executeAnyAction() returns SearchHits inside SearchResponse. - * In order to get scroll ID if any, we need to execute DefaultQueryAction ourselves for SearchResponse. 
- */ - private Protocol buildProtocolForDefaultQuery(Client client, DefaultQueryAction queryAction) - throws SqlParseException { - - SearchResponse response = (SearchResponse) queryAction.explain().get(); - String scrollId = response.getScrollId(); - - Protocol protocol; - if (!Strings.isNullOrEmpty(scrollId)) { - DefaultCursor defaultCursor = new DefaultCursor(); - defaultCursor.setScrollId(scrollId); - defaultCursor.setLimit(queryAction.getSelect().getRowCount()); - defaultCursor.setFetchSize(queryAction.getSqlRequest().fetchSize()); - protocol = new Protocol(client, queryAction, response.getHits(), format, defaultCursor); - } else { - protocol = new Protocol(client, queryAction, response.getHits(), format, Cursor.NULL_CURSOR); - } - - return protocol; + return protocol.format(); + } + + /** + * QueryActionElasticExecutor.executeAnyAction() returns SearchHits inside SearchResponse. In + * order to get scroll ID if any, we need to execute DefaultQueryAction ourselves for + * SearchResponse. 
+ */ + private Protocol buildProtocolForDefaultQuery(Client client, DefaultQueryAction queryAction) + throws SqlParseException { + + SearchResponse response = (SearchResponse) queryAction.explain().get(); + String scrollId = response.getScrollId(); + + Protocol protocol; + if (!Strings.isNullOrEmpty(scrollId)) { + DefaultCursor defaultCursor = new DefaultCursor(); + defaultCursor.setScrollId(scrollId); + defaultCursor.setLimit(queryAction.getSelect().getRowCount()); + defaultCursor.setFetchSize(queryAction.getSqlRequest().fetchSize()); + protocol = new Protocol(client, queryAction, response.getHits(), format, defaultCursor); + } else { + protocol = new Protocol(client, queryAction, response.getHits(), format, Cursor.NULL_CURSOR); } + + return protocol; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/Protocol.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/Protocol.java index aba0a3c599..e6ea767e17 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/Protocol.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/Protocol.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import static org.opensearch.sql.legacy.domain.IndexStatement.StatementType; @@ -33,215 +32,223 @@ public class Protocol { - static final int OK_STATUS = 200; - static final int ERROR_STATUS = 500; - - private final String formatType; - private int status; - private long size; - private long total; - private ResultSet resultSet; - private ErrorMessage error; - private List columnNodeList; - private Cursor cursor = new NullCursor(); - private ColumnTypeProvider scriptColumnType = new ColumnTypeProvider(); - - public Protocol(Client client, QueryAction queryAction, Object queryResult, String formatType, Cursor cursor) { - this.cursor = cursor; - - if (queryAction instanceof QueryPlanQueryAction) { - this.columnNodeList = - 
((QueryPlanRequestBuilder) (((QueryPlanQueryAction) queryAction).explain())).outputColumns(); - } else if (queryAction instanceof DefaultQueryAction) { - scriptColumnType = queryAction.getScriptColumnType(); - } - - this.formatType = formatType; - QueryStatement query = queryAction.getQueryStatement(); - this.status = OK_STATUS; - this.resultSet = loadResultSet(client, query, queryResult); - this.size = resultSet.getDataRows().getSize(); - this.total = resultSet.getDataRows().getTotalHits(); - } - - - public Protocol(Client client, Object queryResult, String formatType, Cursor cursor) { - this.cursor = cursor; - this.status = OK_STATUS; - this.formatType = formatType; - this.resultSet = loadResultSetForCursor(client, queryResult); - } - - public Protocol(Exception e) { - this.formatType = null; - this.status = ERROR_STATUS; - this.error = ErrorMessageFactory.createErrorMessage(e, status); - } - - private ResultSet loadResultSetForCursor(Client client, Object queryResult) { - return new SelectResultSet(client, queryResult, formatType, cursor); - } - - private ResultSet loadResultSet(Client client, QueryStatement queryStatement, Object queryResult) { - if (queryResult instanceof List) { - return new BindingTupleResultSet(columnNodeList, (List) queryResult); - } - if (queryStatement instanceof Delete) { - return new DeleteResultSet(client, (Delete) queryStatement, queryResult); - } else if (queryStatement instanceof Query) { - return new SelectResultSet(client, (Query) queryStatement, queryResult, - scriptColumnType, formatType, cursor); - } else if (queryStatement instanceof IndexStatement) { - IndexStatement statement = (IndexStatement) queryStatement; - StatementType statementType = statement.getStatementType(); - - if (statementType == StatementType.SHOW) { - return new ShowResultSet(client, statement, queryResult); - } else if (statementType == StatementType.DESCRIBE) { - return new DescribeResultSet(client, statement, queryResult); - } - } - - throw new 
UnsupportedOperationException( - String.format("The following instance of QueryStatement is not supported: %s", - queryStatement.getClass().toString()) - ); - } - - public int getStatus() { - return status; - } - - public ResultSet getResultSet() { - return resultSet; - } - - public String format() { - if (status == OK_STATUS) { - switch (formatType) { - case "jdbc": - return outputInJdbcFormat(); - case "table": - return outputInTableFormat(); - case "raw": - return outputInRawFormat(); - default: - throw new UnsupportedOperationException( - String.format("The following format is not supported: %s", formatType)); - } - } - - return error.toString(); + static final int OK_STATUS = 200; + static final int ERROR_STATUS = 500; + + private final String formatType; + private int status; + private long size; + private long total; + private ResultSet resultSet; + private ErrorMessage error; + private List columnNodeList; + private Cursor cursor = new NullCursor(); + private ColumnTypeProvider scriptColumnType = new ColumnTypeProvider(); + + public Protocol( + Client client, + QueryAction queryAction, + Object queryResult, + String formatType, + Cursor cursor) { + this.cursor = cursor; + + if (queryAction instanceof QueryPlanQueryAction) { + this.columnNodeList = + ((QueryPlanRequestBuilder) (((QueryPlanQueryAction) queryAction).explain())) + .outputColumns(); + } else if (queryAction instanceof DefaultQueryAction) { + scriptColumnType = queryAction.getScriptColumnType(); + } + + this.formatType = formatType; + QueryStatement query = queryAction.getQueryStatement(); + this.status = OK_STATUS; + this.resultSet = loadResultSet(client, query, queryResult); + this.size = resultSet.getDataRows().getSize(); + this.total = resultSet.getDataRows().getTotalHits(); + } + + public Protocol(Client client, Object queryResult, String formatType, Cursor cursor) { + this.cursor = cursor; + this.status = OK_STATUS; + this.formatType = formatType; + this.resultSet = 
loadResultSetForCursor(client, queryResult); + } + + public Protocol(Exception e) { + this.formatType = null; + this.status = ERROR_STATUS; + this.error = ErrorMessageFactory.createErrorMessage(e, status); + } + + private ResultSet loadResultSetForCursor(Client client, Object queryResult) { + return new SelectResultSet(client, queryResult, formatType, cursor); + } + + private ResultSet loadResultSet( + Client client, QueryStatement queryStatement, Object queryResult) { + if (queryResult instanceof List) { + return new BindingTupleResultSet(columnNodeList, (List) queryResult); + } + if (queryStatement instanceof Delete) { + return new DeleteResultSet(client, (Delete) queryStatement, queryResult); + } else if (queryStatement instanceof Query) { + return new SelectResultSet( + client, (Query) queryStatement, queryResult, scriptColumnType, formatType, cursor); + } else if (queryStatement instanceof IndexStatement) { + IndexStatement statement = (IndexStatement) queryStatement; + StatementType statementType = statement.getStatementType(); + + if (statementType == StatementType.SHOW) { + return new ShowResultSet(client, statement, queryResult); + } else if (statementType == StatementType.DESCRIBE) { + return new DescribeResultSet(client, statement, queryResult); + } + } + + throw new UnsupportedOperationException( + String.format( + "The following instance of QueryStatement is not supported: %s", + queryStatement.getClass().toString())); + } + + public int getStatus() { + return status; + } + + public ResultSet getResultSet() { + return resultSet; + } + + public String format() { + if (status == OK_STATUS) { + switch (formatType) { + case "jdbc": + return outputInJdbcFormat(); + case "table": + return outputInTableFormat(); + case "raw": + return outputInRawFormat(); + default: + throw new UnsupportedOperationException( + String.format("The following format is not supported: %s", formatType)); + } + } + + return error.toString(); + } + + private String 
outputInJdbcFormat() { + JSONObject formattedOutput = new JSONObject(); + + formattedOutput.put("status", status); + formattedOutput.put("size", size); + formattedOutput.put("total", total); + + JSONArray schema = getSchemaAsJson(); + + formattedOutput.put("schema", schema); + formattedOutput.put("datarows", getDataRowsAsJson()); + + String cursorId = cursor.generateCursorId(); + if (!Strings.isNullOrEmpty(cursorId)) { + formattedOutput.put("cursor", cursorId); + } + + return formattedOutput.toString(2); + } + + private String outputInRawFormat() { + Schema schema = resultSet.getSchema(); + DataRows dataRows = resultSet.getDataRows(); + + StringBuilder formattedOutput = new StringBuilder(); + for (Row row : dataRows) { + formattedOutput.append(rawEntry(row, schema)).append("\n"); + } + + return formattedOutput.toString(); + } + + private String outputInTableFormat() { + return null; + } + + public String cursorFormat() { + if (status == OK_STATUS) { + switch (formatType) { + case "jdbc": + return cursorOutputInJDBCFormat(); + default: + throw new UnsupportedOperationException( + String.format( + "The following response format is not supported for cursor: [%s]", formatType)); + } } + return error.toString(); + } - private String outputInJdbcFormat() { - JSONObject formattedOutput = new JSONObject(); + private String cursorOutputInJDBCFormat() { + JSONObject formattedOutput = new JSONObject(); + formattedOutput.put("datarows", getDataRowsAsJson()); - formattedOutput.put("status", status); - formattedOutput.put("size", size); - formattedOutput.put("total", total); - - JSONArray schema = getSchemaAsJson(); - - formattedOutput.put("schema", schema); - formattedOutput.put("datarows", getDataRowsAsJson()); - - String cursorId = cursor.generateCursorId(); - if (!Strings.isNullOrEmpty(cursorId)) { - formattedOutput.put("cursor", cursorId); - } - - return formattedOutput.toString(2); + String cursorId = cursor.generateCursorId(); + if (!Strings.isNullOrEmpty(cursorId)) { + 
formattedOutput.put("cursor", cursorId); } + return formattedOutput.toString(2); + } - private String outputInRawFormat() { - Schema schema = resultSet.getSchema(); - DataRows dataRows = resultSet.getDataRows(); + private String rawEntry(Row row, Schema schema) { + // TODO String separator is being kept to "|" for the time being as using "\t" will require + // formatting since + // TODO tabs are occurring in multiple of 4 (one option is Guava's Strings.padEnd() method) + return StreamSupport.stream(schema.spliterator(), false) + .map(column -> row.getDataOrDefault(column.getName(), "NULL").toString()) + .collect(Collectors.joining("|")); + } - StringBuilder formattedOutput = new StringBuilder(); - for (Row row : dataRows) { - formattedOutput.append(rawEntry(row, schema)).append("\n"); - } - - return formattedOutput.toString(); - } - - private String outputInTableFormat() { - return null; - } + private JSONArray getSchemaAsJson() { + Schema schema = resultSet.getSchema(); + JSONArray schemaJson = new JSONArray(); - public String cursorFormat() { - if (status == OK_STATUS) { - switch (formatType) { - case "jdbc": - return cursorOutputInJDBCFormat(); - default: - throw new UnsupportedOperationException(String.format( - "The following response format is not supported for cursor: [%s]", formatType)); - } - } - return error.toString(); + for (Column column : schema) { + schemaJson.put(schemaEntry(column.getName(), column.getAlias(), column.getType())); } - private String cursorOutputInJDBCFormat() { - JSONObject formattedOutput = new JSONObject(); - formattedOutput.put("datarows", getDataRowsAsJson()); + return schemaJson; + } - String cursorId = cursor.generateCursorId(); - if (!Strings.isNullOrEmpty(cursorId)) { - formattedOutput.put("cursor", cursorId); - } - return formattedOutput.toString(2); + private JSONObject schemaEntry(String name, String alias, String type) { + JSONObject entry = new JSONObject(); + entry.put("name", name); + if (alias != null) { + 
entry.put("alias", alias); } + entry.put("type", type); - private String rawEntry(Row row, Schema schema) { - // TODO String separator is being kept to "|" for the time being as using "\t" will require formatting since - // TODO tabs are occurring in multiple of 4 (one option is Guava's Strings.padEnd() method) - return StreamSupport.stream(schema.spliterator(), false) - .map(column -> row.getDataOrDefault(column.getName(), "NULL").toString()) - .collect(Collectors.joining("|")); - } - - private JSONArray getSchemaAsJson() { - Schema schema = resultSet.getSchema(); - JSONArray schemaJson = new JSONArray(); - - for (Column column : schema) { - schemaJson.put(schemaEntry(column.getName(), column.getAlias(), column.getType())); - } - - return schemaJson; - } + return entry; + } - private JSONObject schemaEntry(String name, String alias, String type) { - JSONObject entry = new JSONObject(); - entry.put("name", name); - if (alias != null) { - entry.put("alias", alias); - } - entry.put("type", type); + private JSONArray getDataRowsAsJson() { + Schema schema = resultSet.getSchema(); + DataRows dataRows = resultSet.getDataRows(); + JSONArray dataRowsJson = new JSONArray(); - return entry; + for (Row row : dataRows) { + dataRowsJson.put(dataEntry(row, schema)); } - private JSONArray getDataRowsAsJson() { - Schema schema = resultSet.getSchema(); - DataRows dataRows = resultSet.getDataRows(); - JSONArray dataRowsJson = new JSONArray(); - - for (Row row : dataRows) { - dataRowsJson.put(dataEntry(row, schema)); - } - - return dataRowsJson; - } + return dataRowsJson; + } - private JSONArray dataEntry(Row dataRow, Schema schema) { - JSONArray entry = new JSONArray(); - for (Column column : schema) { - String columnName = column.getIdentifier(); - entry.put(dataRow.getDataOrDefault(columnName, JSONObject.NULL)); - } - return entry; + private JSONArray dataEntry(Row dataRow, Schema schema) { + JSONArray entry = new JSONArray(); + for (Column column : schema) { + String columnName = 
column.getIdentifier(); + entry.put(dataRow.getDataOrDefault(columnName, JSONObject.NULL)); } + return entry; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ResultSet.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ResultSet.java index 9864f1ffdc..079a738eb3 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ResultSet.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ResultSet.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import java.util.regex.Matcher; @@ -12,47 +11,44 @@ public abstract class ResultSet { - protected Schema schema; - protected DataRows dataRows; - - protected Client client; - protected String clusterName; - - public Schema getSchema() { - return schema; - } - - public DataRows getDataRows() { - return dataRows; - } - - protected String getClusterName() { - return client.admin().cluster() - .prepareHealth() - .get() - .getClusterName(); - } - - /** - * Check if given string matches the pattern. Do this check only if the pattern is a regex. - * Otherwise skip the matching process and consider it's a match. - * This is a quick fix to support SHOW/DESCRIBE alias by skip mismatch between actual index name - * and pattern (alias). - * @param string string to match - * @param pattern pattern - * @return true if match or pattern is not regular expression. otherwise false. 
- */ - protected boolean matchesPatternIfRegex(String string, String pattern) { - return isNotRegexPattern(pattern) || matchesPattern(string, pattern); - } - - protected boolean matchesPattern(String string, String pattern) { - Pattern p = Pattern.compile(pattern); - Matcher matcher = p.matcher(string); - return matcher.find(); - } - - private boolean isNotRegexPattern(String pattern) { - return !pattern.contains(".") && !pattern.contains("*"); - } + protected Schema schema; + protected DataRows dataRows; + + protected Client client; + protected String clusterName; + + public Schema getSchema() { + return schema; + } + + public DataRows getDataRows() { + return dataRows; + } + + protected String getClusterName() { + return client.admin().cluster().prepareHealth().get().getClusterName(); + } + + /** + * Check if given string matches the pattern. Do this check only if the pattern is a regex. + * Otherwise skip the matching process and consider it's a match. This is a quick fix to support + * SHOW/DESCRIBE alias by skip mismatch between actual index name and pattern (alias). + * + * @param string string to match + * @param pattern pattern + * @return true if match or pattern is not regular expression. otherwise false. 
+ */ + protected boolean matchesPatternIfRegex(String string, String pattern) { + return isNotRegexPattern(pattern) || matchesPattern(string, pattern); + } + + protected boolean matchesPattern(String string, String pattern) { + Pattern p = Pattern.compile(pattern); + Matcher matcher = p.matcher(string); + return matcher.find(); + } + + private boolean isNotRegexPattern(String pattern) { + return !pattern.contains(".") && !pattern.contains("*"); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/Schema.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/Schema.java index e02841fcd6..b29369f713 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/Schema.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/Schema.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import static java.util.Collections.unmodifiableList; @@ -17,144 +16,155 @@ public class Schema implements Iterable { - private String indexName; - private List columns; + private String indexName; + private List columns; - private static Set types; + private static Set types; - static { - types = getTypes(); - } + static { + types = getTypes(); + } - public Schema(String indexName, List columns) { - this.indexName = indexName; - this.columns = columns; - } + public Schema(String indexName, List columns) { + this.indexName = indexName; + this.columns = columns; + } - public Schema(IndexStatement statement, List columns) { - this.indexName = statement.getIndexPattern(); - this.columns = columns; - } + public Schema(IndexStatement statement, List columns) { + this.indexName = statement.getIndexPattern(); + this.columns = columns; + } - public Schema(List columns){ - this.columns = columns; - } + public Schema(List columns) { + this.columns = columns; + } + + public String getIndexName() { + return indexName; + } - public String getIndexName() { - return 
indexName; + public List getHeaders() { + return columns.stream().map(column -> column.getName()).collect(Collectors.toList()); + } + + public List getColumns() { + return unmodifiableList(columns); + } + + private static Set getTypes() { + HashSet types = new HashSet<>(); + for (Type type : Type.values()) { + types.add(type.name()); } - public List getHeaders() { - return columns.stream() - .map(column -> column.getName()) - .collect(Collectors.toList()); + return types; + } + + // A method for efficiently checking if a Type exists + public static boolean hasType(String type) { + return types.contains(type); + } + + // Iterator method for Schema + @Override + public Iterator iterator() { + return new Iterator() { + private final Iterator iter = columns.iterator(); + + @Override + public boolean hasNext() { + return iter.hasNext(); + } + + @Override + public Column next() { + return iter.next(); + } + + @Override + public void remove() { + throw new UnsupportedOperationException("No changes allowed to Schema columns"); + } + }; + } + + // Only core OpenSearch datatypes currently supported + public enum Type { + TEXT, + KEYWORD, + IP, // String types + LONG, + INTEGER, + SHORT, + BYTE, + DOUBLE, + FLOAT, + HALF_FLOAT, + SCALED_FLOAT, // Numeric types + DATE, // Date types + BOOLEAN, // Boolean types + BINARY, // Binary types + OBJECT, + NESTED, + INTEGER_RANGE, + FLOAT_RANGE, + LONG_RANGE, + DOUBLE_RANGE, + DATE_RANGE; // Range types + + public String nameLowerCase() { + return name().toLowerCase(); } + } + + // Inner class for Column object + public static class Column { - public List getColumns() { - return unmodifiableList(columns); + private final String name; + private String alias; + private final Type type; + + private boolean identifiedByAlias; + + public Column(String name, String alias, Type type, boolean identifiedByAlias) { + this.name = name; + this.alias = alias; + this.type = type; + this.identifiedByAlias = identifiedByAlias; } - private static Set 
getTypes() { - HashSet types = new HashSet<>(); - for (Type type : Type.values()) { - types.add(type.name()); - } + public Column(String name, String alias, Type type) { + this(name, alias, type, false); + } - return types; + public String getName() { + return name; } - // A method for efficiently checking if a Type exists - public static boolean hasType(String type) { - return types.contains(type); + public String getAlias() { + return alias; } - // Iterator method for Schema - @Override - public Iterator iterator() { - return new Iterator() { - private final Iterator iter = columns.iterator(); - - @Override - public boolean hasNext() { - return iter.hasNext(); - } - - @Override - public Column next() { - return iter.next(); - } - - @Override - public void remove() { - throw new UnsupportedOperationException("No changes allowed to Schema columns"); - } - }; + public String getType() { + return type.nameLowerCase(); } - // Only core OpenSearch datatypes currently supported - public enum Type { - TEXT, KEYWORD, IP, // String types - LONG, INTEGER, SHORT, BYTE, DOUBLE, FLOAT, HALF_FLOAT, SCALED_FLOAT, // Numeric types - DATE, // Date types - BOOLEAN, // Boolean types - BINARY, // Binary types - OBJECT, - NESTED, - INTEGER_RANGE, FLOAT_RANGE, LONG_RANGE, DOUBLE_RANGE, DATE_RANGE; // Range types - - public String nameLowerCase() { - return name().toLowerCase(); - } + /* + * Some query types (like JOIN) label the data in SearchHit using alias instead of field name if it's given. + * + * This method returns the alias as the identifier if the identifiedByAlias flag is set for such cases so that + * the correct identifier is used to access related data in DataRows. 
+ */ + public String getIdentifier() { + if (identifiedByAlias && alias != null) { + return alias; + } else { + return name; + } } - // Inner class for Column object - public static class Column { - - private final String name; - private String alias; - private final Type type; - - private boolean identifiedByAlias; - - public Column(String name, String alias, Type type, boolean identifiedByAlias) { - this.name = name; - this.alias = alias; - this.type = type; - this.identifiedByAlias = identifiedByAlias; - } - - public Column(String name, String alias, Type type) { - this(name, alias, type, false); - } - - public String getName() { - return name; - } - - public String getAlias() { - return alias; - } - - public String getType() { - return type.nameLowerCase(); - } - - /* - * Some query types (like JOIN) label the data in SearchHit using alias instead of field name if it's given. - * - * This method returns the alias as the identifier if the identifiedByAlias flag is set for such cases so that - * the correct identifier is used to access related data in DataRows. 
- */ - public String getIdentifier() { - if (identifiedByAlias && alias != null) { - return alias; - } else { - return name; - } - } - - public Type getEnumType() { - return type; - } + public Type getEnumType() { + return type; } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/SelectResultSet.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/SelectResultSet.java index a6f4cf815a..aaf5ef2bc0 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/SelectResultSet.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/SelectResultSet.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import static java.util.Collections.unmodifiableMap; @@ -59,820 +58,787 @@ public class SelectResultSet extends ResultSet { - private static final Logger LOG = LogManager.getLogger(SelectResultSet.class); - - public static final String SCORE = "_score"; - private final String formatType; - - private Query query; - private Object queryResult; - - private boolean selectAll; - private String indexName; - private List columns = new ArrayList<>(); - private ColumnTypeProvider outputColumnType; - - private List head; - private long size; - private long totalHits; - private long internalTotalHits; - private List rows; - private Cursor cursor; - - private DateFieldFormatter dateFieldFormatter; - // alias -> base field name - private Map fieldAliasMap = new HashMap<>(); - - public SelectResultSet(Client client, - Query query, - Object queryResult, - ColumnTypeProvider outputColumnType, - String formatType, - Cursor cursor) { - this.client = client; - this.query = query; - this.queryResult = queryResult; - this.selectAll = false; - this.formatType = formatType; - this.outputColumnType = outputColumnType; - this.cursor = cursor; - - if (isJoinQuery()) { - JoinSelect joinQuery = (JoinSelect) query; - loadFromEsState(joinQuery.getFirstTable()); - 
loadFromEsState(joinQuery.getSecondTable()); - } else { - loadFromEsState(query); - } - this.schema = new Schema(indexName, columns); - this.head = schema.getHeaders(); - this.dateFieldFormatter = new DateFieldFormatter(indexName, columns, fieldAliasMap); - - extractData(); - populateCursor(); - this.dataRows = new DataRows(size, totalHits, rows); - } - - public SelectResultSet(Client client, Object queryResult, String formatType, Cursor cursor) { - this.cursor = cursor; - this.client = client; - this.queryResult = queryResult; - this.selectAll = false; - this.formatType = formatType; - populateResultSetFromCursor(cursor); - } - - public String indexName(){ - return this.indexName; - } - - public Map fieldAliasMap() { - return unmodifiableMap(this.fieldAliasMap); - } - - public void populateResultSetFromCursor(Cursor cursor) { - switch (cursor.getType()) { - case DEFAULT: - populateResultSetFromDefaultCursor((DefaultCursor) cursor); - default: - return; - } - } - - private void populateResultSetFromDefaultCursor(DefaultCursor cursor) { - this.columns = cursor.getColumns(); - this.schema = new Schema(columns); - this.head = schema.getHeaders(); - this.dateFieldFormatter = new DateFieldFormatter( - cursor.getIndexPattern(), - columns, - cursor.getFieldAliasMap() - ); - extractData(); - this.dataRows = new DataRows(size, totalHits, rows); - } - - //*********************************************************** - // Logic for loading Columns to be stored in Schema - //*********************************************************** - - /** - * Makes a request to local node to receive meta data information and maps each field specified in SELECT to its - * type in the index mapping - */ - private void loadFromEsState(Query query) { - String indexName = fetchIndexName(query); - String[] fieldNames = fetchFieldsAsArray(query); - - // Reset boolean in the case of JOIN query where multiple calls to loadFromEsState() are made - selectAll = isSimpleQuerySelectAll(query) || 
isJoinQuerySelectAll(query, fieldNames); - - GetFieldMappingsRequest request = new GetFieldMappingsRequest() - .indices(indexName) - .fields(selectAllFieldsIfEmpty(fieldNames)) - .local(true); - GetFieldMappingsResponse response = client.admin().indices() - .getFieldMappings(request) - .actionGet(); - - Map> mappings = response.mappings(); - if (mappings.isEmpty() || !mappings.containsKey(indexName)) { - throw new IllegalArgumentException(String.format("Index type %s does not exist", query.getFrom())); - } - Map typeMappings = mappings.get(indexName); - - - this.indexName = this.indexName == null ? indexName : (this.indexName + "|" + indexName); - this.columns.addAll(renameColumnWithTableAlias(query, populateColumns(query, fieldNames, typeMappings))); - } - - /** - * Rename column name with table alias as prefix for join query - */ - private List renameColumnWithTableAlias(Query query, List columns) { - List renamedCols; - if ((query instanceof TableOnJoinSelect) - && !Strings.isNullOrEmpty(((TableOnJoinSelect) query).getAlias())) { - - TableOnJoinSelect joinQuery = (TableOnJoinSelect) query; - renamedCols = new ArrayList<>(); - - for (Schema.Column column : columns) { - renamedCols.add(new Schema.Column( - joinQuery.getAlias() + "." 
+ column.getName(), - column.getAlias(), - Schema.Type.valueOf(column.getType().toUpperCase()), - true - )); - } - } else { - renamedCols = columns; - } - return renamedCols; - } - - private boolean isSelectAll() { - return selectAll; - } - - /** - * Is a simple (non-join/non-group-by) query with SELECT * explicitly - */ - private boolean isSimpleQuerySelectAll(Query query) { - return (query instanceof Select) && ((Select) query).isSelectAll(); - } - - /** - * Is a join query with SELECT * on either one of the tables some fields specified - */ - private boolean isJoinQuerySelectAll(Query query, String[] fieldNames) { - return fieldNames.length == 0 && !fieldsSelectedOnAnotherTable(query); - } - - /** - * In the case of a JOIN query, if no fields are SELECTed on for a particular table, the other table's fields are - * checked in SELECT to ensure a table is not incorrectly marked as a isSelectAll() case. - */ - private boolean fieldsSelectedOnAnotherTable(Query query) { - if (isJoinQuery()) { - TableOnJoinSelect otherTable = getOtherTable(query); - return otherTable.getSelectedFields().size() > 0; - } - - return false; - } - - private TableOnJoinSelect getOtherTable(Query currJoinSelect) { - JoinSelect joinQuery = (JoinSelect) query; - if (joinQuery.getFirstTable() == currJoinSelect) { - return joinQuery.getSecondTable(); - } else { - return joinQuery.getFirstTable(); - } - } - - private boolean containsWildcard(Query query) { - for (Field field : fetchFields(query)) { - if (!(field instanceof MethodField) && field.getName().contains("*")) { - return true; - } - } - - return false; - } - - private String fetchIndexName(Query query) { - return query.getFrom().get(0).getIndex(); - } - - /** - * queryResult is checked to see if it's of type Aggregation in which case the aggregation fields in GROUP BY - * are returned as well. 
This prevents returning a Schema of all fields when SELECT * is called with - * GROUP BY (since all fields will be retrieved from the typeMappings request when no fields are returned from - * fetchFields()). - *

- * After getting all of the fields from GROUP BY, the fields from SELECT are iterated and only the fields of type - * MethodField are added (to prevent duplicate field in Schema for queries like - * "SELECT age, COUNT(*) FROM bank GROUP BY age" where 'age' is mentioned in both SELECT and GROUP BY). + private static final Logger LOG = LogManager.getLogger(SelectResultSet.class); + + public static final String SCORE = "_score"; + private final String formatType; + + private Query query; + private Object queryResult; + + private boolean selectAll; + private String indexName; + private List columns = new ArrayList<>(); + private ColumnTypeProvider outputColumnType; + + private List head; + private long size; + private long totalHits; + private long internalTotalHits; + private List rows; + private Cursor cursor; + + private DateFieldFormatter dateFieldFormatter; + // alias -> base field name + private Map fieldAliasMap = new HashMap<>(); + + public SelectResultSet( + Client client, + Query query, + Object queryResult, + ColumnTypeProvider outputColumnType, + String formatType, + Cursor cursor) { + this.client = client; + this.query = query; + this.queryResult = queryResult; + this.selectAll = false; + this.formatType = formatType; + this.outputColumnType = outputColumnType; + this.cursor = cursor; + + if (isJoinQuery()) { + JoinSelect joinQuery = (JoinSelect) query; + loadFromEsState(joinQuery.getFirstTable()); + loadFromEsState(joinQuery.getSecondTable()); + } else { + loadFromEsState(query); + } + this.schema = new Schema(indexName, columns); + this.head = schema.getHeaders(); + this.dateFieldFormatter = new DateFieldFormatter(indexName, columns, fieldAliasMap); + + extractData(); + populateCursor(); + this.dataRows = new DataRows(size, totalHits, rows); + } + + public SelectResultSet(Client client, Object queryResult, String formatType, Cursor cursor) { + this.cursor = cursor; + this.client = client; + this.queryResult = queryResult; + this.selectAll = false; + 
this.formatType = formatType; + populateResultSetFromCursor(cursor); + } + + public String indexName() { + return this.indexName; + } + + public Map fieldAliasMap() { + return unmodifiableMap(this.fieldAliasMap); + } + + public void populateResultSetFromCursor(Cursor cursor) { + switch (cursor.getType()) { + case DEFAULT: + populateResultSetFromDefaultCursor((DefaultCursor) cursor); + default: + return; + } + } + + private void populateResultSetFromDefaultCursor(DefaultCursor cursor) { + this.columns = cursor.getColumns(); + this.schema = new Schema(columns); + this.head = schema.getHeaders(); + this.dateFieldFormatter = + new DateFieldFormatter(cursor.getIndexPattern(), columns, cursor.getFieldAliasMap()); + extractData(); + this.dataRows = new DataRows(size, totalHits, rows); + } + + // *********************************************************** + // Logic for loading Columns to be stored in Schema + // *********************************************************** + + /** + * Makes a request to local node to receive meta data information and maps each field specified in + * SELECT to its type in the index mapping + */ + private void loadFromEsState(Query query) { + String indexName = fetchIndexName(query); + String[] fieldNames = fetchFieldsAsArray(query); + + // Reset boolean in the case of JOIN query where multiple calls to loadFromEsState() are made + selectAll = isSimpleQuerySelectAll(query) || isJoinQuerySelectAll(query, fieldNames); + + GetFieldMappingsRequest request = + new GetFieldMappingsRequest() + .indices(indexName) + .fields(selectAllFieldsIfEmpty(fieldNames)) + .local(true); + GetFieldMappingsResponse response = + client.admin().indices().getFieldMappings(request).actionGet(); + + Map> mappings = response.mappings(); + if (mappings.isEmpty() || !mappings.containsKey(indexName)) { + throw new IllegalArgumentException( + String.format("Index type %s does not exist", query.getFrom())); + } + Map typeMappings = mappings.get(indexName); + + this.indexName 
= this.indexName == null ? indexName : (this.indexName + "|" + indexName); + this.columns.addAll( + renameColumnWithTableAlias(query, populateColumns(query, fieldNames, typeMappings))); + } + + /** Rename column name with table alias as prefix for join query */ + private List renameColumnWithTableAlias(Query query, List columns) { + List renamedCols; + if ((query instanceof TableOnJoinSelect) + && !Strings.isNullOrEmpty(((TableOnJoinSelect) query).getAlias())) { + + TableOnJoinSelect joinQuery = (TableOnJoinSelect) query; + renamedCols = new ArrayList<>(); + + for (Schema.Column column : columns) { + renamedCols.add( + new Schema.Column( + joinQuery.getAlias() + "." + column.getName(), + column.getAlias(), + Schema.Type.valueOf(column.getType().toUpperCase()), + true)); + } + } else { + renamedCols = columns; + } + return renamedCols; + } + + private boolean isSelectAll() { + return selectAll; + } + + /** Is a simple (non-join/non-group-by) query with SELECT * explicitly */ + private boolean isSimpleQuerySelectAll(Query query) { + return (query instanceof Select) && ((Select) query).isSelectAll(); + } + + /** Is a join query with SELECT * on either one of the tables some fields specified */ + private boolean isJoinQuerySelectAll(Query query, String[] fieldNames) { + return fieldNames.length == 0 && !fieldsSelectedOnAnotherTable(query); + } + + /** + * In the case of a JOIN query, if no fields are SELECTed on for a particular table, the other + * table's fields are checked in SELECT to ensure a table is not incorrectly marked as a + * isSelectAll() case. 
+ */ + private boolean fieldsSelectedOnAnotherTable(Query query) { + if (isJoinQuery()) { + TableOnJoinSelect otherTable = getOtherTable(query); + return otherTable.getSelectedFields().size() > 0; + } + + return false; + } + + private TableOnJoinSelect getOtherTable(Query currJoinSelect) { + JoinSelect joinQuery = (JoinSelect) query; + if (joinQuery.getFirstTable() == currJoinSelect) { + return joinQuery.getSecondTable(); + } else { + return joinQuery.getFirstTable(); + } + } + + private boolean containsWildcard(Query query) { + for (Field field : fetchFields(query)) { + if (!(field instanceof MethodField) && field.getName().contains("*")) { + return true; + } + } + + return false; + } + + private String fetchIndexName(Query query) { + return query.getFrom().get(0).getIndex(); + } + + /** + * queryResult is checked to see if it's of type Aggregation in which case the aggregation fields + * in GROUP BY are returned as well. This prevents returning a Schema of all fields when SELECT * + * is called with GROUP BY (since all fields will be retrieved from the typeMappings request when + * no fields are returned from fetchFields()). + * + *

After getting all of the fields from GROUP BY, the fields from SELECT are iterated and only + * the fields of type MethodField are added (to prevent duplicate field in Schema for queries like + * "SELECT age, COUNT(*) FROM bank GROUP BY age" where 'age' is mentioned in both SELECT and GROUP + * BY). + */ + private List fetchFields(Query query) { + Select select = (Select) query; + + if (queryResult instanceof Aggregations) { + List groupByFields = + select.getGroupBys().isEmpty() ? new ArrayList<>() : select.getGroupBys().get(0); + + for (Field selectField : select.getFields()) { + if (selectField instanceof MethodField && !selectField.isScriptField()) { + groupByFields.add(selectField); + } else if (selectField.isScriptField() + && selectField.getAlias().equals(groupByFields.get(0).getName())) { + return select.getFields(); + } + } + return groupByFields; + } + + if (query instanceof TableOnJoinSelect) { + return ((TableOnJoinSelect) query).getSelectedFields(); + } + + return select.getFields(); + } + + private String[] fetchFieldsAsArray(Query query) { + List fields = fetchFields(query); + return fields.stream().map(this::getFieldName).toArray(String[]::new); + } + + private String getFieldName(Field field) { + if (field instanceof MethodField) { + return field.getAlias(); + } + + return field.getName(); + } + + private Map fetchFieldMap(Query query) { + Map fieldMap = new HashMap<>(); + + for (Field field : fetchFields(query)) { + fieldMap.put(getFieldName(field), field); + } + + return fieldMap; + } + + private String[] selectAllFieldsIfEmpty(String[] fields) { + if (isSelectAll()) { + return new String[] {"*"}; + } + + return fields; + } + + private String[] emptyArrayIfNull(String typeName) { + if (typeName != null) { + return new String[] {typeName}; + } else { + return Strings.EMPTY_ARRAY; + } + } + + private Schema.Type fetchMethodReturnType(int fieldIndex, MethodField field) { + switch (field.getName().toLowerCase()) { + case "count": + return 
Schema.Type.LONG; + case "sum": + case "avg": + case "min": + case "max": + case "percentiles": + return Schema.Type.DOUBLE; + case "script": + { + // TODO: return type information is disconnected from the function definitions in + // SQLFunctions. + // Refactor SQLFunctions to have functions self-explanatory (types, scripts) and pluggable + // (similar to Strategy pattern) + if (field.getExpression() instanceof SQLCaseExpr) { + return Schema.Type.TEXT; + } + Schema.Type resolvedType = outputColumnType.get(fieldIndex); + return SQLFunctions.getScriptFunctionReturnType(field, resolvedType); + } + default: + throw new UnsupportedOperationException( + String.format("The following method is not supported in Schema: %s", field.getName())); + } + } + + /** + * Returns a list of Column objects which contain names identifying the field as well as its type. + * + *

If all fields are being selected (SELECT *) then the order of fields returned will be + * random, otherwise the output will be in the same order as how they were selected. + * + *

If an alias was given for a field, that will be used to identify the field in Column, + * otherwise the field name will be used. + */ + private List populateColumns( + Query query, String[] fieldNames, Map typeMappings) { + List fieldNameList; + + if (isSelectAll() || containsWildcard(query)) { + fieldNameList = new ArrayList<>(typeMappings.keySet()); + } else { + fieldNameList = Arrays.asList(fieldNames); + } + + /* + * The reason the 'fieldMap' mapping is needed on top of 'fieldNameList' is because the map would be + * empty in cases like 'SELECT *' but List fieldNameList will always be set in either case. + * That way, 'fieldNameList' is used to access field names in order that they were selected, if given, + * and then 'fieldMap' is used to access the respective Field object to check for aliases. */ - private List fetchFields(Query query) { - Select select = (Select) query; - - if (queryResult instanceof Aggregations) { - List groupByFields = select.getGroupBys().isEmpty() ? new ArrayList<>() : - select.getGroupBys().get(0); - - - for (Field selectField : select.getFields()) { - if (selectField instanceof MethodField && !selectField.isScriptField()) { - groupByFields.add(selectField); - } else if (selectField.isScriptField() - && selectField.getAlias().equals(groupByFields.get(0).getName())) { - return select.getFields(); - } - } - return groupByFields; - } - - if (query instanceof TableOnJoinSelect) { - return ((TableOnJoinSelect) query).getSelectedFields(); - } - - return select.getFields(); - } - - private String[] fetchFieldsAsArray(Query query) { - List fields = fetchFields(query); - return fields.stream() - .map(this::getFieldName) - .toArray(String[]::new); - } - - private String getFieldName(Field field) { - if (field instanceof MethodField) { - return field.getAlias(); - } - - return field.getName(); - } - - private Map fetchFieldMap(Query query) { - Map fieldMap = new HashMap<>(); - - for (Field field : fetchFields(query)) { - 
fieldMap.put(getFieldName(field), field); - } - - return fieldMap; - } - - private String[] selectAllFieldsIfEmpty(String[] fields) { - if (isSelectAll()) { - return new String[]{"*"}; - } - - return fields; - } - - private String[] emptyArrayIfNull(String typeName) { - if (typeName != null) { - return new String[]{typeName}; - } else { - return Strings.EMPTY_ARRAY; - } - } - - private Schema.Type fetchMethodReturnType(int fieldIndex, MethodField field) { - switch (field.getName().toLowerCase()) { - case "count": - return Schema.Type.LONG; - case "sum": - case "avg": - case "min": - case "max": - case "percentiles": - return Schema.Type.DOUBLE; - case "script": { - // TODO: return type information is disconnected from the function definitions in SQLFunctions. - // Refactor SQLFunctions to have functions self-explanatory (types, scripts) and pluggable - // (similar to Strategy pattern) - if (field.getExpression() instanceof SQLCaseExpr) { - return Schema.Type.TEXT; - } - Schema.Type resolvedType = outputColumnType.get(fieldIndex); - return SQLFunctions.getScriptFunctionReturnType(field, resolvedType); - } - default: - throw new UnsupportedOperationException( - String.format("The following method is not supported in Schema: %s", field.getName())); - } - } - - /** - * Returns a list of Column objects which contain names identifying the field as well as its type. - *

- * If all fields are being selected (SELECT *) then the order of fields returned will be random, otherwise - * the output will be in the same order as how they were selected. - *

- * If an alias was given for a field, that will be used to identify the field in Column, otherwise the field name - * will be used. - */ - private List populateColumns(Query query, String[] fieldNames, Map typeMappings) { - List fieldNameList; - - if (isSelectAll() || containsWildcard(query)) { - fieldNameList = new ArrayList<>(typeMappings.keySet()); - } else { - fieldNameList = Arrays.asList(fieldNames); + Map fieldMap = fetchFieldMap(query); + List columns = new ArrayList<>(); + for (String fieldName : fieldNameList) { + // _score is a special case since it is not included in typeMappings, so it is checked for + // here + if (fieldName.equals(SCORE)) { + columns.add( + new Schema.Column(fieldName, fetchAlias(fieldName, fieldMap), Schema.Type.FLOAT)); + continue; + } + /* + * Methods are also a special case as their type cannot be determined from typeMappings, so it is checked + * for here. + * + * Note: When adding the Column for Method, alias is used in place of getName() because the default name + * is set as alias (ex. COUNT(*)) and overwritten if an alias is given. So alias is used as the + * name instead. + */ + if (fieldMap.get(fieldName) instanceof MethodField) { + MethodField methodField = (MethodField) fieldMap.get(fieldName); + int fieldIndex = fieldNameList.indexOf(fieldName); + + SQLExpr expr = methodField.getExpression(); + if (expr instanceof SQLCastExpr) { + // Since CAST expressions create an alias for a field, we need to save the original field + // name + // for this alias for formatting data later. + SQLIdentifierExpr castFieldIdentifier = + (SQLIdentifierExpr) ((SQLCastExpr) expr).getExpr(); + fieldAliasMap.put(methodField.getAlias(), castFieldIdentifier.getName()); + } + + columns.add( + new Schema.Column( + methodField.getAlias(), null, fetchMethodReturnType(fieldIndex, methodField))); + continue; + } + + /* + * Unnecessary fields (ex. _index, _parent) are ignored. 
+ * Fields like field.keyword will be ignored when isSelectAll is true but will be returned if + * explicitly selected. + */ + FieldMapping field = new FieldMapping(fieldName, typeMappings, fieldMap); + if (!field.isMetaField()) { + + if (field.isMultiField() && !field.isSpecified()) { + continue; + } + if (field.isPropertyField() && !field.isSpecified() && !field.isWildcardSpecified()) { + continue; } /* - * The reason the 'fieldMap' mapping is needed on top of 'fieldNameList' is because the map would be - * empty in cases like 'SELECT *' but List fieldNameList will always be set in either case. - * That way, 'fieldNameList' is used to access field names in order that they were selected, if given, - * and then 'fieldMap' is used to access the respective Field object to check for aliases. + * Three cases regarding Type: + * 1. If Type exists, create Column + * 2. If Type doesn't exist and isSelectAll() is false, throw exception + * 3. If Type doesn't exist and isSelectAll() is true, Column creation for fieldName is skipped */ - Map fieldMap = fetchFieldMap(query); - List columns = new ArrayList<>(); - for (String fieldName : fieldNameList) { - // _score is a special case since it is not included in typeMappings, so it is checked for here - if (fieldName.equals(SCORE)) { - columns.add(new Schema.Column(fieldName, fetchAlias(fieldName, fieldMap), Schema.Type.FLOAT)); - continue; - } - /* - * Methods are also a special case as their type cannot be determined from typeMappings, so it is checked - * for here. - * - * Note: When adding the Column for Method, alias is used in place of getName() because the default name - * is set as alias (ex. COUNT(*)) and overwritten if an alias is given. So alias is used as the - * name instead. 
- */ - if (fieldMap.get(fieldName) instanceof MethodField) { - MethodField methodField = (MethodField) fieldMap.get(fieldName); - int fieldIndex = fieldNameList.indexOf(fieldName); - - SQLExpr expr = methodField.getExpression(); - if (expr instanceof SQLCastExpr) { - // Since CAST expressions create an alias for a field, we need to save the original field name - // for this alias for formatting data later. - SQLIdentifierExpr castFieldIdentifier = (SQLIdentifierExpr) ((SQLCastExpr) expr).getExpr(); - fieldAliasMap.put(methodField.getAlias(), castFieldIdentifier.getName()); - } - - columns.add( - new Schema.Column( - methodField.getAlias(), - null, - fetchMethodReturnType(fieldIndex, methodField) - ) - ); - continue; - } - - /* - * Unnecessary fields (ex. _index, _parent) are ignored. - * Fields like field.keyword will be ignored when isSelectAll is true but will be returned if - * explicitly selected. - */ - FieldMapping field = new FieldMapping(fieldName, typeMappings, fieldMap); - if (!field.isMetaField()) { - - if (field.isMultiField() && !field.isSpecified()) { - continue; - } - if (field.isPropertyField() && !field.isSpecified() && !field.isWildcardSpecified()) { - continue; - } - - /* - * Three cases regarding Type: - * 1. If Type exists, create Column - * 2. If Type doesn't exist and isSelectAll() is false, throw exception - * 3. 
If Type doesn't exist and isSelectAll() is true, Column creation for fieldName is skipped - */ - String type = field.type().toUpperCase(); - if (Schema.hasType(type)) { - - // If the current field is a group key, we should use alias as the identifier - boolean isGroupKey = false; - Select select = (Select) query; - if (null != select.getGroupBys() - && !select.getGroupBys().isEmpty() - && select.getGroupBys().get(0).contains(fieldMap.get(fieldName))) { - isGroupKey = true; - } - - columns.add( - new Schema.Column( - fieldName, - fetchAlias(fieldName, fieldMap), - Schema.Type.valueOf(type), - isGroupKey - ) - ); - } else if (!isSelectAll()) { - throw new IllegalArgumentException( - String.format("%s fieldName types are currently not supported.", type)); - } - } - } - - if (isSelectAllOnly(query)) { - populateAllNestedFields(columns, fieldNameList); - } - return columns; - } - - /** - * SELECT * only without other columns or wildcard pattern specified. - */ - private boolean isSelectAllOnly(Query query) { - return isSelectAll() && fetchFields(query).isEmpty(); - } - - /** - * Special case which trades off consistency of SELECT * meaning for more intuition from customer perspective. - * In other cases, * means all regular fields on the level. - * The only exception here is * picks all non-regular (nested) fields as JSON without flatten. - */ - private void populateAllNestedFields(List columns, List fields) { - Set nestedFieldPaths = fields.stream(). - map(FieldMapping::new). - filter(FieldMapping::isPropertyField). - filter(f -> !f.isMultiField()). - map(FieldMapping::path). - collect(toSet()); - - for (String nestedFieldPath : nestedFieldPaths) { - columns.add( - new Schema.Column(nestedFieldPath, "", Schema.Type.TEXT) - ); - } - } - - /** - * Since this helper method is called within a check to see if the field exists in type mapping, it's - * already confirmed that the fieldName is valid. 
The check for fieldName in fieldMap has to be done in the case - * that 'SELECT *' was called since the map will be empty. - */ - private String fetchAlias(String fieldName, Map fieldMap) { - if (fieldMap.containsKey(fieldName)) { - return fieldMap.get(fieldName).getAlias(); - } - - return null; - } - - //*********************************************************** - // Logic for loading Rows to be stored in DataRows - //*********************************************************** - + String type = field.type().toUpperCase(); + if (Schema.hasType(type)) { + + // If the current field is a group key, we should use alias as the identifier + boolean isGroupKey = false; + Select select = (Select) query; + if (null != select.getGroupBys() + && !select.getGroupBys().isEmpty() + && select.getGroupBys().get(0).contains(fieldMap.get(fieldName))) { + isGroupKey = true; + } + + columns.add( + new Schema.Column( + fieldName, + fetchAlias(fieldName, fieldMap), + Schema.Type.valueOf(type), + isGroupKey)); + } else if (!isSelectAll()) { + throw new IllegalArgumentException( + String.format("%s fieldName types are currently not supported.", type)); + } + } + } + + if (isSelectAllOnly(query)) { + populateAllNestedFields(columns, fieldNameList); + } + return columns; + } + + /** SELECT * only without other columns or wildcard pattern specified. */ + private boolean isSelectAllOnly(Query query) { + return isSelectAll() && fetchFields(query).isEmpty(); + } + + /** + * Special case which trades off consistency of SELECT * meaning for more intuition from customer + * perspective. In other cases, * means all regular fields on the level. The only exception here + * is * picks all non-regular (nested) fields as JSON without flatten. 
+ */ + private void populateAllNestedFields(List columns, List fields) { + Set nestedFieldPaths = + fields.stream() + .map(FieldMapping::new) + .filter(FieldMapping::isPropertyField) + .filter(f -> !f.isMultiField()) + .map(FieldMapping::path) + .collect(toSet()); + + for (String nestedFieldPath : nestedFieldPaths) { + columns.add(new Schema.Column(nestedFieldPath, "", Schema.Type.TEXT)); + } + } + + /** + * Since this helper method is called within a check to see if the field exists in type mapping, + * it's already confirmed that the fieldName is valid. The check for fieldName in fieldMap has to + * be done in the case that 'SELECT *' was called since the map will be empty. + */ + private String fetchAlias(String fieldName, Map fieldMap) { + if (fieldMap.containsKey(fieldName)) { + return fieldMap.get(fieldName).getAlias(); + } + + return null; + } + + // *********************************************************** + // Logic for loading Rows to be stored in DataRows + // *********************************************************** + + /** + * Extract data from query results into Row objects Need to cover two cases: 1. queryResult is a + * SearchHits object 2. queryResult is an Aggregations object + * + *

Ignoring queryResult being ActionResponse (from executeDeleteAction), there should be no + * data in this case + */ + private void extractData() { + if (queryResult instanceof SearchHits) { + SearchHits searchHits = (SearchHits) queryResult; + + this.rows = populateRows(searchHits); + this.size = rows.size(); + this.internalTotalHits = + Optional.ofNullable(searchHits.getTotalHits()).map(th -> th.value).orElse(0L); + // size may be greater than totalHits after nested rows be flatten + this.totalHits = Math.max(size, internalTotalHits); + } else if (queryResult instanceof Aggregations) { + Aggregations aggregations = (Aggregations) queryResult; + + this.rows = populateRows(aggregations); + this.size = rows.size(); + this.internalTotalHits = size; + // Total hits is not available from Aggregations so 'size' is used + this.totalHits = size; + } + } + + private void populateCursor() { + switch (cursor.getType()) { + case DEFAULT: + populateDefaultCursor((DefaultCursor) cursor); + default: + return; + } + } + + private void populateDefaultCursor(DefaultCursor cursor) { /** - * Extract data from query results into Row objects - * Need to cover two cases: - * 1. queryResult is a SearchHits object - * 2. queryResult is an Aggregations object - *

- * Ignoring queryResult being ActionResponse (from executeDeleteAction), there should be no data in this case + * Assumption: scrollId, fetchSize, limit already being set in + * + * @see PrettyFormatRestExecutor.buildProtocolForDefaultQuery() */ - private void extractData() { - if (queryResult instanceof SearchHits) { - SearchHits searchHits = (SearchHits) queryResult; - - this.rows = populateRows(searchHits); - this.size = rows.size(); - this.internalTotalHits = Optional.ofNullable(searchHits.getTotalHits()).map(th -> th.value).orElse(0L); - // size may be greater than totalHits after nested rows be flatten - this.totalHits = Math.max(size, internalTotalHits); - } else if (queryResult instanceof Aggregations) { - Aggregations aggregations = (Aggregations) queryResult; - - this.rows = populateRows(aggregations); - this.size = rows.size(); - this.internalTotalHits = size; - // Total hits is not available from Aggregations so 'size' is used - this.totalHits = size; - } - } - - private void populateCursor() { - switch(cursor.getType()) { - case DEFAULT: - populateDefaultCursor((DefaultCursor) cursor); - default: - return; - } - } - - private void populateDefaultCursor(DefaultCursor cursor) { - /** - * Assumption: scrollId, fetchSize, limit already being set in - * @see PrettyFormatRestExecutor.buildProtocolForDefaultQuery() - */ - - Integer limit = cursor.getLimit(); - long rowsLeft = rowsLeft(cursor.getFetchSize(), cursor.getLimit()); - if (rowsLeft <= 0) { - // close the cursor - String scrollId = cursor.getScrollId(); - ClearScrollResponse clearScrollResponse = client.prepareClearScroll().addScrollId(scrollId).get(); - if (!clearScrollResponse.isSucceeded()) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - LOG.error("Error closing the cursor context {} ", scrollId); - } - return; - } - - cursor.setRowsLeft(rowsLeft); - cursor.setIndexPattern(indexName); - cursor.setFieldAliasMap(fieldAliasMap()); - 
cursor.setColumns(columns); - this.totalHits = limit != null && limit < internalTotalHits ? limit : internalTotalHits; - } - - private long rowsLeft(Integer fetchSize, Integer limit) { - long rowsLeft = 0; - long totalHits = internalTotalHits; - if (limit != null && limit < totalHits) { - rowsLeft = limit - fetchSize; - } else { - rowsLeft = totalHits - fetchSize; - } - return rowsLeft; - } - - private List populateRows(SearchHits searchHits) { - List rows = new ArrayList<>(); - Set newKeys = new HashSet<>(head); - for (SearchHit hit : searchHits) { - Map rowSource = hit.getSourceAsMap(); - List result; - - if (!isJoinQuery()) { - // Row already flatten in source in join. And join doesn't support nested fields for now. - rowSource = flatRow(head, rowSource); - rowSource.put(SCORE, hit.getScore()); - - for (Map.Entry field : hit.getFields().entrySet()) { - rowSource.put(field.getKey(), field.getValue().getValue()); - } - if (formatType.equalsIgnoreCase(Format.JDBC.getFormatName())) { - dateFieldFormatter.applyJDBCDateFormat(rowSource); - } - result = flatNestedField(newKeys, rowSource, hit.getInnerHits()); - } else { - if (formatType.equalsIgnoreCase(Format.JDBC.getFormatName())) { - dateFieldFormatter.applyJDBCDateFormat(rowSource); - } - result = new ArrayList<>(); - result.add(new DataRows.Row(rowSource)); - } - - rows.addAll(result); - } - - return rows; - } - - private List populateRows(Aggregations aggregations) { - List rows = new ArrayList<>(); - List aggs = aggregations.asList(); - if (hasTermAggregations(aggs)) { - Terms terms = (Terms) aggs.get(0); - String field = terms.getName(); - - for (Terms.Bucket bucket : terms.getBuckets()) { - List aggRows = new ArrayList<>(); - getAggsData(bucket, aggRows, addMap(field, bucket.getKey())); - - rows.addAll(aggRows); - } - } else { - // This occurs for cases like "SELECT AVG(age) FROM bank" where we aggregate in SELECT with no GROUP BY - rows.add( - new DataRows.Row( - addNumericAggregation(aggs, new HashMap<>()) - 
) - ); + Integer limit = cursor.getLimit(); + long rowsLeft = rowsLeft(cursor.getFetchSize(), cursor.getLimit()); + if (rowsLeft <= 0) { + // close the cursor + String scrollId = cursor.getScrollId(); + ClearScrollResponse clearScrollResponse = + client.prepareClearScroll().addScrollId(scrollId).get(); + if (!clearScrollResponse.isSucceeded()) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); + LOG.error("Error closing the cursor context {} ", scrollId); + } + return; + } + + cursor.setRowsLeft(rowsLeft); + cursor.setIndexPattern(indexName); + cursor.setFieldAliasMap(fieldAliasMap()); + cursor.setColumns(columns); + this.totalHits = limit != null && limit < internalTotalHits ? limit : internalTotalHits; + } + + private long rowsLeft(Integer fetchSize, Integer limit) { + long rowsLeft = 0; + long totalHits = internalTotalHits; + if (limit != null && limit < totalHits) { + rowsLeft = limit - fetchSize; + } else { + rowsLeft = totalHits - fetchSize; + } + return rowsLeft; + } + + private List populateRows(SearchHits searchHits) { + List rows = new ArrayList<>(); + Set newKeys = new HashSet<>(head); + for (SearchHit hit : searchHits) { + Map rowSource = hit.getSourceAsMap(); + List result; + + if (!isJoinQuery()) { + // Row already flatten in source in join. And join doesn't support nested fields for now. 
+ rowSource = flatRow(head, rowSource); + rowSource.put(SCORE, hit.getScore()); + + for (Map.Entry field : hit.getFields().entrySet()) { + rowSource.put(field.getKey(), field.getValue().getValue()); + } + if (formatType.equalsIgnoreCase(Format.JDBC.getFormatName())) { + dateFieldFormatter.applyJDBCDateFormat(rowSource); + } + result = flatNestedField(newKeys, rowSource, hit.getInnerHits()); + } else { + if (formatType.equalsIgnoreCase(Format.JDBC.getFormatName())) { + dateFieldFormatter.applyJDBCDateFormat(rowSource); + } + result = new ArrayList<>(); + result.add(new DataRows.Row(rowSource)); + } + + rows.addAll(result); + } + + return rows; + } + + private List populateRows(Aggregations aggregations) { + List rows = new ArrayList<>(); + List aggs = aggregations.asList(); + if (hasTermAggregations(aggs)) { + Terms terms = (Terms) aggs.get(0); + String field = terms.getName(); + + for (Terms.Bucket bucket : terms.getBuckets()) { + List aggRows = new ArrayList<>(); + getAggsData(bucket, aggRows, addMap(field, bucket.getKey())); + + rows.addAll(aggRows); + } + } else { + // This occurs for cases like "SELECT AVG(age) FROM bank" where we aggregate in SELECT with no + // GROUP BY + rows.add(new DataRows.Row(addNumericAggregation(aggs, new HashMap<>()))); + } + return rows; + } + + /** + * This recursive method goes through the buckets iterated through populateRows() and flattens any + * inner aggregations and puts that data as a Map into a Row (this nested aggregation happens when + * we GROUP BY multiple fields) + */ + private void getAggsData( + Terms.Bucket bucket, List aggRows, Map data) { + List aggs = bucket.getAggregations().asList(); + if (hasTermAggregations(aggs)) { + Terms terms = (Terms) aggs.get(0); + String field = terms.getName(); + + for (Terms.Bucket innerBucket : terms.getBuckets()) { + data.put(field, innerBucket.getKey()); + getAggsData(innerBucket, aggRows, data); + data.remove(field); + } + } else { + data = addNumericAggregation(aggs, data); + 
aggRows.add(new DataRows.Row(new HashMap<>(data))); + } + } + + /** + * hasTermAggregations() checks for specific type of aggregation, one that contains Terms. This is + * the case when the aggregations contains the contents of a GROUP BY field. + * + *

If the aggregation contains the data for an aggregation function (ex. COUNT(*)), the items + * in the list will be of instance InternalValueCount, InternalSum, etc. (depending on the + * aggregation function) and will be considered a base case of getAggsData() which will add that + * data to the Row (if it exists). + */ + private boolean hasTermAggregations(List aggs) { + return !aggs.isEmpty() && aggs.get(0) instanceof Terms; + } + + /** + * Adds the contents of Aggregation (specifically the NumericMetricsAggregation.SingleValue + * instance) from bucket.aggregations into the data map + */ + private Map addNumericAggregation( + List aggs, Map data) { + for (Aggregation aggregation : aggs) { + if (aggregation instanceof NumericMetricsAggregation.SingleValue) { + NumericMetricsAggregation.SingleValue singleValueAggregation = + (NumericMetricsAggregation.SingleValue) aggregation; + data.put( + singleValueAggregation.getName(), + !Double.isInfinite(singleValueAggregation.value()) + ? singleValueAggregation.getValueAsString() + : "null"); + } else if (aggregation instanceof Percentiles) { + Percentiles percentiles = (Percentiles) aggregation; + + data.put( + percentiles.getName(), + StreamSupport.stream(percentiles.spliterator(), false) + .collect( + Collectors.toMap( + Percentile::getPercent, + Percentile::getValue, + (v1, v2) -> { + throw new IllegalArgumentException( + String.format("Duplicate key for values %s and %s", v1, v2)); + }, + TreeMap::new))); + } else { + throw new SqlFeatureNotImplementedException( + "Aggregation type " + aggregation.getType() + " is not yet implemented"); + } + } + + return data; + } + + /** + * Simplifies the structure of row's source Map by flattening it, making the full path of an + * object the key and the Object it refers to the value. This handles the case of regular object + * since nested objects will not be in hit.source but rather in hit.innerHits + * + *

Sample input: keys = ['comments.likes'] row = comments: { likes: 2 } + * + *

Return: flattenedRow = {comment.likes: 2} + */ + @SuppressWarnings("unchecked") + private Map flatRow(List keys, Map row) { + Map flattenedRow = new HashMap<>(); + for (String key : keys) { + String[] splitKeys = key.split("\\."); + boolean found = true; + Object currentObj = row; + + for (String splitKey : splitKeys) { + // This check is made to prevent Cast Exception as an ArrayList of objects can be in the + // sourceMap + if (!(currentObj instanceof Map)) { + found = false; + break; + } + + Map currentMap = (Map) currentObj; + if (!currentMap.containsKey(splitKey)) { + found = false; + break; + } + + currentObj = currentMap.get(splitKey); + } + + if (found) { + flattenedRow.put(key, currentObj); + } + } + + return flattenedRow; + } + + /** + * If innerHits associated with column name exists, flatten both the inner field name and the + * inner rows in it. + * + *

Sample input: newKeys = {'region', 'employees.age'}, row = {'region': 'US'} innerHits = + * employees: { hits: [{ source: { age: 26, firstname: 'Hank' } },{ source: { age: 30, firstname: + * 'John' } }] } + */ + private List flatNestedField( + Set newKeys, Map row, Map innerHits) { + List result = new ArrayList<>(); + result.add(new DataRows.Row(row)); + + if (innerHits == null) { + return result; + } + + for (String colName : innerHits.keySet()) { + SearchHit[] colValue = innerHits.get(colName).getHits(); + doFlatNestedFieldName(colName, colValue, newKeys); + result = doFlatNestedFieldValue(colName, colValue, result); + } + + return result; + } + + private void doFlatNestedFieldName(String colName, SearchHit[] colValue, Set keys) { + Map innerRow = colValue[0].getSourceAsMap(); + for (String field : innerRow.keySet()) { + String innerName = colName + "." + field; + keys.add(innerName); + } + + keys.remove(colName); + } + + /** + * Do Cartesian Product between current outer row and inner rows by nested loop and remove + * original outer row. + * + *

Sample input: colName = 'employees', rows = [{region: 'US'}] colValue= [{ source: { age: 26, + * firstname: 'Hank' } },{ source: { age: 30, firstname: 'John' } }] + * + *

Return: [ {region:'US', employees.age:26, employees.firstname:'Hank'}, {region:'US', + * employees.age:30, employees.firstname:'John'} ] + */ + private List doFlatNestedFieldValue( + String colName, SearchHit[] colValue, List rows) { + List result = new ArrayList<>(); + for (DataRows.Row row : rows) { + for (SearchHit hit : colValue) { + Map innerRow = hit.getSourceAsMap(); + Map copy = new HashMap<>(); + + for (String field : row.getContents().keySet()) { + copy.put(field, row.getData(field)); } - return rows; - } - - /** - * This recursive method goes through the buckets iterated through populateRows() and flattens any inner - * aggregations and puts that data as a Map into a Row (this nested aggregation happens when we GROUP BY - * multiple fields) - */ - private void getAggsData(Terms.Bucket bucket, List aggRows, Map data) { - List aggs = bucket.getAggregations().asList(); - if (hasTermAggregations(aggs)) { - Terms terms = (Terms) aggs.get(0); - String field = terms.getName(); - - for (Terms.Bucket innerBucket : terms.getBuckets()) { - data.put(field, innerBucket.getKey()); - getAggsData(innerBucket, aggRows, data); - data.remove(field); - } - } else { - data = addNumericAggregation(aggs, data); - aggRows.add(new DataRows.Row(new HashMap<>(data))); - } - } - - /** - * hasTermAggregations() checks for specific type of aggregation, one that contains Terms. This is the case when the - * aggregations contains the contents of a GROUP BY field. - *

- * If the aggregation contains the data for an aggregation function (ex. COUNT(*)), the items in the list will - * be of instance InternalValueCount, InternalSum, etc. (depending on the aggregation function) and will be - * considered a base case of getAggsData() which will add that data to the Row (if it exists). - */ - private boolean hasTermAggregations(List aggs) { - return !aggs.isEmpty() && aggs.get(0) instanceof Terms; - } - - /** - * Adds the contents of Aggregation (specifically the NumericMetricsAggregation.SingleValue instance) from - * bucket.aggregations into the data map - */ - private Map addNumericAggregation(List aggs, Map data) { - for (Aggregation aggregation : aggs) { - if (aggregation instanceof NumericMetricsAggregation.SingleValue) { - NumericMetricsAggregation.SingleValue singleValueAggregation = - (NumericMetricsAggregation.SingleValue) aggregation; - data.put(singleValueAggregation.getName(), !Double.isInfinite(singleValueAggregation.value()) - ? singleValueAggregation.getValueAsString() : "null"); - } else if (aggregation instanceof Percentiles) { - Percentiles percentiles = (Percentiles) aggregation; - - data.put(percentiles.getName(), StreamSupport - .stream(percentiles.spliterator(), false) - .collect( - Collectors.toMap( - Percentile::getPercent, - Percentile::getValue, - (v1, v2) -> { - throw new IllegalArgumentException( - String.format("Duplicate key for values %s and %s", v1, v2)); - }, - TreeMap::new))); - } else { - throw new SqlFeatureNotImplementedException("Aggregation type " + aggregation.getType() - + " is not yet implemented"); - } - } - - return data; - } - - /** - * Simplifies the structure of row's source Map by flattening it, making the full path of an object the key - * and the Object it refers to the value. This handles the case of regular object since nested objects will not - * be in hit.source but rather in hit.innerHits - *

- * Sample input: - * keys = ['comments.likes'] - * row = comments: { - * likes: 2 - * } - *

- * Return: - * flattenedRow = {comment.likes: 2} - */ - @SuppressWarnings("unchecked") - private Map flatRow(List keys, Map row) { - Map flattenedRow = new HashMap<>(); - for (String key : keys) { - String[] splitKeys = key.split("\\."); - boolean found = true; - Object currentObj = row; - - for (String splitKey : splitKeys) { - // This check is made to prevent Cast Exception as an ArrayList of objects can be in the sourceMap - if (!(currentObj instanceof Map)) { - found = false; - break; - } - - Map currentMap = (Map) currentObj; - if (!currentMap.containsKey(splitKey)) { - found = false; - break; - } - - currentObj = currentMap.get(splitKey); - } - - if (found) { - flattenedRow.put(key, currentObj); - } - } - - return flattenedRow; - } - - /** - * If innerHits associated with column name exists, flatten both the inner field name and the inner rows in it. - *

- * Sample input: - * newKeys = {'region', 'employees.age'}, row = {'region': 'US'} - * innerHits = employees: { - * hits: [{ - * source: { - * age: 26, - * firstname: 'Hank' - * } - * },{ - * source: { - * age: 30, - * firstname: 'John' - * } - * }] - * } - */ - private List flatNestedField(Set newKeys, Map row, - Map innerHits) { - List result = new ArrayList<>(); - result.add(new DataRows.Row(row)); - - if (innerHits == null) { - return result; - } - - for (String colName : innerHits.keySet()) { - SearchHit[] colValue = innerHits.get(colName).getHits(); - doFlatNestedFieldName(colName, colValue, newKeys); - result = doFlatNestedFieldValue(colName, colValue, result); - } - - return result; - } - - private void doFlatNestedFieldName(String colName, SearchHit[] colValue, Set keys) { - Map innerRow = colValue[0].getSourceAsMap(); for (String field : innerRow.keySet()) { - String innerName = colName + "." + field; - keys.add(innerName); + copy.put(colName + "." + field, innerRow.get(field)); } - keys.remove(colName); + copy.remove(colName); + result.add(new DataRows.Row(copy)); + } } - /** - * Do Cartesian Product between current outer row and inner rows by nested loop and remove original outer row. - *

- * Sample input: - * colName = 'employees', rows = [{region: 'US'}] - * colValue= [{ - * source: { - * age: 26, - * firstname: 'Hank' - * } - * },{ - * source: { - * age: 30, - * firstname: 'John' - * } - * }] - *

- * Return: - * [ - * {region:'US', employees.age:26, employees.firstname:'Hank'}, - * {region:'US', employees.age:30, employees.firstname:'John'} - * ] - */ - private List doFlatNestedFieldValue(String colName, SearchHit[] colValue, List rows) { - List result = new ArrayList<>(); - for (DataRows.Row row : rows) { - for (SearchHit hit : colValue) { - Map innerRow = hit.getSourceAsMap(); - Map copy = new HashMap<>(); - - for (String field : row.getContents().keySet()) { - copy.put(field, row.getData(field)); - } - for (String field : innerRow.keySet()) { - copy.put(colName + "." + field, innerRow.get(field)); - } - - copy.remove(colName); - result.add(new DataRows.Row(copy)); - } - } + return result; + } - return result; - } - - private Map addMap(String field, Object term) { - Map data = new HashMap<>(); - data.put(field, term); - return data; - } + private Map addMap(String field, Object term) { + Map data = new HashMap<>(); + data.put(field, term); + return data; + } - private boolean isJoinQuery() { - return query instanceof JoinSelect; - } + private boolean isJoinQuery() { + return query instanceof JoinSelect; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ShowResultSet.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ShowResultSet.java index 0a32f6c582..263bf1e7db 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ShowResultSet.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/format/ShowResultSet.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import java.util.ArrayList; @@ -21,62 +20,62 @@ public class ShowResultSet extends ResultSet { - private static final String TABLE_TYPE = "BASE TABLE"; - - private IndexStatement statement; - private Object queryResult; - - public ShowResultSet(Client client, IndexStatement statement, Object queryResult) { - this.client = client; - this.clusterName = 
getClusterName(); - this.statement = statement; - this.queryResult = queryResult; - - this.schema = new Schema(statement, loadColumns()); - this.dataRows = new DataRows(loadRows()); + private static final String TABLE_TYPE = "BASE TABLE"; + + private IndexStatement statement; + private Object queryResult; + + public ShowResultSet(Client client, IndexStatement statement, Object queryResult) { + this.client = client; + this.clusterName = getClusterName(); + this.statement = statement; + this.queryResult = queryResult; + + this.schema = new Schema(statement, loadColumns()); + this.dataRows = new DataRows(loadRows()); + } + + private List loadColumns() { + List columns = new ArrayList<>(); + // Unused Columns are still included in Schema to match JDBC/ODBC standard + columns.add(new Column("TABLE_CAT", null, Type.KEYWORD)); + columns.add(new Column("TABLE_SCHEM", null, Type.KEYWORD)); // Not used + columns.add(new Column("TABLE_NAME", null, Type.KEYWORD)); + columns.add(new Column("TABLE_TYPE", null, Type.KEYWORD)); + columns.add(new Column("REMARKS", null, Type.KEYWORD)); // Not used + columns.add(new Column("TYPE_CAT", null, Type.KEYWORD)); // Not used + columns.add(new Column("TYPE_SCHEM", null, Type.KEYWORD)); // Not used + columns.add(new Column("TYPE_NAME", null, Type.KEYWORD)); // Not used + columns.add(new Column("SELF_REFERENCING_COL_NAME", null, Type.KEYWORD)); // Not used + columns.add(new Column("REF_GENERATION", null, Type.KEYWORD)); // Not used + + return columns; + } + + private List loadRows() { + List rows = new ArrayList<>(); + for (String index : extractIndices()) { + rows.add(new Row(loadData(index))); } - private List loadColumns() { - List columns = new ArrayList<>(); - // Unused Columns are still included in Schema to match JDBC/ODBC standard - columns.add(new Column("TABLE_CAT", null, Type.KEYWORD)); - columns.add(new Column("TABLE_SCHEM", null, Type.KEYWORD)); // Not used - columns.add(new Column("TABLE_NAME", null, Type.KEYWORD)); - 
columns.add(new Column("TABLE_TYPE", null, Type.KEYWORD)); - columns.add(new Column("REMARKS", null, Type.KEYWORD)); // Not used - columns.add(new Column("TYPE_CAT", null, Type.KEYWORD)); // Not used - columns.add(new Column("TYPE_SCHEM", null, Type.KEYWORD)); // Not used - columns.add(new Column("TYPE_NAME", null, Type.KEYWORD)); // Not used - columns.add(new Column("SELF_REFERENCING_COL_NAME", null, Type.KEYWORD)); // Not used - columns.add(new Column("REF_GENERATION", null, Type.KEYWORD)); // Not used + return rows; + } - return columns; - } + private List extractIndices() { + String indexPattern = statement.getIndexPattern(); + String[] indices = ((GetIndexResponse) queryResult).getIndices(); - private List loadRows() { - List rows = new ArrayList<>(); - for (String index : extractIndices()) { - rows.add(new Row(loadData(index))); - } + return Arrays.stream(indices) + .filter(index -> matchesPatternIfRegex(index, indexPattern)) + .collect(Collectors.toList()); + } - return rows; - } + private Map loadData(String tableName) { + Map data = new HashMap<>(); + data.put("TABLE_CAT", clusterName); + data.put("TABLE_NAME", tableName); + data.put("TABLE_TYPE", TABLE_TYPE); - private List extractIndices() { - String indexPattern = statement.getIndexPattern(); - String[] indices = ((GetIndexResponse) queryResult).getIndices(); - - return Arrays.stream(indices) - .filter(index -> matchesPatternIfRegex(index, indexPattern)) - .collect(Collectors.toList()); - } - - private Map loadData(String tableName) { - Map data = new HashMap<>(); - data.put("TABLE_CAT", clusterName); - data.put("TABLE_NAME", tableName); - data.put("TABLE_TYPE", TABLE_TYPE); - - return data; - } + return data; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticJoinExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticJoinExecutor.java index f7d1fbf641..f0ffafc470 100644 --- 
a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticJoinExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticJoinExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.join; import java.io.IOException; @@ -22,11 +21,11 @@ import org.opensearch.client.Client; import org.opensearch.common.document.DocumentField; import org.opensearch.common.unit.TimeValue; +import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.mapper.MapperService; import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestChannel; -import org.opensearch.core.rest.RestStatus; import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; import org.opensearch.search.sort.FieldSortBuilder; @@ -41,219 +40,239 @@ import org.opensearch.sql.legacy.query.join.TableInJoinRequestBuilder; import org.opensearch.sql.legacy.query.planner.HashJoinQueryPlanRequestBuilder; -/** - * Created by Eliran on 15/9/2015. - */ +/** Created by Eliran on 15/9/2015. 
*/ public abstract class ElasticJoinExecutor implements ElasticHitsExecutor { - private static final Logger LOG = LogManager.getLogger(); - - protected List results; // Keep list to avoid copy to new array in SearchHits - protected MetaSearchResult metaResults; - protected final int MAX_RESULTS_ON_ONE_FETCH = 10000; - private Set aliasesOnReturn; - private boolean allFieldsReturn; - - protected ElasticJoinExecutor(JoinRequestBuilder requestBuilder) { - metaResults = new MetaSearchResult(); - aliasesOnReturn = new HashSet<>(); - List firstTableReturnedField = requestBuilder.getFirstTable().getReturnedFields(); - List secondTableReturnedField = requestBuilder.getSecondTable().getReturnedFields(); - allFieldsReturn = (firstTableReturnedField == null || firstTableReturnedField.size() == 0) - && (secondTableReturnedField == null || secondTableReturnedField.size() == 0); - } - - public void sendResponse(RestChannel channel) throws IOException { - XContentBuilder builder = null; - long len; - try { - builder = ElasticUtils.hitsAsStringResultZeroCopy(results, metaResults, this); - BytesRestResponse bytesRestResponse = new BytesRestResponse(RestStatus.OK, builder); - len = bytesRestResponse.content().length(); - channel.sendResponse(bytesRestResponse); - } catch (IOException e) { - try { - if (builder != null) { - builder.close(); - } - } catch (Exception ex) { - // Ignore. 
Already logged in channel - } - throw e; + private static final Logger LOG = LogManager.getLogger(); + + protected List results; // Keep list to avoid copy to new array in SearchHits + protected MetaSearchResult metaResults; + protected final int MAX_RESULTS_ON_ONE_FETCH = 10000; + private Set aliasesOnReturn; + private boolean allFieldsReturn; + + protected ElasticJoinExecutor(JoinRequestBuilder requestBuilder) { + metaResults = new MetaSearchResult(); + aliasesOnReturn = new HashSet<>(); + List firstTableReturnedField = requestBuilder.getFirstTable().getReturnedFields(); + List secondTableReturnedField = requestBuilder.getSecondTable().getReturnedFields(); + allFieldsReturn = + (firstTableReturnedField == null || firstTableReturnedField.size() == 0) + && (secondTableReturnedField == null || secondTableReturnedField.size() == 0); + } + + public void sendResponse(RestChannel channel) throws IOException { + XContentBuilder builder = null; + long len; + try { + builder = ElasticUtils.hitsAsStringResultZeroCopy(results, metaResults, this); + BytesRestResponse bytesRestResponse = new BytesRestResponse(RestStatus.OK, builder); + len = bytesRestResponse.content().length(); + channel.sendResponse(bytesRestResponse); + } catch (IOException e) { + try { + if (builder != null) { + builder.close(); } - LOG.debug("[MCB] Successfully send response with size of {}. Thread id = {}", len, - Thread.currentThread().getId()); + } catch (Exception ex) { + // Ignore. Already logged in channel + } + throw e; } - - public void run() throws IOException, SqlParseException { - long timeBefore = System.currentTimeMillis(); - results = innerRun(); - long joinTimeInMilli = System.currentTimeMillis() - timeBefore; - this.metaResults.setTookImMilli(joinTimeInMilli); + LOG.debug( + "[MCB] Successfully send response with size of {}. 
Thread id = {}", + len, + Thread.currentThread().getId()); + } + + public void run() throws IOException, SqlParseException { + long timeBefore = System.currentTimeMillis(); + results = innerRun(); + long joinTimeInMilli = System.currentTimeMillis() - timeBefore; + this.metaResults.setTookImMilli(joinTimeInMilli); + } + + protected abstract List innerRun() throws IOException, SqlParseException; + + public SearchHits getHits() { + return new SearchHits( + results.toArray(new SearchHit[results.size()]), + new TotalHits(results.size(), Relation.EQUAL_TO), + 1.0f); + } + + public static ElasticJoinExecutor createJoinExecutor( + Client client, SqlElasticRequestBuilder requestBuilder) { + if (requestBuilder instanceof HashJoinQueryPlanRequestBuilder) { + return new QueryPlanElasticExecutor((HashJoinQueryPlanRequestBuilder) requestBuilder); + } else if (requestBuilder instanceof HashJoinElasticRequestBuilder) { + HashJoinElasticRequestBuilder hashJoin = (HashJoinElasticRequestBuilder) requestBuilder; + return new HashJoinElasticExecutor(client, hashJoin); + } else if (requestBuilder instanceof NestedLoopsElasticRequestBuilder) { + NestedLoopsElasticRequestBuilder nestedLoops = + (NestedLoopsElasticRequestBuilder) requestBuilder; + return new NestedLoopsElasticExecutor(client, nestedLoops); + } else { + throw new RuntimeException("Unsuported requestBuilder of type: " + requestBuilder.getClass()); } - - - protected abstract List innerRun() throws IOException, SqlParseException; - - public SearchHits getHits() { - return new SearchHits(results.toArray(new SearchHit[results.size()]), new TotalHits(results.size(), - Relation.EQUAL_TO), 1.0f); + } + + protected void mergeSourceAndAddAliases( + Map secondTableHitSource, + SearchHit searchHit, + String t1Alias, + String t2Alias) { + Map results = mapWithAliases(searchHit.getSourceAsMap(), t1Alias); + results.putAll(mapWithAliases(secondTableHitSource, t2Alias)); + searchHit.getSourceAsMap().clear(); + 
searchHit.getSourceAsMap().putAll(results); + } + + protected Map mapWithAliases(Map source, String alias) { + Map mapWithAliases = new HashMap<>(); + for (Map.Entry fieldNameToValue : source.entrySet()) { + if (!aliasesOnReturn.contains(fieldNameToValue.getKey())) { + mapWithAliases.put(alias + "." + fieldNameToValue.getKey(), fieldNameToValue.getValue()); + } else { + mapWithAliases.put(fieldNameToValue.getKey(), fieldNameToValue.getValue()); + } } - - public static ElasticJoinExecutor createJoinExecutor(Client client, SqlElasticRequestBuilder requestBuilder) { - if (requestBuilder instanceof HashJoinQueryPlanRequestBuilder) { - return new QueryPlanElasticExecutor((HashJoinQueryPlanRequestBuilder) requestBuilder); - } else if (requestBuilder instanceof HashJoinElasticRequestBuilder) { - HashJoinElasticRequestBuilder hashJoin = (HashJoinElasticRequestBuilder) requestBuilder; - return new HashJoinElasticExecutor(client, hashJoin); - } else if (requestBuilder instanceof NestedLoopsElasticRequestBuilder) { - NestedLoopsElasticRequestBuilder nestedLoops = (NestedLoopsElasticRequestBuilder) requestBuilder; - return new NestedLoopsElasticExecutor(client, nestedLoops); - } else { - throw new RuntimeException("Unsuported requestBuilder of type: " + requestBuilder.getClass()); - } - } - - protected void mergeSourceAndAddAliases(Map secondTableHitSource, SearchHit searchHit, - String t1Alias, String t2Alias) { - Map results = mapWithAliases(searchHit.getSourceAsMap(), t1Alias); - results.putAll(mapWithAliases(secondTableHitSource, t2Alias)); - searchHit.getSourceAsMap().clear(); - searchHit.getSourceAsMap().putAll(results); + return mapWithAliases; + } + + protected void onlyReturnedFields( + Map fieldsMap, List required, boolean allRequired) { + HashMap filteredMap = new HashMap<>(); + if (allFieldsReturn || allRequired) { + filteredMap.putAll(fieldsMap); + return; } - - protected Map mapWithAliases(Map source, String alias) { - Map mapWithAliases = new HashMap<>(); - for 
(Map.Entry fieldNameToValue : source.entrySet()) { - if (!aliasesOnReturn.contains(fieldNameToValue.getKey())) { - mapWithAliases.put(alias + "." + fieldNameToValue.getKey(), fieldNameToValue.getValue()); - } else { - mapWithAliases.put(fieldNameToValue.getKey(), fieldNameToValue.getValue()); - } - } - return mapWithAliases; + for (Field field : required) { + String name = field.getName(); + String returnName = name; + String alias = field.getAlias(); + if (alias != null && alias != "") { + returnName = alias; + aliasesOnReturn.add(alias); + } + filteredMap.put(returnName, deepSearchInMap(fieldsMap, name)); } - - protected void onlyReturnedFields(Map fieldsMap, List required, boolean allRequired) { - HashMap filteredMap = new HashMap<>(); - if (allFieldsReturn || allRequired) { - filteredMap.putAll(fieldsMap); - return; + fieldsMap.clear(); + fieldsMap.putAll(filteredMap); + } + + protected Object deepSearchInMap(Map fieldsMap, String name) { + if (name.contains(".")) { + String[] path = name.split("\\."); + Map currentObject = fieldsMap; + for (int i = 0; i < path.length - 1; i++) { + Object valueFromCurrentMap = currentObject.get(path[i]); + if (valueFromCurrentMap == null) { + return null; } - for (Field field : required) { - String name = field.getName(); - String returnName = name; - String alias = field.getAlias(); - if (alias != null && alias != "") { - returnName = alias; - aliasesOnReturn.add(alias); - } - filteredMap.put(returnName, deepSearchInMap(fieldsMap, name)); + if (!Map.class.isAssignableFrom(valueFromCurrentMap.getClass())) { + return null; } - fieldsMap.clear(); - fieldsMap.putAll(filteredMap); - + currentObject = (Map) valueFromCurrentMap; + } + return currentObject.get(path[path.length - 1]); } - protected Object deepSearchInMap(Map fieldsMap, String name) { - if (name.contains(".")) { - String[] path = name.split("\\."); - Map currentObject = fieldsMap; - for (int i = 0; i < path.length - 1; i++) { - Object valueFromCurrentMap = 
currentObject.get(path[i]); - if (valueFromCurrentMap == null) { - return null; - } - if (!Map.class.isAssignableFrom(valueFromCurrentMap.getClass())) { - return null; - } - currentObject = (Map) valueFromCurrentMap; - } - return currentObject.get(path[path.length - 1]); + return fieldsMap.get(name); + } + + protected void addUnmatchedResults( + List combinedResults, + Collection firstTableSearchHits, + List secondTableReturnedFields, + int currentNumOfIds, + int totalLimit, + String t1Alias, + String t2Alias) { + boolean limitReached = false; + for (SearchHitsResult hitsResult : firstTableSearchHits) { + if (!hitsResult.isMatchedWithOtherTable()) { + for (SearchHit hit : hitsResult.getSearchHits()) { + + // todo: decide which id to put or type. or maby its ok this way. just need to doc. + SearchHit unmachedResult = + createUnmachedResult(secondTableReturnedFields, hit.docId(), t1Alias, t2Alias, hit); + combinedResults.add(unmachedResult); + currentNumOfIds++; + if (currentNumOfIds >= totalLimit) { + limitReached = true; + break; + } } - - return fieldsMap.get(name); + } + if (limitReached) { + break; + } } - - - protected void addUnmatchedResults(List combinedResults, - Collection firstTableSearchHits, - List secondTableReturnedFields, int currentNumOfIds, int totalLimit, - String t1Alias, String t2Alias) { - boolean limitReached = false; - for (SearchHitsResult hitsResult : firstTableSearchHits) { - if (!hitsResult.isMatchedWithOtherTable()) { - for (SearchHit hit : hitsResult.getSearchHits()) { - - //todo: decide which id to put or type. or maby its ok this way. just need to doc. 
- SearchHit unmachedResult = createUnmachedResult(secondTableReturnedFields, hit.docId(), - t1Alias, t2Alias, hit); - combinedResults.add(unmachedResult); - currentNumOfIds++; - if (currentNumOfIds >= totalLimit) { - limitReached = true; - break; - } - - } - } - if (limitReached) { - break; - } - } - } - - protected SearchHit createUnmachedResult(List secondTableReturnedFields, int docId, String t1Alias, - String t2Alias, SearchHit hit) { - String unmatchedId = hit.getId() + "|0"; - - Map documentFields = new HashMap<>(); - Map metaFields = new HashMap<>(); - hit.getFields().forEach((fieldName, docField) -> - (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? metaFields : documentFields).put(fieldName, docField)); - SearchHit searchHit = new SearchHit(docId, unmatchedId, documentFields, metaFields); - - searchHit.sourceRef(hit.getSourceRef()); - searchHit.getSourceAsMap().clear(); - searchHit.getSourceAsMap().putAll(hit.getSourceAsMap()); - Map emptySecondTableHitSource = createNullsSource(secondTableReturnedFields); - - mergeSourceAndAddAliases(emptySecondTableHitSource, searchHit, t1Alias, t2Alias); - - return searchHit; - } - - protected Map createNullsSource(List secondTableReturnedFields) { - Map nulledSource = new HashMap<>(); - for (Field field : secondTableReturnedFields) { - if (!field.getName().equals("*")) { - nulledSource.put(field.getName(), null); - } - } - return nulledSource; - } - - protected void updateMetaSearchResults(SearchResponse searchResponse) { - this.metaResults.addSuccessfulShards(searchResponse.getSuccessfulShards()); - this.metaResults.addFailedShards(searchResponse.getFailedShards()); - this.metaResults.addTotalNumOfShards(searchResponse.getTotalShards()); - this.metaResults.updateTimeOut(searchResponse.isTimedOut()); + } + + protected SearchHit createUnmachedResult( + List secondTableReturnedFields, + int docId, + String t1Alias, + String t2Alias, + SearchHit hit) { + String unmatchedId = hit.getId() + "|0"; + + Map 
documentFields = new HashMap<>(); + Map metaFields = new HashMap<>(); + hit.getFields() + .forEach( + (fieldName, docField) -> + (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) + ? metaFields + : documentFields) + .put(fieldName, docField)); + SearchHit searchHit = new SearchHit(docId, unmatchedId, documentFields, metaFields); + + searchHit.sourceRef(hit.getSourceRef()); + searchHit.getSourceAsMap().clear(); + searchHit.getSourceAsMap().putAll(hit.getSourceAsMap()); + Map emptySecondTableHitSource = createNullsSource(secondTableReturnedFields); + + mergeSourceAndAddAliases(emptySecondTableHitSource, searchHit, t1Alias, t2Alias); + + return searchHit; + } + + protected Map createNullsSource(List secondTableReturnedFields) { + Map nulledSource = new HashMap<>(); + for (Field field : secondTableReturnedFields) { + if (!field.getName().equals("*")) { + nulledSource.put(field.getName(), null); + } } - - protected SearchResponse scrollOneTimeWithMax(Client client, TableInJoinRequestBuilder tableRequest) { - SearchRequestBuilder scrollRequest = tableRequest.getRequestBuilder() - .setScroll(new TimeValue(60000)).setSize(MAX_RESULTS_ON_ONE_FETCH); - boolean ordered = tableRequest.getOriginalSelect().isOrderdSelect(); - if (!ordered) { - scrollRequest.addSort(FieldSortBuilder.DOC_FIELD_NAME, SortOrder.ASC); - } - SearchResponse responseWithHits = scrollRequest.get(); - //on ordered select - not using SCAN , elastic returns hits on first scroll - //es5.0 elastic always return docs on scan - // if(!ordered) - // responseWithHits = client.prepareSearchScroll(responseWithHits.getScrollId()) - // .setScroll(new TimeValue(600000)).get(); - return responseWithHits; + return nulledSource; + } + + protected void updateMetaSearchResults(SearchResponse searchResponse) { + this.metaResults.addSuccessfulShards(searchResponse.getSuccessfulShards()); + this.metaResults.addFailedShards(searchResponse.getFailedShards()); + 
this.metaResults.addTotalNumOfShards(searchResponse.getTotalShards()); + this.metaResults.updateTimeOut(searchResponse.isTimedOut()); + } + + protected SearchResponse scrollOneTimeWithMax( + Client client, TableInJoinRequestBuilder tableRequest) { + SearchRequestBuilder scrollRequest = + tableRequest + .getRequestBuilder() + .setScroll(new TimeValue(60000)) + .setSize(MAX_RESULTS_ON_ONE_FETCH); + boolean ordered = tableRequest.getOriginalSelect().isOrderdSelect(); + if (!ordered) { + scrollRequest.addSort(FieldSortBuilder.DOC_FIELD_NAME, SortOrder.ASC); } - - + SearchResponse responseWithHits = scrollRequest.get(); + // on ordered select - not using SCAN , elastic returns hits on first scroll + // es5.0 elastic always return docs on scan + // if(!ordered) + // responseWithHits = client.prepareSearchScroll(responseWithHits.getScrollId()) + // .setScroll(new TimeValue(600000)).get(); + return responseWithHits; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticUtils.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticUtils.java index aa6ea05389..7b6228a3d2 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticUtils.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/ElasticUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.join; import static org.opensearch.core.xcontent.ToXContent.EMPTY_PARAMS; @@ -17,10 +16,10 @@ import org.opensearch.action.search.SearchRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.client.Client; -import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.xcontent.ToXContent.Params; import 
org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.search.SearchHit; @@ -30,136 +29,144 @@ import org.opensearch.sql.legacy.domain.Select; import org.opensearch.sql.legacy.query.join.BackOffRetryStrategy; -/** - * Created by Eliran on 2/9/2016. - */ +/** Created by Eliran on 2/9/2016. */ public class ElasticUtils { - public static SearchResponse scrollOneTimeWithHits(Client client, SearchRequestBuilder requestBuilder, - Select originalSelect, int resultSize) { - SearchRequestBuilder scrollRequest = requestBuilder - .setScroll(new TimeValue(60000)).setSize(resultSize); - boolean ordered = originalSelect.isOrderdSelect(); - if (!ordered) { - scrollRequest.addSort(FieldSortBuilder.DOC_FIELD_NAME, SortOrder.ASC); - } - SearchResponse responseWithHits = scrollRequest.get(); - //on ordered select - not using SCAN , elastic returns hits on first scroll - //es5.0 elastic always return docs on scan -// if(!ordered) { -// responseWithHits = client.prepareSearchScroll(responseWithHits.getScrollId()) -// .setScroll(new TimeValue(600000)).get(); -// } - return responseWithHits; + public static SearchResponse scrollOneTimeWithHits( + Client client, SearchRequestBuilder requestBuilder, Select originalSelect, int resultSize) { + SearchRequestBuilder scrollRequest = + requestBuilder.setScroll(new TimeValue(60000)).setSize(resultSize); + boolean ordered = originalSelect.isOrderdSelect(); + if (!ordered) { + scrollRequest.addSort(FieldSortBuilder.DOC_FIELD_NAME, SortOrder.ASC); + } + SearchResponse responseWithHits = scrollRequest.get(); + // on ordered select - not using SCAN , elastic returns hits on first scroll + // es5.0 elastic always return docs on scan + // if(!ordered) { + // responseWithHits = client.prepareSearchScroll(responseWithHits.getScrollId()) + // .setScroll(new TimeValue(600000)).get(); + // } + return responseWithHits; + } + + // use our deserializer instead of results toXcontent because the source field is different from + // sourceAsMap. 
+ public static String hitsAsStringResult(SearchHits results, MetaSearchResult metaResults) + throws IOException { + if (results == null) { + return null; + } + Object[] searchHits; + searchHits = + new Object + [Optional.ofNullable(results.getTotalHits()).map(th -> th.value).orElse(0L).intValue()]; + int i = 0; + for (SearchHit hit : results) { + HashMap value = new HashMap<>(); + value.put("_id", hit.getId()); + value.put("_score", hit.getScore()); + value.put("_source", hit.getSourceAsMap()); + searchHits[i] = value; + i++; + } + HashMap hits = new HashMap<>(); + hits.put( + "total", + ImmutableMap.of( + "value", Optional.ofNullable(results.getTotalHits()).map(th -> th.value).orElse(0L), + "relation", + Optional.ofNullable(results.getTotalHits()) + .map(th -> th.relation) + .orElse(Relation.EQUAL_TO))); + hits.put("max_score", results.getMaxScore()); + hits.put("hits", searchHits); + XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); + builder.startObject(); + builder.field("took", metaResults.getTookImMilli()); + builder.field("timed_out", metaResults.isTimedOut()); + builder.field( + "_shards", + ImmutableMap.of( + "total", + metaResults.getTotalNumOfShards(), + "successful", + metaResults.getSuccessfulShards(), + "failed", + metaResults.getFailedShards())); + builder.field("hits", hits); + builder.endObject(); + return BytesReference.bytes(builder).utf8ToString(); + } + + /** Generate string by serializing SearchHits in place without any new HashMap copy */ + public static XContentBuilder hitsAsStringResultZeroCopy( + List results, MetaSearchResult metaResults, ElasticJoinExecutor executor) + throws IOException { + BytesStreamOutput outputStream = new BytesStreamOutput(); + + XContentBuilder builder = XContentFactory.jsonBuilder(outputStream).prettyPrint(); + builder.startObject(); + builder.field("took", metaResults.getTookImMilli()); + builder.field("timed_out", metaResults.isTimedOut()); + builder.field( + "_shards", + ImmutableMap.of( 
+ "total", metaResults.getTotalNumOfShards(), + "successful", metaResults.getSuccessfulShards(), + "failed", metaResults.getFailedShards())); + toXContent(builder, EMPTY_PARAMS, results, executor); + builder.endObject(); + + if (!BackOffRetryStrategy.isHealthy(2 * outputStream.size(), executor)) { + throw new IllegalStateException("Memory could be insufficient when sendResponse()."); } - - //use our deserializer instead of results toXcontent because the source field is different from sourceAsMap. - public static String hitsAsStringResult(SearchHits results, MetaSearchResult metaResults) throws IOException { - if (results == null) { - return null; - } - Object[] searchHits; - searchHits = new Object[Optional.ofNullable(results.getTotalHits()).map(th -> th.value).orElse(0L).intValue()]; - int i = 0; - for (SearchHit hit : results) { - HashMap value = new HashMap<>(); - value.put("_id", hit.getId()); - value.put("_score", hit.getScore()); - value.put("_source", hit.getSourceAsMap()); - searchHits[i] = value; - i++; - } - HashMap hits = new HashMap<>(); - hits.put("total", ImmutableMap.of( - "value", Optional.ofNullable(results.getTotalHits()).map(th -> th.value).orElse(0L), - "relation", Optional.ofNullable(results.getTotalHits()).map(th -> th.relation).orElse(Relation.EQUAL_TO) - )); - hits.put("max_score", results.getMaxScore()); - hits.put("hits", searchHits); - XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); - builder.startObject(); - builder.field("took", metaResults.getTookImMilli()); - builder.field("timed_out", metaResults.isTimedOut()); - builder.field("_shards", ImmutableMap.of("total", metaResults.getTotalNumOfShards(), - "successful", metaResults.getSuccessfulShards() - , "failed", metaResults.getFailedShards())); - builder.field("hits", hits); - builder.endObject(); - return BytesReference.bytes(builder).utf8ToString(); + return builder; + } + + /** Code copy from SearchHits */ + private static void toXContent( + XContentBuilder 
builder, Params params, List hits, ElasticJoinExecutor executor) + throws IOException { + builder.startObject(SearchHits.Fields.HITS); + builder.field( + SearchHits.Fields.TOTAL, + ImmutableMap.of("value", hits.size(), "relation", Relation.EQUAL_TO)); + builder.field(SearchHits.Fields.MAX_SCORE, 1.0f); + builder.field(SearchHits.Fields.HITS); + builder.startArray(); + + for (int i = 0; i < hits.size(); i++) { + if (i % 10000 == 0 && !BackOffRetryStrategy.isHealthy()) { + throw new IllegalStateException("Memory circuit break when generating json builder"); + } + toXContent(builder, params, hits.get(i)); } - /** - * Generate string by serializing SearchHits in place without any new HashMap copy - */ - public static XContentBuilder hitsAsStringResultZeroCopy(List results, MetaSearchResult metaResults, - ElasticJoinExecutor executor) throws IOException { - BytesStreamOutput outputStream = new BytesStreamOutput(); - - XContentBuilder builder = XContentFactory.jsonBuilder(outputStream).prettyPrint(); - builder.startObject(); - builder.field("took", metaResults.getTookImMilli()); - builder.field("timed_out", metaResults.isTimedOut()); - builder.field("_shards", ImmutableMap.of( - "total", metaResults.getTotalNumOfShards(), - "successful", metaResults.getSuccessfulShards(), - "failed", metaResults.getFailedShards() - )); - toXContent(builder, EMPTY_PARAMS, results, executor); - builder.endObject(); - - if (!BackOffRetryStrategy.isHealthy(2 * outputStream.size(), executor)) { - throw new IllegalStateException("Memory could be insufficient when sendResponse()."); - } - - return builder; + builder.endArray(); + builder.endObject(); + } + + /** Code copy from SearchHit but only keep fields interested and replace source by sourceMap */ + private static void toXContent(XContentBuilder builder, Params params, SearchHit hit) + throws IOException { + builder.startObject(); + if (hit.getId() != null) { + builder.field("_id", hit.getId()); } - /** - * Code copy from SearchHits - */ - 
private static void toXContent(XContentBuilder builder, Params params, List hits, - ElasticJoinExecutor executor) throws IOException { - builder.startObject(SearchHits.Fields.HITS); - builder.field(SearchHits.Fields.TOTAL, ImmutableMap.of( - "value", hits.size(), - "relation", Relation.EQUAL_TO - )); - builder.field(SearchHits.Fields.MAX_SCORE, 1.0f); - builder.field(SearchHits.Fields.HITS); - builder.startArray(); - - for (int i = 0; i < hits.size(); i++) { - if (i % 10000 == 0 && !BackOffRetryStrategy.isHealthy()) { - throw new IllegalStateException("Memory circuit break when generating json builder"); - } - toXContent(builder, params, hits.get(i)); - } - - builder.endArray(); - builder.endObject(); + if (Float.isNaN(hit.getScore())) { + builder.nullField("_score"); + } else { + builder.field("_score", hit.getScore()); } - /** - * Code copy from SearchHit but only keep fields interested and replace source by sourceMap + /* + * Use sourceMap rather than binary source because source is out-of-date + * and only used when creating a new instance of SearchHit */ - private static void toXContent(XContentBuilder builder, Params params, SearchHit hit) throws IOException { - builder.startObject(); - if (hit.getId() != null) { - builder.field("_id", hit.getId()); - } - - if (Float.isNaN(hit.getScore())) { - builder.nullField("_score"); - } else { - builder.field("_score", hit.getScore()); - } - - /* - * Use sourceMap rather than binary source because source is out-of-date - * and only used when creating a new instance of SearchHit - */ - builder.field("_source", hit.getSourceAsMap()); - builder.endObject(); - } + builder.field("_source", hit.getSourceAsMap()); + builder.endObject(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/HashJoinComparisonStructure.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/HashJoinComparisonStructure.java index 52d292a2e5..8216feac66 100644 --- 
a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/HashJoinComparisonStructure.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/HashJoinComparisonStructure.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.join; import java.util.ArrayList; @@ -14,55 +13,56 @@ import org.opensearch.search.SearchHit; import org.opensearch.sql.legacy.domain.Field; -/** - * Created by Eliran on 2/11/2015. - */ +/** Created by Eliran on 2/11/2015. */ public class HashJoinComparisonStructure { - private HashMap>> comparisonIDtoComparisonFields; - private HashMap> comparisonIDtoComparisonHash; + private HashMap>> comparisonIDtoComparisonFields; + private HashMap> comparisonIDtoComparisonHash; - public HashJoinComparisonStructure(List>> t1ToT2FieldsComparisons) { - comparisonIDtoComparisonFields = new HashMap<>(); - comparisonIDtoComparisonHash = new HashMap<>(); - if (t1ToT2FieldsComparisons == null || t1ToT2FieldsComparisons.size() == 0) { - String comparisonId = UUID.randomUUID().toString(); - this.comparisonIDtoComparisonFields.put(comparisonId, new ArrayList>()); - this.comparisonIDtoComparisonHash.put(comparisonId, new HashMap()); - } - for (List> comparisonFields : t1ToT2FieldsComparisons) { - String comparisonId = UUID.randomUUID().toString(); - //maby from field to List ? 
- this.comparisonIDtoComparisonFields.put(comparisonId, comparisonFields); - this.comparisonIDtoComparisonHash.put(comparisonId, new HashMap()); - } + public HashJoinComparisonStructure(List>> t1ToT2FieldsComparisons) { + comparisonIDtoComparisonFields = new HashMap<>(); + comparisonIDtoComparisonHash = new HashMap<>(); + if (t1ToT2FieldsComparisons == null || t1ToT2FieldsComparisons.size() == 0) { + String comparisonId = UUID.randomUUID().toString(); + this.comparisonIDtoComparisonFields.put( + comparisonId, new ArrayList>()); + this.comparisonIDtoComparisonHash.put(comparisonId, new HashMap()); } - - public HashMap>> getComparisons() { - return comparisonIDtoComparisonFields; + for (List> comparisonFields : t1ToT2FieldsComparisons) { + String comparisonId = UUID.randomUUID().toString(); + // maby from field to List ? + this.comparisonIDtoComparisonFields.put(comparisonId, comparisonFields); + this.comparisonIDtoComparisonHash.put(comparisonId, new HashMap()); } + } - public void insertIntoComparisonHash(String comparisonID, String comparisonKey, SearchHit hit) { - HashMap comparisonHash = this.comparisonIDtoComparisonHash.get(comparisonID); - SearchHitsResult currentSearchHitsResult = comparisonHash.get(comparisonKey); - if (currentSearchHitsResult == null) { - currentSearchHitsResult = new SearchHitsResult(new ArrayList(), false); - comparisonHash.put(comparisonKey, currentSearchHitsResult); - } - currentSearchHitsResult.getSearchHits().add(hit); - } + public HashMap>> getComparisons() { + return comparisonIDtoComparisonFields; + } - public SearchHitsResult searchForMatchingSearchHits(String comparisonID, String comparisonKey) { - HashMap comparisonHash = this.comparisonIDtoComparisonHash.get(comparisonID); - return comparisonHash.get(comparisonKey); + public void insertIntoComparisonHash(String comparisonID, String comparisonKey, SearchHit hit) { + HashMap comparisonHash = + this.comparisonIDtoComparisonHash.get(comparisonID); + SearchHitsResult 
currentSearchHitsResult = comparisonHash.get(comparisonKey); + if (currentSearchHitsResult == null) { + currentSearchHitsResult = new SearchHitsResult(new ArrayList(), false); + comparisonHash.put(comparisonKey, currentSearchHitsResult); } + currentSearchHitsResult.getSearchHits().add(hit); + } - public List getAllSearchHits() { - List allSearchHits = new ArrayList<>(); + public SearchHitsResult searchForMatchingSearchHits(String comparisonID, String comparisonKey) { + HashMap comparisonHash = + this.comparisonIDtoComparisonHash.get(comparisonID); + return comparisonHash.get(comparisonKey); + } - for (HashMap comparisonHash : this.comparisonIDtoComparisonHash.values()) { - allSearchHits.addAll(comparisonHash.values()); - } - return allSearchHits; - } + public List getAllSearchHits() { + List allSearchHits = new ArrayList<>(); + for (HashMap comparisonHash : + this.comparisonIDtoComparisonHash.values()) { + allSearchHits.addAll(comparisonHash.values()); + } + return allSearchHits; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/HashJoinElasticExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/HashJoinElasticExecutor.java index 5703cf2ef5..06a913205d 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/HashJoinElasticExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/HashJoinElasticExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.join; import com.alibaba.druid.sql.ast.statement.SQLJoinTableSource; @@ -34,341 +33,385 @@ import org.opensearch.sql.legacy.query.join.TableInJoinRequestBuilder; import org.opensearch.sql.legacy.query.maker.QueryMaker; -/** - * Created by Eliran on 22/8/2015. - */ +/** Created by Eliran on 22/8/2015. 
*/ public class HashJoinElasticExecutor extends ElasticJoinExecutor { - private HashJoinElasticRequestBuilder requestBuilder; - - - private Client client; - private boolean useQueryTermsFilterOptimization = false; - private final int MAX_RESULTS_FOR_FIRST_TABLE = 100000; - HashJoinComparisonStructure hashJoinComparisonStructure; - private Set alreadyMatched; - - public HashJoinElasticExecutor(Client client, HashJoinElasticRequestBuilder requestBuilder) { - super(requestBuilder); - this.client = client; - this.requestBuilder = requestBuilder; - this.useQueryTermsFilterOptimization = requestBuilder.isUseTermFiltersOptimization(); - this.hashJoinComparisonStructure = new HashJoinComparisonStructure(requestBuilder.getT1ToT2FieldsComparison()); - this.alreadyMatched = new HashSet<>(); + private HashJoinElasticRequestBuilder requestBuilder; + + private Client client; + private boolean useQueryTermsFilterOptimization = false; + private final int MAX_RESULTS_FOR_FIRST_TABLE = 100000; + HashJoinComparisonStructure hashJoinComparisonStructure; + private Set alreadyMatched; + + public HashJoinElasticExecutor(Client client, HashJoinElasticRequestBuilder requestBuilder) { + super(requestBuilder); + this.client = client; + this.requestBuilder = requestBuilder; + this.useQueryTermsFilterOptimization = requestBuilder.isUseTermFiltersOptimization(); + this.hashJoinComparisonStructure = + new HashJoinComparisonStructure(requestBuilder.getT1ToT2FieldsComparison()); + this.alreadyMatched = new HashSet<>(); + } + + public List innerRun() throws IOException, SqlParseException { + + Map>> optimizationTermsFilterStructure = + initOptimizationStructure(); + + updateFirstTableLimitIfNeeded(); + TableInJoinRequestBuilder firstTableRequest = requestBuilder.getFirstTable(); + createKeyToResultsAndFillOptimizationStructure( + optimizationTermsFilterStructure, firstTableRequest); + + TableInJoinRequestBuilder secondTableRequest = requestBuilder.getSecondTable(); + if 
(needToOptimize(optimizationTermsFilterStructure)) { + updateRequestWithTermsFilter(optimizationTermsFilterStructure, secondTableRequest); } - public List innerRun() throws IOException, SqlParseException { - - Map>> optimizationTermsFilterStructure = initOptimizationStructure(); - - updateFirstTableLimitIfNeeded(); - TableInJoinRequestBuilder firstTableRequest = requestBuilder.getFirstTable(); - createKeyToResultsAndFillOptimizationStructure(optimizationTermsFilterStructure, firstTableRequest); - - TableInJoinRequestBuilder secondTableRequest = requestBuilder.getSecondTable(); - if (needToOptimize(optimizationTermsFilterStructure)) { - updateRequestWithTermsFilter(optimizationTermsFilterStructure, secondTableRequest); - } - - List combinedResult = createCombinedResults(secondTableRequest); - - int currentNumOfResults = combinedResult.size(); - int totalLimit = requestBuilder.getTotalLimit(); - if (requestBuilder.getJoinType() == SQLJoinTableSource.JoinType.LEFT_OUTER_JOIN - && currentNumOfResults < totalLimit) { - String t1Alias = requestBuilder.getFirstTable().getAlias(); - String t2Alias = requestBuilder.getSecondTable().getAlias(); - //todo: for each till Limit - addUnmatchedResults(combinedResult, this.hashJoinComparisonStructure.getAllSearchHits(), - requestBuilder.getSecondTable().getReturnedFields(), - currentNumOfResults, totalLimit, - t1Alias, - t2Alias); - } - if (firstTableRequest.getOriginalSelect().isOrderdSelect()) { - Collections.sort(combinedResult, new Comparator() { - @Override - public int compare(SearchHit o1, SearchHit o2) { - return o1.docId() - o2.docId(); - } - }); - - } - return combinedResult; + List combinedResult = createCombinedResults(secondTableRequest); + + int currentNumOfResults = combinedResult.size(); + int totalLimit = requestBuilder.getTotalLimit(); + if (requestBuilder.getJoinType() == SQLJoinTableSource.JoinType.LEFT_OUTER_JOIN + && currentNumOfResults < totalLimit) { + String t1Alias = 
requestBuilder.getFirstTable().getAlias(); + String t2Alias = requestBuilder.getSecondTable().getAlias(); + // todo: for each till Limit + addUnmatchedResults( + combinedResult, + this.hashJoinComparisonStructure.getAllSearchHits(), + requestBuilder.getSecondTable().getReturnedFields(), + currentNumOfResults, + totalLimit, + t1Alias, + t2Alias); } - - private Map>> initOptimizationStructure() { - Map>> optimizationTermsFilterStructure = new HashMap<>(); - for (String comparisonId : this.hashJoinComparisonStructure.getComparisons().keySet()) { - optimizationTermsFilterStructure.put(comparisonId, new HashMap>()); - } - return optimizationTermsFilterStructure; - } - - private void updateFirstTableLimitIfNeeded() { - if (requestBuilder.getJoinType() == SQLJoinTableSource.JoinType.LEFT_OUTER_JOIN) { - Integer firstTableHintLimit = requestBuilder.getFirstTable().getHintLimit(); - int totalLimit = requestBuilder.getTotalLimit(); - if (firstTableHintLimit == null || firstTableHintLimit > totalLimit) { - requestBuilder.getFirstTable().setHintLimit(totalLimit); + if (firstTableRequest.getOriginalSelect().isOrderdSelect()) { + Collections.sort( + combinedResult, + new Comparator() { + @Override + public int compare(SearchHit o1, SearchHit o2) { + return o1.docId() - o2.docId(); } - } + }); } + return combinedResult; + } - private List createCombinedResults(TableInJoinRequestBuilder secondTableRequest) { - List combinedResult = new ArrayList<>(); - int resultIds = 0; - int totalLimit = this.requestBuilder.getTotalLimit(); - Integer hintLimit = secondTableRequest.getHintLimit(); - SearchResponse searchResponse; - boolean finishedScrolling; - if (hintLimit != null && hintLimit < MAX_RESULTS_ON_ONE_FETCH) { - searchResponse = secondTableRequest.getRequestBuilder().setSize(hintLimit).get(); - finishedScrolling = true; - } else { - searchResponse = secondTableRequest.getRequestBuilder() - .setScroll(new TimeValue(60000)) - .setSize(MAX_RESULTS_ON_ONE_FETCH).get(); - //es5.0 no need 
to scroll again! -// searchResponse = client.prepareSearchScroll(searchResponse.getScrollId()) -// .setScroll(new TimeValue(600000)).get(); - finishedScrolling = false; + private Map>> initOptimizationStructure() { + Map>> optimizationTermsFilterStructure = new HashMap<>(); + for (String comparisonId : this.hashJoinComparisonStructure.getComparisons().keySet()) { + optimizationTermsFilterStructure.put(comparisonId, new HashMap>()); + } + return optimizationTermsFilterStructure; + } + + private void updateFirstTableLimitIfNeeded() { + if (requestBuilder.getJoinType() == SQLJoinTableSource.JoinType.LEFT_OUTER_JOIN) { + Integer firstTableHintLimit = requestBuilder.getFirstTable().getHintLimit(); + int totalLimit = requestBuilder.getTotalLimit(); + if (firstTableHintLimit == null || firstTableHintLimit > totalLimit) { + requestBuilder.getFirstTable().setHintLimit(totalLimit); + } + } + } + + private List createCombinedResults(TableInJoinRequestBuilder secondTableRequest) { + List combinedResult = new ArrayList<>(); + int resultIds = 0; + int totalLimit = this.requestBuilder.getTotalLimit(); + Integer hintLimit = secondTableRequest.getHintLimit(); + SearchResponse searchResponse; + boolean finishedScrolling; + if (hintLimit != null && hintLimit < MAX_RESULTS_ON_ONE_FETCH) { + searchResponse = secondTableRequest.getRequestBuilder().setSize(hintLimit).get(); + finishedScrolling = true; + } else { + searchResponse = + secondTableRequest + .getRequestBuilder() + .setScroll(new TimeValue(60000)) + .setSize(MAX_RESULTS_ON_ONE_FETCH) + .get(); + // es5.0 no need to scroll again! 
+ // searchResponse = client.prepareSearchScroll(searchResponse.getScrollId()) + // .setScroll(new TimeValue(600000)).get(); + finishedScrolling = false; + } + updateMetaSearchResults(searchResponse); + + boolean limitReached = false; + int fetchedSoFarFromSecondTable = 0; + while (!limitReached) { + SearchHit[] secondTableHits = searchResponse.getHits().getHits(); + fetchedSoFarFromSecondTable += secondTableHits.length; + for (SearchHit secondTableHit : secondTableHits) { + if (limitReached) { + break; } - updateMetaSearchResults(searchResponse); - - boolean limitReached = false; - int fetchedSoFarFromSecondTable = 0; - while (!limitReached) { - SearchHit[] secondTableHits = searchResponse.getHits().getHits(); - fetchedSoFarFromSecondTable += secondTableHits.length; - for (SearchHit secondTableHit : secondTableHits) { - if (limitReached) { - break; - } - //todo: need to run on comparisons. for each comparison check if exists and add. - HashMap>> comparisons = - this.hashJoinComparisonStructure.getComparisons(); - - for (Map.Entry>> comparison : comparisons.entrySet()) { - String comparisonID = comparison.getKey(); - List> t1ToT2FieldsComparison = comparison.getValue(); - String key = getComparisonKey(t1ToT2FieldsComparison, secondTableHit, false, null); - - SearchHitsResult searchHitsResult = - this.hashJoinComparisonStructure.searchForMatchingSearchHits(comparisonID, key); - - if (searchHitsResult != null && searchHitsResult.getSearchHits().size() > 0) { - searchHitsResult.setMatchedWithOtherTable(true); - List searchHits = searchHitsResult.getSearchHits(); - for (SearchHit matchingHit : searchHits) { - String combinedId = matchingHit.getId() + "|" + secondTableHit.getId(); - //in order to prevent same matching when using OR on hashJoins. 
- if (this.alreadyMatched.contains(combinedId)) { - continue; - } else { - this.alreadyMatched.add(combinedId); - } - - Map copiedSource = new HashMap(); - copyMaps(copiedSource, secondTableHit.getSourceAsMap()); - onlyReturnedFields(copiedSource, secondTableRequest.getReturnedFields(), - secondTableRequest.getOriginalSelect().isSelectAll()); - - Map documentFields = new HashMap<>(); - Map metaFields = new HashMap<>(); - matchingHit.getFields().forEach((fieldName, docField) -> - (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? metaFields : documentFields).put(fieldName, docField)); - SearchHit searchHit = new SearchHit(matchingHit.docId(), combinedId, - documentFields, metaFields); - searchHit.sourceRef(matchingHit.getSourceRef()); - searchHit.getSourceAsMap().clear(); - searchHit.getSourceAsMap().putAll(matchingHit.getSourceAsMap()); - String t1Alias = requestBuilder.getFirstTable().getAlias(); - String t2Alias = requestBuilder.getSecondTable().getAlias(); - mergeSourceAndAddAliases(copiedSource, searchHit, t1Alias, t2Alias); - - combinedResult.add(searchHit); - resultIds++; - if (resultIds >= totalLimit) { - limitReached = true; - break; - } - } - } - } - } - if (!finishedScrolling) { - if (secondTableHits.length > 0 && (hintLimit == null || fetchedSoFarFromSecondTable >= hintLimit)) { - searchResponse = client.prepareSearchScroll(searchResponse.getScrollId()) - .setScroll(new TimeValue(600000)).execute().actionGet(); - } else { - break; - } - } else { + // todo: need to run on comparisons. for each comparison check if exists and add. 
+ HashMap>> comparisons = + this.hashJoinComparisonStructure.getComparisons(); + + for (Map.Entry>> comparison : comparisons.entrySet()) { + String comparisonID = comparison.getKey(); + List> t1ToT2FieldsComparison = comparison.getValue(); + String key = getComparisonKey(t1ToT2FieldsComparison, secondTableHit, false, null); + + SearchHitsResult searchHitsResult = + this.hashJoinComparisonStructure.searchForMatchingSearchHits(comparisonID, key); + + if (searchHitsResult != null && searchHitsResult.getSearchHits().size() > 0) { + searchHitsResult.setMatchedWithOtherTable(true); + List searchHits = searchHitsResult.getSearchHits(); + for (SearchHit matchingHit : searchHits) { + String combinedId = matchingHit.getId() + "|" + secondTableHit.getId(); + // in order to prevent same matching when using OR on hashJoins. + if (this.alreadyMatched.contains(combinedId)) { + continue; + } else { + this.alreadyMatched.add(combinedId); + } + + Map copiedSource = new HashMap(); + copyMaps(copiedSource, secondTableHit.getSourceAsMap()); + onlyReturnedFields( + copiedSource, + secondTableRequest.getReturnedFields(), + secondTableRequest.getOriginalSelect().isSelectAll()); + + Map documentFields = new HashMap<>(); + Map metaFields = new HashMap<>(); + matchingHit + .getFields() + .forEach( + (fieldName, docField) -> + (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) + ? 
metaFields + : documentFields) + .put(fieldName, docField)); + SearchHit searchHit = + new SearchHit(matchingHit.docId(), combinedId, documentFields, metaFields); + searchHit.sourceRef(matchingHit.getSourceRef()); + searchHit.getSourceAsMap().clear(); + searchHit.getSourceAsMap().putAll(matchingHit.getSourceAsMap()); + String t1Alias = requestBuilder.getFirstTable().getAlias(); + String t2Alias = requestBuilder.getSecondTable().getAlias(); + mergeSourceAndAddAliases(copiedSource, searchHit, t1Alias, t2Alias); + + combinedResult.add(searchHit); + resultIds++; + if (resultIds >= totalLimit) { + limitReached = true; break; + } } + } } - return combinedResult; - } - - private void copyMaps(Map into, Map from) { - for (Map.Entry keyAndValue : from.entrySet()) { - into.put(keyAndValue.getKey(), keyAndValue.getValue()); + } + if (!finishedScrolling) { + if (secondTableHits.length > 0 + && (hintLimit == null || fetchedSoFarFromSecondTable >= hintLimit)) { + searchResponse = + client + .prepareSearchScroll(searchResponse.getScrollId()) + .setScroll(new TimeValue(600000)) + .execute() + .actionGet(); + } else { + break; } + } else { + break; + } } + return combinedResult; + } - private void createKeyToResultsAndFillOptimizationStructure( - Map>> optimizationTermsFilterStructure, - TableInJoinRequestBuilder firstTableRequest) { - List firstTableHits = fetchAllHits(firstTableRequest); - - int resultIds = 1; - for (SearchHit hit : firstTableHits) { - HashMap>> comparisons = - this.hashJoinComparisonStructure.getComparisons(); - for (Map.Entry>> comparison : comparisons.entrySet()) { - String comparisonID = comparison.getKey(); - List> t1ToT2FieldsComparison = comparison.getValue(); - - String key = getComparisonKey(t1ToT2FieldsComparison, hit, true, - optimizationTermsFilterStructure.get(comparisonID)); - - //int docid , id - Map documentFields = new HashMap<>(); - Map metaFields = new HashMap<>(); - hit.getFields().forEach((fieldName, docField) -> - 
(MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? metaFields : documentFields).put(fieldName, docField)); - SearchHit searchHit = new SearchHit(resultIds, hit.getId(), documentFields - , metaFields); - searchHit.sourceRef(hit.getSourceRef()); - - onlyReturnedFields(searchHit.getSourceAsMap(), firstTableRequest.getReturnedFields(), - firstTableRequest.getOriginalSelect().isSelectAll()); - resultIds++; - this.hashJoinComparisonStructure.insertIntoComparisonHash(comparisonID, key, searchHit); - } - } + private void copyMaps(Map into, Map from) { + for (Map.Entry keyAndValue : from.entrySet()) { + into.put(keyAndValue.getKey(), keyAndValue.getValue()); } - - private List fetchAllHits(TableInJoinRequestBuilder tableInJoinRequest) { - Integer hintLimit = tableInJoinRequest.getHintLimit(); - SearchRequestBuilder requestBuilder = tableInJoinRequest.getRequestBuilder(); - if (hintLimit != null && hintLimit < MAX_RESULTS_ON_ONE_FETCH) { - requestBuilder.setSize(hintLimit); - SearchResponse searchResponse = requestBuilder.get(); - updateMetaSearchResults(searchResponse); - return Arrays.asList(searchResponse.getHits().getHits()); - } - return scrollTillLimit(tableInJoinRequest, hintLimit); + } + + private void createKeyToResultsAndFillOptimizationStructure( + Map>> optimizationTermsFilterStructure, + TableInJoinRequestBuilder firstTableRequest) { + List firstTableHits = fetchAllHits(firstTableRequest); + + int resultIds = 1; + for (SearchHit hit : firstTableHits) { + HashMap>> comparisons = + this.hashJoinComparisonStructure.getComparisons(); + for (Map.Entry>> comparison : comparisons.entrySet()) { + String comparisonID = comparison.getKey(); + List> t1ToT2FieldsComparison = comparison.getValue(); + + String key = + getComparisonKey( + t1ToT2FieldsComparison, + hit, + true, + optimizationTermsFilterStructure.get(comparisonID)); + + // int docid , id + Map documentFields = new HashMap<>(); + Map metaFields = new HashMap<>(); + hit.getFields() + .forEach( + 
(fieldName, docField) -> + (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) + ? metaFields + : documentFields) + .put(fieldName, docField)); + SearchHit searchHit = new SearchHit(resultIds, hit.getId(), documentFields, metaFields); + searchHit.sourceRef(hit.getSourceRef()); + + onlyReturnedFields( + searchHit.getSourceAsMap(), + firstTableRequest.getReturnedFields(), + firstTableRequest.getOriginalSelect().isSelectAll()); + resultIds++; + this.hashJoinComparisonStructure.insertIntoComparisonHash(comparisonID, key, searchHit); + } + } + } + + private List fetchAllHits(TableInJoinRequestBuilder tableInJoinRequest) { + Integer hintLimit = tableInJoinRequest.getHintLimit(); + SearchRequestBuilder requestBuilder = tableInJoinRequest.getRequestBuilder(); + if (hintLimit != null && hintLimit < MAX_RESULTS_ON_ONE_FETCH) { + requestBuilder.setSize(hintLimit); + SearchResponse searchResponse = requestBuilder.get(); + updateMetaSearchResults(searchResponse); + return Arrays.asList(searchResponse.getHits().getHits()); } + return scrollTillLimit(tableInJoinRequest, hintLimit); + } - private List scrollTillLimit(TableInJoinRequestBuilder tableInJoinRequest, Integer hintLimit) { - SearchResponse scrollResp = scrollOneTimeWithMax(client, tableInJoinRequest); + private List scrollTillLimit( + TableInJoinRequestBuilder tableInJoinRequest, Integer hintLimit) { + SearchResponse scrollResp = scrollOneTimeWithMax(client, tableInJoinRequest); - updateMetaSearchResults(scrollResp); - List hitsWithScan = new ArrayList<>(); - int curentNumOfResults = 0; - SearchHit[] hits = scrollResp.getHits().getHits(); + updateMetaSearchResults(scrollResp); + List hitsWithScan = new ArrayList<>(); + int curentNumOfResults = 0; + SearchHit[] hits = scrollResp.getHits().getHits(); - if (hintLimit == null) { - hintLimit = MAX_RESULTS_FOR_FIRST_TABLE; - } - - while (hits.length != 0 && curentNumOfResults < hintLimit) { - curentNumOfResults += hits.length; - Collections.addAll(hitsWithScan, hits); 
- if (curentNumOfResults >= MAX_RESULTS_FOR_FIRST_TABLE) { - //todo: log or exception? - System.out.println("too many results for first table, stoping at:" + curentNumOfResults); - break; - } - scrollResp = client.prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(600000)) - .execute().actionGet(); - hits = scrollResp.getHits().getHits(); - } - return hitsWithScan; + if (hintLimit == null) { + hintLimit = MAX_RESULTS_FOR_FIRST_TABLE; } - private boolean needToOptimize(Map>> optimizationTermsFilterStructure) { - if (!useQueryTermsFilterOptimization && optimizationTermsFilterStructure != null - && optimizationTermsFilterStructure.size() > 0) { - return false; - } - boolean allEmpty = true; - for (Map> optimization : optimizationTermsFilterStructure.values()) { - if (optimization.size() > 0) { - allEmpty = false; - break; - } - } - return !allEmpty; + while (hits.length != 0 && curentNumOfResults < hintLimit) { + curentNumOfResults += hits.length; + Collections.addAll(hitsWithScan, hits); + if (curentNumOfResults >= MAX_RESULTS_FOR_FIRST_TABLE) { + // todo: log or exception? 
+ System.out.println("too many results for first table, stoping at:" + curentNumOfResults); + break; + } + scrollResp = + client + .prepareSearchScroll(scrollResp.getScrollId()) + .setScroll(new TimeValue(600000)) + .execute() + .actionGet(); + hits = scrollResp.getHits().getHits(); } - - private void updateRequestWithTermsFilter(Map>> optimizationTermsFilterStructure, - TableInJoinRequestBuilder secondTableRequest) throws SqlParseException { - Select select = secondTableRequest.getOriginalSelect(); - - BoolQueryBuilder orQuery = QueryBuilders.boolQuery(); - for (Map> optimization : optimizationTermsFilterStructure.values()) { - BoolQueryBuilder andQuery = QueryBuilders.boolQuery(); - for (Map.Entry> keyToValues : optimization.entrySet()) { - String fieldName = keyToValues.getKey(); - List values = keyToValues.getValue(); - andQuery.must(QueryBuilders.termsQuery(fieldName, values)); - } - orQuery.should(andQuery); - } - - Where where = select.getWhere(); - - BoolQueryBuilder boolQuery; - if (where != null) { - boolQuery = QueryMaker.explain(where, false); - boolQuery.must(orQuery); - } else { - boolQuery = orQuery; - } - secondTableRequest.getRequestBuilder().setQuery(boolQuery); + return hitsWithScan; + } + + private boolean needToOptimize( + Map>> optimizationTermsFilterStructure) { + if (!useQueryTermsFilterOptimization + && optimizationTermsFilterStructure != null + && optimizationTermsFilterStructure.size() > 0) { + return false; + } + boolean allEmpty = true; + for (Map> optimization : optimizationTermsFilterStructure.values()) { + if (optimization.size() > 0) { + allEmpty = false; + break; + } + } + return !allEmpty; + } + + private void updateRequestWithTermsFilter( + Map>> optimizationTermsFilterStructure, + TableInJoinRequestBuilder secondTableRequest) + throws SqlParseException { + Select select = secondTableRequest.getOriginalSelect(); + + BoolQueryBuilder orQuery = QueryBuilders.boolQuery(); + for (Map> optimization : 
optimizationTermsFilterStructure.values()) { + BoolQueryBuilder andQuery = QueryBuilders.boolQuery(); + for (Map.Entry> keyToValues : optimization.entrySet()) { + String fieldName = keyToValues.getKey(); + List values = keyToValues.getValue(); + andQuery.must(QueryBuilders.termsQuery(fieldName, values)); + } + orQuery.should(andQuery); } - private String getComparisonKey(List> t1ToT2FieldsComparison, SearchHit hit, - boolean firstTable, Map> optimizationTermsFilterStructure) { - String key = ""; - Map sourceAsMap = hit.getSourceAsMap(); - for (Map.Entry t1ToT2 : t1ToT2FieldsComparison) { - //todo: change to our function find if key contains '.' - String name; - if (firstTable) { - name = t1ToT2.getKey().getName(); - } else { - name = t1ToT2.getValue().getName(); - } + Where where = select.getWhere(); - Object data = deepSearchInMap(sourceAsMap, name); - if (firstTable && useQueryTermsFilterOptimization) { - updateOptimizationData(optimizationTermsFilterStructure, data, t1ToT2.getValue().getName()); - } - if (data == null) { - key += "|null|"; - } else { - key += "|" + data.toString() + "|"; - } - } - return key; + BoolQueryBuilder boolQuery; + if (where != null) { + boolQuery = QueryMaker.explain(where, false); + boolQuery.must(orQuery); + } else { + boolQuery = orQuery; } - - private void updateOptimizationData(Map> optimizationTermsFilterStructure, - Object data, String queryOptimizationKey) { - List values = optimizationTermsFilterStructure.get(queryOptimizationKey); - if (values == null) { - values = new ArrayList<>(); - optimizationTermsFilterStructure.put(queryOptimizationKey, values); - } - if (data instanceof String) { - //todo: analyzed or not analyzed check.. 
- data = ((String) data).toLowerCase(); - } - if (data != null) { - values.add(data); - } + secondTableRequest.getRequestBuilder().setQuery(boolQuery); + } + + private String getComparisonKey( + List> t1ToT2FieldsComparison, + SearchHit hit, + boolean firstTable, + Map> optimizationTermsFilterStructure) { + String key = ""; + Map sourceAsMap = hit.getSourceAsMap(); + for (Map.Entry t1ToT2 : t1ToT2FieldsComparison) { + // todo: change to our function find if key contains '.' + String name; + if (firstTable) { + name = t1ToT2.getKey().getName(); + } else { + name = t1ToT2.getValue().getName(); + } + + Object data = deepSearchInMap(sourceAsMap, name); + if (firstTable && useQueryTermsFilterOptimization) { + updateOptimizationData(optimizationTermsFilterStructure, data, t1ToT2.getValue().getName()); + } + if (data == null) { + key += "|null|"; + } else { + key += "|" + data.toString() + "|"; + } + } + return key; + } + + private void updateOptimizationData( + Map> optimizationTermsFilterStructure, + Object data, + String queryOptimizationKey) { + List values = optimizationTermsFilterStructure.get(queryOptimizationKey); + if (values == null) { + values = new ArrayList<>(); + optimizationTermsFilterStructure.put(queryOptimizationKey, values); + } + if (data instanceof String) { + // todo: analyzed or not analyzed check.. + data = ((String) data).toLowerCase(); + } + if (data != null) { + values.add(data); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/MetaSearchResult.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/MetaSearchResult.java index abdcf05751..a4174b7247 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/MetaSearchResult.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/MetaSearchResult.java @@ -3,64 +3,60 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.join; -/** - * Created by Eliran on 4/9/2015. 
- */ +/** Created by Eliran on 4/9/2015. */ public class MetaSearchResult { - private long tookImMilli; - private int totalNumOfShards; - private int successfulShards; - private int failedShards; - private boolean isTimedOut; - - public MetaSearchResult() { - totalNumOfShards = 0; - failedShards = 0; - successfulShards = 0; - isTimedOut = false; - } - - public int getTotalNumOfShards() { - return totalNumOfShards; - } - - public int getSuccessfulShards() { - return successfulShards; - } - - public int getFailedShards() { - return failedShards; - } - - public boolean isTimedOut() { - return isTimedOut; - } - - public long getTookImMilli() { - return tookImMilli; - } - - public void setTookImMilli(long tookImMilli) { - this.tookImMilli = tookImMilli; - } - - public void addFailedShards(int shards) { - this.failedShards += shards; - } - - public void addSuccessfulShards(int shards) { - this.successfulShards += shards; - } - - public void addTotalNumOfShards(int shards) { - this.totalNumOfShards += shards; - } - - public void updateTimeOut(boolean isTimedOut) { - this.isTimedOut = this.isTimedOut || isTimedOut; - } - + private long tookImMilli; + private int totalNumOfShards; + private int successfulShards; + private int failedShards; + private boolean isTimedOut; + + public MetaSearchResult() { + totalNumOfShards = 0; + failedShards = 0; + successfulShards = 0; + isTimedOut = false; + } + + public int getTotalNumOfShards() { + return totalNumOfShards; + } + + public int getSuccessfulShards() { + return successfulShards; + } + + public int getFailedShards() { + return failedShards; + } + + public boolean isTimedOut() { + return isTimedOut; + } + + public long getTookImMilli() { + return tookImMilli; + } + + public void setTookImMilli(long tookImMilli) { + this.tookImMilli = tookImMilli; + } + + public void addFailedShards(int shards) { + this.failedShards += shards; + } + + public void addSuccessfulShards(int shards) { + this.successfulShards += shards; + } + + public 
void addTotalNumOfShards(int shards) { + this.totalNumOfShards += shards; + } + + public void updateTimeOut(boolean isTimedOut) { + this.isTimedOut = this.isTimedOut || isTimedOut; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/NestedLoopsElasticExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/NestedLoopsElasticExecutor.java index 21a9a6054f..56c5f96af5 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/NestedLoopsElasticExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/NestedLoopsElasticExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.join; import com.alibaba.druid.sql.ast.statement.SQLJoinTableSource; @@ -34,301 +33,354 @@ import org.opensearch.sql.legacy.query.join.TableInJoinRequestBuilder; import org.opensearch.sql.legacy.query.maker.Maker; -/** - * Created by Eliran on 15/9/2015. - */ +/** Created by Eliran on 15/9/2015. 
*/ public class NestedLoopsElasticExecutor extends ElasticJoinExecutor { - private static final Logger LOG = LogManager.getLogger(); + private static final Logger LOG = LogManager.getLogger(); - private final NestedLoopsElasticRequestBuilder nestedLoopsRequest; - private final Client client; + private final NestedLoopsElasticRequestBuilder nestedLoopsRequest; + private final Client client; - public NestedLoopsElasticExecutor(Client client, NestedLoopsElasticRequestBuilder nestedLoops) { - super(nestedLoops); - this.client = client; - this.nestedLoopsRequest = nestedLoops; - } + public NestedLoopsElasticExecutor(Client client, NestedLoopsElasticRequestBuilder nestedLoops) { + super(nestedLoops); + this.client = client; + this.nestedLoopsRequest = nestedLoops; + } - @Override - protected List innerRun() throws SqlParseException { - List combinedResults = new ArrayList<>(); - int totalLimit = nestedLoopsRequest.getTotalLimit(); - int multiSearchMaxSize = nestedLoopsRequest.getMultiSearchMaxSize(); - Select secondTableSelect = nestedLoopsRequest.getSecondTable().getOriginalSelect(); - Where originalSecondTableWhere = secondTableSelect.getWhere(); + @Override + protected List innerRun() throws SqlParseException { + List combinedResults = new ArrayList<>(); + int totalLimit = nestedLoopsRequest.getTotalLimit(); + int multiSearchMaxSize = nestedLoopsRequest.getMultiSearchMaxSize(); + Select secondTableSelect = nestedLoopsRequest.getSecondTable().getOriginalSelect(); + Where originalSecondTableWhere = secondTableSelect.getWhere(); - orderConditions(nestedLoopsRequest.getFirstTable().getAlias(), nestedLoopsRequest.getSecondTable().getAlias()); + orderConditions( + nestedLoopsRequest.getFirstTable().getAlias(), + nestedLoopsRequest.getSecondTable().getAlias()); + if (!BackOffRetryStrategy.isHealthy()) { + throw new IllegalStateException("Memory circuit is broken"); + } + FetchWithScrollResponse fetchWithScrollResponse = + firstFetch(this.nestedLoopsRequest.getFirstTable()); 
+ SearchResponse firstTableResponse = fetchWithScrollResponse.getResponse(); + boolean needScrollForFirstTable = fetchWithScrollResponse.isNeedScrollForFirstTable(); + + int currentCombinedResults = 0; + boolean finishedWithFirstTable = false; + + while (totalLimit > currentCombinedResults && !finishedWithFirstTable) { + + SearchHit[] hits = firstTableResponse.getHits().getHits(); + boolean finishedMultiSearches = hits.length == 0; + int currentHitsIndex = 0; + + while (!finishedMultiSearches) { + MultiSearchRequest multiSearchRequest = + createMultiSearchRequest( + multiSearchMaxSize, + nestedLoopsRequest.getConnectedWhere(), + hits, + secondTableSelect, + originalSecondTableWhere, + currentHitsIndex); + int multiSearchSize = multiSearchRequest.requests().size(); if (!BackOffRetryStrategy.isHealthy()) { - throw new IllegalStateException("Memory circuit is broken"); + throw new IllegalStateException("Memory circuit is broken"); } - FetchWithScrollResponse fetchWithScrollResponse = firstFetch(this.nestedLoopsRequest.getFirstTable()); - SearchResponse firstTableResponse = fetchWithScrollResponse.getResponse(); - boolean needScrollForFirstTable = fetchWithScrollResponse.isNeedScrollForFirstTable(); - - int currentCombinedResults = 0; - boolean finishedWithFirstTable = false; - - while (totalLimit > currentCombinedResults && !finishedWithFirstTable) { - - SearchHit[] hits = firstTableResponse.getHits().getHits(); - boolean finishedMultiSearches = hits.length == 0; - int currentHitsIndex = 0; - - while (!finishedMultiSearches) { - MultiSearchRequest multiSearchRequest = createMultiSearchRequest(multiSearchMaxSize, - nestedLoopsRequest.getConnectedWhere(), hits, secondTableSelect, - originalSecondTableWhere, currentHitsIndex); - int multiSearchSize = multiSearchRequest.requests().size(); - if (!BackOffRetryStrategy.isHealthy()) { - throw new IllegalStateException("Memory circuit is broken"); - } - currentCombinedResults = combineResultsFromMultiResponses(combinedResults, 
totalLimit, - currentCombinedResults, hits, currentHitsIndex, multiSearchRequest); - currentHitsIndex += multiSearchSize; - finishedMultiSearches = currentHitsIndex >= hits.length - 1 || currentCombinedResults >= totalLimit; - } - - if (hits.length < MAX_RESULTS_ON_ONE_FETCH) { - needScrollForFirstTable = false; - } - - if (!finishedWithFirstTable) { - if (needScrollForFirstTable) { - if (!BackOffRetryStrategy.isHealthy()) { - throw new IllegalStateException("Memory circuit is broken"); - } - firstTableResponse = client.prepareSearchScroll(firstTableResponse.getScrollId()) - .setScroll(new TimeValue(600000)).get(); - } else { - finishedWithFirstTable = true; - } - } - + currentCombinedResults = + combineResultsFromMultiResponses( + combinedResults, + totalLimit, + currentCombinedResults, + hits, + currentHitsIndex, + multiSearchRequest); + currentHitsIndex += multiSearchSize; + finishedMultiSearches = + currentHitsIndex >= hits.length - 1 || currentCombinedResults >= totalLimit; + } + + if (hits.length < MAX_RESULTS_ON_ONE_FETCH) { + needScrollForFirstTable = false; + } + + if (!finishedWithFirstTable) { + if (needScrollForFirstTable) { + if (!BackOffRetryStrategy.isHealthy()) { + throw new IllegalStateException("Memory circuit is broken"); + } + firstTableResponse = + client + .prepareSearchScroll(firstTableResponse.getScrollId()) + .setScroll(new TimeValue(600000)) + .get(); + } else { + finishedWithFirstTable = true; } - return combinedResults; + } } - - private int combineResultsFromMultiResponses(List combinedResults, int totalLimit, - int currentCombinedResults, SearchHit[] hits, int currentIndex, - MultiSearchRequest multiSearchRequest) { - MultiSearchResponse.Item[] responses = new OpenSearchClient(client).multiSearch(multiSearchRequest); - String t1Alias = nestedLoopsRequest.getFirstTable().getAlias(); - String t2Alias = nestedLoopsRequest.getSecondTable().getAlias(); - - for (int j = 0; j < responses.length && currentCombinedResults < totalLimit; j++) { - 
SearchHit hitFromFirstTable = hits[currentIndex + j]; - onlyReturnedFields(hitFromFirstTable.getSourceAsMap(), - nestedLoopsRequest.getFirstTable().getReturnedFields(), - nestedLoopsRequest.getFirstTable().getOriginalSelect().isSelectAll()); - - SearchResponse multiItemResponse = responses[j].getResponse(); - - if (multiItemResponse == null) { - continue; - } - - updateMetaSearchResults(multiItemResponse); - - //todo: if responseForHit.getHits.length < responseForHit.getTotalHits(). need to fetch more! - SearchHits responseForHit = multiItemResponse.getHits(); - - if (responseForHit.getHits().length == 0 && nestedLoopsRequest.getJoinType() - == SQLJoinTableSource.JoinType.LEFT_OUTER_JOIN) { - SearchHit unmachedResult = createUnmachedResult(nestedLoopsRequest.getSecondTable().getReturnedFields(), - currentCombinedResults, t1Alias, t2Alias, hitFromFirstTable); - combinedResults.add(unmachedResult); - currentCombinedResults++; - continue; - } - - for (SearchHit matchedHit : responseForHit.getHits()) { - SearchHit searchHit = getMergedHit(currentCombinedResults, t1Alias, t2Alias, hitFromFirstTable, - matchedHit); - combinedResults.add(searchHit); - currentCombinedResults++; - if (currentCombinedResults >= totalLimit) { - break; - } - } - if (currentCombinedResults >= totalLimit) { - break; - } - + return combinedResults; + } + + private int combineResultsFromMultiResponses( + List combinedResults, + int totalLimit, + int currentCombinedResults, + SearchHit[] hits, + int currentIndex, + MultiSearchRequest multiSearchRequest) { + MultiSearchResponse.Item[] responses = + new OpenSearchClient(client).multiSearch(multiSearchRequest); + String t1Alias = nestedLoopsRequest.getFirstTable().getAlias(); + String t2Alias = nestedLoopsRequest.getSecondTable().getAlias(); + + for (int j = 0; j < responses.length && currentCombinedResults < totalLimit; j++) { + SearchHit hitFromFirstTable = hits[currentIndex + j]; + onlyReturnedFields( + hitFromFirstTable.getSourceAsMap(), + 
nestedLoopsRequest.getFirstTable().getReturnedFields(), + nestedLoopsRequest.getFirstTable().getOriginalSelect().isSelectAll()); + + SearchResponse multiItemResponse = responses[j].getResponse(); + + if (multiItemResponse == null) { + continue; + } + + updateMetaSearchResults(multiItemResponse); + + // todo: if responseForHit.getHits.length < responseForHit.getTotalHits(). need to fetch more! + SearchHits responseForHit = multiItemResponse.getHits(); + + if (responseForHit.getHits().length == 0 + && nestedLoopsRequest.getJoinType() == SQLJoinTableSource.JoinType.LEFT_OUTER_JOIN) { + SearchHit unmachedResult = + createUnmachedResult( + nestedLoopsRequest.getSecondTable().getReturnedFields(), + currentCombinedResults, + t1Alias, + t2Alias, + hitFromFirstTable); + combinedResults.add(unmachedResult); + currentCombinedResults++; + continue; + } + + for (SearchHit matchedHit : responseForHit.getHits()) { + SearchHit searchHit = + getMergedHit(currentCombinedResults, t1Alias, t2Alias, hitFromFirstTable, matchedHit); + combinedResults.add(searchHit); + currentCombinedResults++; + if (currentCombinedResults >= totalLimit) { + break; } - return currentCombinedResults; - } - - private SearchHit getMergedHit(int currentCombinedResults, String t1Alias, String t2Alias, - SearchHit hitFromFirstTable, SearchHit matchedHit) { - onlyReturnedFields(matchedHit.getSourceAsMap(), nestedLoopsRequest.getSecondTable().getReturnedFields(), - nestedLoopsRequest.getSecondTable().getOriginalSelect().isSelectAll()); - Map documentFields = new HashMap<>(); - Map metaFields = new HashMap<>(); - matchedHit.getFields().forEach((fieldName, docField) -> - (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? 
metaFields : documentFields).put(fieldName, docField)); - SearchHit searchHit = new SearchHit(currentCombinedResults, hitFromFirstTable.getId() + "|" - + matchedHit.getId(), documentFields, metaFields); - searchHit.sourceRef(hitFromFirstTable.getSourceRef()); - searchHit.getSourceAsMap().clear(); - searchHit.getSourceAsMap().putAll(hitFromFirstTable.getSourceAsMap()); - - mergeSourceAndAddAliases(matchedHit.getSourceAsMap(), searchHit, t1Alias, t2Alias); - return searchHit; + } + if (currentCombinedResults >= totalLimit) { + break; + } } - - private MultiSearchRequest createMultiSearchRequest(int multiSearchMaxSize, Where connectedWhere, SearchHit[] hits, - Select secondTableSelect, Where originalWhere, int currentIndex) - throws SqlParseException { - MultiSearchRequest multiSearchRequest = new MultiSearchRequest(); - for (int i = currentIndex; i < currentIndex + multiSearchMaxSize && i < hits.length; i++) { - Map hitFromFirstTableAsMap = hits[i].getSourceAsMap(); - Where newWhere = Where.newInstance(); - if (originalWhere != null) { - newWhere.addWhere(originalWhere); - } - if (connectedWhere != null) { - Where connectedWhereCloned = null; - try { - connectedWhereCloned = (Where) connectedWhere.clone(); - } catch (CloneNotSupportedException e) { - e.printStackTrace(); - } - updateValuesOnWhereConditions(hitFromFirstTableAsMap, connectedWhereCloned); - newWhere.addWhere(connectedWhereCloned); - } - - -// for(Condition c : conditions){ -// Object value = deepSearchInMap(hitFromFirstTableAsMap,c.getValue().toString()); -// Condition conditionWithValue = new Condition(Where.CONN.AND,c.getName(),c.getOpear(),value); -// newWhere.addWhere(conditionWithValue); -// } - //using the 2nd table select and DefaultAction because we can't just change query on request - // (need to create lot of requests) - if (newWhere.getWheres().size() != 0) { - secondTableSelect.setWhere(newWhere); - } - DefaultQueryAction action = new DefaultQueryAction(this.client, secondTableSelect); - 
action.explain(); - SearchRequestBuilder secondTableRequest = action.getRequestBuilder(); - Integer secondTableHintLimit = this.nestedLoopsRequest.getSecondTable().getHintLimit(); - if (secondTableHintLimit != null && secondTableHintLimit <= MAX_RESULTS_ON_ONE_FETCH) { - secondTableRequest.setSize(secondTableHintLimit); - } - multiSearchRequest.add(secondTableRequest); + return currentCombinedResults; + } + + private SearchHit getMergedHit( + int currentCombinedResults, + String t1Alias, + String t2Alias, + SearchHit hitFromFirstTable, + SearchHit matchedHit) { + onlyReturnedFields( + matchedHit.getSourceAsMap(), + nestedLoopsRequest.getSecondTable().getReturnedFields(), + nestedLoopsRequest.getSecondTable().getOriginalSelect().isSelectAll()); + Map documentFields = new HashMap<>(); + Map metaFields = new HashMap<>(); + matchedHit + .getFields() + .forEach( + (fieldName, docField) -> + (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) + ? metaFields + : documentFields) + .put(fieldName, docField)); + SearchHit searchHit = + new SearchHit( + currentCombinedResults, + hitFromFirstTable.getId() + "|" + matchedHit.getId(), + documentFields, + metaFields); + searchHit.sourceRef(hitFromFirstTable.getSourceRef()); + searchHit.getSourceAsMap().clear(); + searchHit.getSourceAsMap().putAll(hitFromFirstTable.getSourceAsMap()); + + mergeSourceAndAddAliases(matchedHit.getSourceAsMap(), searchHit, t1Alias, t2Alias); + return searchHit; + } + + private MultiSearchRequest createMultiSearchRequest( + int multiSearchMaxSize, + Where connectedWhere, + SearchHit[] hits, + Select secondTableSelect, + Where originalWhere, + int currentIndex) + throws SqlParseException { + MultiSearchRequest multiSearchRequest = new MultiSearchRequest(); + for (int i = currentIndex; i < currentIndex + multiSearchMaxSize && i < hits.length; i++) { + Map hitFromFirstTableAsMap = hits[i].getSourceAsMap(); + Where newWhere = Where.newInstance(); + if (originalWhere != null) { + 
newWhere.addWhere(originalWhere); + } + if (connectedWhere != null) { + Where connectedWhereCloned = null; + try { + connectedWhereCloned = (Where) connectedWhere.clone(); + } catch (CloneNotSupportedException e) { + e.printStackTrace(); } - return multiSearchRequest; + updateValuesOnWhereConditions(hitFromFirstTableAsMap, connectedWhereCloned); + newWhere.addWhere(connectedWhereCloned); + } + + // for(Condition c : conditions){ + // Object value = + // deepSearchInMap(hitFromFirstTableAsMap,c.getValue().toString()); + // Condition conditionWithValue = new + // Condition(Where.CONN.AND,c.getName(),c.getOpear(),value); + // newWhere.addWhere(conditionWithValue); + // } + // using the 2nd table select and DefaultAction because we can't just change query on request + // (need to create lot of requests) + if (newWhere.getWheres().size() != 0) { + secondTableSelect.setWhere(newWhere); + } + DefaultQueryAction action = new DefaultQueryAction(this.client, secondTableSelect); + action.explain(); + SearchRequestBuilder secondTableRequest = action.getRequestBuilder(); + Integer secondTableHintLimit = this.nestedLoopsRequest.getSecondTable().getHintLimit(); + if (secondTableHintLimit != null && secondTableHintLimit <= MAX_RESULTS_ON_ONE_FETCH) { + secondTableRequest.setSize(secondTableHintLimit); + } + multiSearchRequest.add(secondTableRequest); } - - private void updateValuesOnWhereConditions(Map hit, Where where) { - if (where instanceof Condition) { - Condition c = (Condition) where; - Object value = deepSearchInMap(hit, c.getValue().toString()); - if (value == null) { - value = Maker.NONE; - } - c.setValue(value); - } - for (Where innerWhere : where.getWheres()) { - updateValuesOnWhereConditions(hit, innerWhere); - } + return multiSearchRequest; + } + + private void updateValuesOnWhereConditions(Map hit, Where where) { + if (where instanceof Condition) { + Condition c = (Condition) where; + Object value = deepSearchInMap(hit, c.getValue().toString()); + if (value == null) 
{ + value = Maker.NONE; + } + c.setValue(value); } - - private FetchWithScrollResponse firstFetch(TableInJoinRequestBuilder tableRequest) { - Integer hintLimit = tableRequest.getHintLimit(); - boolean needScrollForFirstTable = false; - SearchResponse responseWithHits; - if (hintLimit != null && hintLimit < MAX_RESULTS_ON_ONE_FETCH) { - - responseWithHits = tableRequest.getRequestBuilder().setSize(hintLimit).get(); - needScrollForFirstTable = false; - } else { - //scroll request with max. - responseWithHits = scrollOneTimeWithMax(client, tableRequest); - if (responseWithHits.getHits().getTotalHits() != null - && responseWithHits.getHits().getTotalHits().value < MAX_RESULTS_ON_ONE_FETCH) { - needScrollForFirstTable = true; - } - } - - updateMetaSearchResults(responseWithHits); - return new FetchWithScrollResponse(responseWithHits, needScrollForFirstTable); + for (Where innerWhere : where.getWheres()) { + updateValuesOnWhereConditions(hit, innerWhere); } - - - private void orderConditions(String t1Alias, String t2Alias) { - orderConditionRecursive(t1Alias, t2Alias, nestedLoopsRequest.getConnectedWhere()); -// Collection conditions = nestedLoopsRequest.getT1FieldToCondition().values(); -// for(Condition c : conditions){ -// //TODO: support all orders and for each OPEAR find his related OPEAR (< is > , EQ is EQ ,etc..) 
-// if(!c.getName().startsWith(t2Alias+".") || !c.getValue().toString().startsWith(t1Alias +".")) -// throw new RuntimeException("On NestedLoops currently only supported Ordered conditions -// t2.field2 OPEAR t1.field1) , badCondition was:" + c); -// c.setName(c.getName().replaceFirst(t2Alias+".","")); -// c.setValue(c.getValue().toString().replaceFirst(t1Alias+ ".", "")); -// } + } + + private FetchWithScrollResponse firstFetch(TableInJoinRequestBuilder tableRequest) { + Integer hintLimit = tableRequest.getHintLimit(); + boolean needScrollForFirstTable = false; + SearchResponse responseWithHits; + if (hintLimit != null && hintLimit < MAX_RESULTS_ON_ONE_FETCH) { + + responseWithHits = tableRequest.getRequestBuilder().setSize(hintLimit).get(); + needScrollForFirstTable = false; + } else { + // scroll request with max. + responseWithHits = scrollOneTimeWithMax(client, tableRequest); + if (responseWithHits.getHits().getTotalHits() != null + && responseWithHits.getHits().getTotalHits().value < MAX_RESULTS_ON_ONE_FETCH) { + needScrollForFirstTable = true; + } } - private void orderConditionRecursive(String t1Alias, String t2Alias, Where where) { - if (where == null) { - return; - } - if (where instanceof Condition) { - Condition c = (Condition) where; - if (shouldReverse(c, t1Alias, t2Alias)) { - try { - reverseOrderOfCondition(c, t1Alias, t2Alias); - return; - } catch (SqlParseException e) { - //Do nothing here to continue using original logic below. - //The condition is not changed here. 
- } - } - if (!c.getName().startsWith(t2Alias + ".") || !c.getValue().toString().startsWith(t1Alias + ".")) { - throw new RuntimeException("On NestedLoops currently only supported Ordered conditions " - + "(t2.field2 OPEAR t1.field1) , badCondition was:" + c); - } - c.setName(c.getName().replaceFirst(t2Alias + ".", "")); - c.setValue(c.getValue().toString().replaceFirst(t1Alias + ".", "")); - return; - } else { - for (Where innerWhere : where.getWheres()) { - orderConditionRecursive(t1Alias, t2Alias, innerWhere); - } + updateMetaSearchResults(responseWithHits); + return new FetchWithScrollResponse(responseWithHits, needScrollForFirstTable); + } + + private void orderConditions(String t1Alias, String t2Alias) { + orderConditionRecursive(t1Alias, t2Alias, nestedLoopsRequest.getConnectedWhere()); + // Collection conditions = + // nestedLoopsRequest.getT1FieldToCondition().values(); + // for(Condition c : conditions){ + // //TODO: support all orders and for each OPEAR find his related OPEAR (< is > , EQ + // is EQ ,etc..) + // if(!c.getName().startsWith(t2Alias+".") || + // !c.getValue().toString().startsWith(t1Alias +".")) + // throw new RuntimeException("On NestedLoops currently only supported Ordered + // conditions + // t2.field2 OPEAR t1.field1) , badCondition was:" + c); + // c.setName(c.getName().replaceFirst(t2Alias+".","")); + // c.setValue(c.getValue().toString().replaceFirst(t1Alias+ ".", "")); + // } + } + + private void orderConditionRecursive(String t1Alias, String t2Alias, Where where) { + if (where == null) { + return; + } + if (where instanceof Condition) { + Condition c = (Condition) where; + if (shouldReverse(c, t1Alias, t2Alias)) { + try { + reverseOrderOfCondition(c, t1Alias, t2Alias); + return; + } catch (SqlParseException e) { + // Do nothing here to continue using original logic below. + // The condition is not changed here. 
} + } + if (!c.getName().startsWith(t2Alias + ".") + || !c.getValue().toString().startsWith(t1Alias + ".")) { + throw new RuntimeException( + "On NestedLoops currently only supported Ordered conditions " + + "(t2.field2 OPEAR t1.field1) , badCondition was:" + + c); + } + c.setName(c.getName().replaceFirst(t2Alias + ".", "")); + c.setValue(c.getValue().toString().replaceFirst(t1Alias + ".", "")); + return; + } else { + for (Where innerWhere : where.getWheres()) { + orderConditionRecursive(t1Alias, t2Alias, innerWhere); + } } - - private Boolean shouldReverse(Condition cond, String t1Alias, String t2Alias) { - return cond.getName().startsWith(t1Alias + ".") && cond.getValue().toString().startsWith(t2Alias + ".") - && cond.getOPERATOR().isSimpleOperator(); + } + + private Boolean shouldReverse(Condition cond, String t1Alias, String t2Alias) { + return cond.getName().startsWith(t1Alias + ".") + && cond.getValue().toString().startsWith(t2Alias + ".") + && cond.getOPERATOR().isSimpleOperator(); + } + + private void reverseOrderOfCondition(Condition cond, String t1Alias, String t2Alias) + throws SqlParseException { + cond.setOPERATOR(cond.getOPERATOR().simpleReverse()); + String name = cond.getName(); + cond.setName(cond.getValue().toString().replaceFirst(t2Alias + ".", "")); + cond.setValue(name.replaceFirst(t1Alias + ".", "")); + } + + private class FetchWithScrollResponse { + private SearchResponse response; + private boolean needScrollForFirstTable; + + private FetchWithScrollResponse(SearchResponse response, boolean needScrollForFirstTable) { + this.response = response; + this.needScrollForFirstTable = needScrollForFirstTable; } - private void reverseOrderOfCondition(Condition cond, String t1Alias, String t2Alias) throws SqlParseException { - cond.setOPERATOR(cond.getOPERATOR().simpleReverse()); - String name = cond.getName(); - cond.setName(cond.getValue().toString().replaceFirst(t2Alias + ".", "")); - cond.setValue(name.replaceFirst(t1Alias + ".", "")); + public 
SearchResponse getResponse() { + return response; } - - private class FetchWithScrollResponse { - private SearchResponse response; - private boolean needScrollForFirstTable; - - private FetchWithScrollResponse(SearchResponse response, boolean needScrollForFirstTable) { - this.response = response; - this.needScrollForFirstTable = needScrollForFirstTable; - } - - public SearchResponse getResponse() { - return response; - } - - public boolean isNeedScrollForFirstTable() { - return needScrollForFirstTable; - } - + public boolean isNeedScrollForFirstTable() { + return needScrollForFirstTable; } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/QueryPlanElasticExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/QueryPlanElasticExecutor.java index 5702d397d5..f4b2f5421d 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/QueryPlanElasticExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/QueryPlanElasticExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.join; import java.util.List; @@ -12,31 +11,30 @@ import org.opensearch.sql.legacy.query.planner.core.QueryPlanner; /** - * Executor for generic QueryPlanner execution. This executor is just acting as adaptor to integrate with - * existing framework. In future, QueryPlanner should be executed by itself and leave the response sent back - * or other post-processing logic to ElasticDefaultRestExecutor. + * Executor for generic QueryPlanner execution. This executor is just acting as adaptor to integrate + * with existing framework. In future, QueryPlanner should be executed by itself and leave the + * response sent back or other post-processing logic to ElasticDefaultRestExecutor. 
*/ class QueryPlanElasticExecutor extends ElasticJoinExecutor { - private final QueryPlanner queryPlanner; - - QueryPlanElasticExecutor(HashJoinQueryPlanRequestBuilder request) { - super(request); - this.queryPlanner = request.plan(); - } - - @Override - protected List innerRun() { - List result = queryPlanner.execute(); - populateMetaResult(); - return result; - } - - private void populateMetaResult() { - metaResults.addTotalNumOfShards(queryPlanner.getMetaResult().getTotalNumOfShards()); - metaResults.addSuccessfulShards(queryPlanner.getMetaResult().getSuccessfulShards()); - metaResults.addFailedShards(queryPlanner.getMetaResult().getFailedShards()); - metaResults.updateTimeOut(queryPlanner.getMetaResult().isTimedOut()); - } - + private final QueryPlanner queryPlanner; + + QueryPlanElasticExecutor(HashJoinQueryPlanRequestBuilder request) { + super(request); + this.queryPlanner = request.plan(); + } + + @Override + protected List innerRun() { + List result = queryPlanner.execute(); + populateMetaResult(); + return result; + } + + private void populateMetaResult() { + metaResults.addTotalNumOfShards(queryPlanner.getMetaResult().getTotalNumOfShards()); + metaResults.addSuccessfulShards(queryPlanner.getMetaResult().getSuccessfulShards()); + metaResults.addFailedShards(queryPlanner.getMetaResult().getFailedShards()); + metaResults.updateTimeOut(queryPlanner.getMetaResult().isTimedOut()); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/SearchHitsResult.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/SearchHitsResult.java index 0955de9b88..10a1555874 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/SearchHitsResult.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/join/SearchHitsResult.java @@ -3,42 +3,39 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.join; import java.util.ArrayList; import java.util.List; import 
org.opensearch.search.SearchHit; -/** - * Created by Eliran on 28/8/2015. - */ +/** Created by Eliran on 28/8/2015. */ public class SearchHitsResult { - private List searchHits; - private boolean matchedWithOtherTable; + private List searchHits; + private boolean matchedWithOtherTable; - public SearchHitsResult() { - searchHits = new ArrayList<>(); - } + public SearchHitsResult() { + searchHits = new ArrayList<>(); + } - public SearchHitsResult(List searchHits, boolean matchedWithOtherTable) { - this.searchHits = searchHits; - this.matchedWithOtherTable = matchedWithOtherTable; - } + public SearchHitsResult(List searchHits, boolean matchedWithOtherTable) { + this.searchHits = searchHits; + this.matchedWithOtherTable = matchedWithOtherTable; + } - public List getSearchHits() { - return searchHits; - } + public List getSearchHits() { + return searchHits; + } - public void setSearchHits(List searchHits) { - this.searchHits = searchHits; - } + public void setSearchHits(List searchHits) { + this.searchHits = searchHits; + } - public boolean isMatchedWithOtherTable() { - return matchedWithOtherTable; - } + public boolean isMatchedWithOtherTable() { + return matchedWithOtherTable; + } - public void setMatchedWithOtherTable(boolean matchedWithOtherTable) { - this.matchedWithOtherTable = matchedWithOtherTable; - } + public void setMatchedWithOtherTable(boolean matchedWithOtherTable) { + this.matchedWithOtherTable = matchedWithOtherTable; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/ComperableHitResult.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/ComperableHitResult.java index 766ecd3692..fa3514600b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/ComperableHitResult.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/ComperableHitResult.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.multi; import 
com.google.common.base.Joiner; @@ -14,72 +13,70 @@ import org.opensearch.search.SearchHit; import org.opensearch.sql.legacy.utils.Util; -/** - * Created by Eliran on 9/9/2016. - */ +/** Created by Eliran on 9/9/2016. */ public class ComperableHitResult { - private SearchHit hit; - private String comperator; - private boolean isAllNull; - private Map flattenMap; + private SearchHit hit; + private String comperator; + private boolean isAllNull; + private Map flattenMap; - public ComperableHitResult(SearchHit hit, String[] fieldsOrder, String seperator) { - this.hit = hit; - Map hitAsMap = hit.getSourceAsMap(); - this.flattenMap = new HashMap<>(); - List results = new ArrayList<>(); - this.isAllNull = true; + public ComperableHitResult(SearchHit hit, String[] fieldsOrder, String seperator) { + this.hit = hit; + Map hitAsMap = hit.getSourceAsMap(); + this.flattenMap = new HashMap<>(); + List results = new ArrayList<>(); + this.isAllNull = true; - for (int i = 0; i < fieldsOrder.length; i++) { - String field = fieldsOrder[i]; - Object result = Util.deepSearchInMap(hitAsMap, field); - if (result == null) { - results.add(""); - } else { - this.isAllNull = false; - results.add(result.toString()); - this.flattenMap.put(field, result); - } - } - this.comperator = Joiner.on(seperator).join(results); + for (int i = 0; i < fieldsOrder.length; i++) { + String field = fieldsOrder[i]; + Object result = Util.deepSearchInMap(hitAsMap, field); + if (result == null) { + results.add(""); + } else { + this.isAllNull = false; + results.add(result.toString()); + this.flattenMap.put(field, result); + } } + this.comperator = Joiner.on(seperator).join(results); + } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - ComperableHitResult that = (ComperableHitResult) o; + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != 
o.getClass()) { + return false; + } - if (!comperator.equals(that.comperator)) { - return false; - } + ComperableHitResult that = (ComperableHitResult) o; - return true; + if (!comperator.equals(that.comperator)) { + return false; } - public boolean isAllNull() { - return isAllNull; - } + return true; + } - @Override - public int hashCode() { - return comperator.hashCode(); - } + public boolean isAllNull() { + return isAllNull; + } - public String getComperator() { - return comperator; - } + @Override + public int hashCode() { + return comperator.hashCode(); + } - public Map getFlattenMap() { - return flattenMap; - } + public String getComperator() { + return comperator; + } - public SearchHit getOriginalHit() { - return hit; - } + public Map getFlattenMap() { + return flattenMap; + } + + public SearchHit getOriginalHit() { + return hit; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MinusExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MinusExecutor.java index 83901f1acb..03e16424e7 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MinusExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MinusExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.multi; import java.util.ArrayList; @@ -36,409 +35,453 @@ import org.opensearch.sql.legacy.query.multi.MultiQueryRequestBuilder; import org.opensearch.sql.legacy.utils.Util; - -/** - * Created by Eliran on 26/8/2016. - */ +/** Created by Eliran on 26/8/2016. 
*/ public class MinusExecutor implements ElasticHitsExecutor { - private Client client; - private MultiQueryRequestBuilder builder; - private SearchHits minusHits; - private boolean useTermsOptimization; - private boolean termsOptimizationWithToLower; - private boolean useScrolling; - private int maxDocsToFetchOnFirstTable; - private int maxDocsToFetchOnSecondTable; - private int maxDocsToFetchOnEachScrollShard; - private String[] fieldsOrderFirstTable; - private String[] fieldsOrderSecondTable; - private String seperator; - - public MinusExecutor(Client client, MultiQueryRequestBuilder builder) { - this.client = client; - this.builder = builder; - this.useTermsOptimization = false; - this.termsOptimizationWithToLower = false; - this.useScrolling = false; - parseHintsIfAny(builder.getOriginalSelect(true).getHints()); - fillFieldsOrder(); - seperator = UUID.randomUUID().toString(); + private Client client; + private MultiQueryRequestBuilder builder; + private SearchHits minusHits; + private boolean useTermsOptimization; + private boolean termsOptimizationWithToLower; + private boolean useScrolling; + private int maxDocsToFetchOnFirstTable; + private int maxDocsToFetchOnSecondTable; + private int maxDocsToFetchOnEachScrollShard; + private String[] fieldsOrderFirstTable; + private String[] fieldsOrderSecondTable; + private String seperator; + + public MinusExecutor(Client client, MultiQueryRequestBuilder builder) { + this.client = client; + this.builder = builder; + this.useTermsOptimization = false; + this.termsOptimizationWithToLower = false; + this.useScrolling = false; + parseHintsIfAny(builder.getOriginalSelect(true).getHints()); + fillFieldsOrder(); + seperator = UUID.randomUUID().toString(); + } + + @Override + public void run() throws SqlParseException { + if (this.useTermsOptimization && this.fieldsOrderFirstTable.length != 1) { + throw new SqlParseException( + "Terms optimization failed: terms optimization for minus execution is supported with one" + + " 
field"); } - - @Override - public void run() throws SqlParseException { - if (this.useTermsOptimization && this.fieldsOrderFirstTable.length != 1) { - throw new SqlParseException( - "Terms optimization failed: terms optimization for minus execution is supported with one field"); - } - if (this.useTermsOptimization && !this.useScrolling) { - throw new SqlParseException( - "Terms optimization failed: using scrolling is required for terms optimization"); - } - if (!this.useScrolling || !this.useTermsOptimization) { - Set comperableHitResults; - if (!this.useScrolling) { - //1. get results from first search , put in set - //2. get reults from second search - //2.1 for each result remove from set - comperableHitResults = simpleOneTimeQueryEach(); - } else { - //if scrolling - //1. get all results in scrolls (till some limit) . put on set - //2. scroll on second table - //3. on each scroll result remove items from set - comperableHitResults = runWithScrollings(); - } - fillMinusHitsFromResults(comperableHitResults); - return; - } else { - //if scrolling and optimization - // 0. save the original second table where , init set - // 1. 
on each scroll on first table , create miniSet - //1.1 build where from all results (terms filter) , and run query - //1.1.1 on each result remove from miniSet - //1.1.2 add all results left from miniset to bigset - Select firstSelect = this.builder.getOriginalSelect(true); - MinusOneFieldAndOptimizationResult optimizationResult = - runWithScrollingAndAddFilter(fieldsOrderFirstTable[0], fieldsOrderSecondTable[0]); - String fieldName = getFieldName(firstSelect.getFields().get(0)); - Set results = optimizationResult.getFieldValues(); - SearchHit someHit = optimizationResult.getSomeHit(); - fillMinusHitsFromOneField(fieldName, results, someHit); - + if (this.useTermsOptimization && !this.useScrolling) { + throw new SqlParseException( + "Terms optimization failed: using scrolling is required for terms optimization"); + } + if (!this.useScrolling || !this.useTermsOptimization) { + Set comperableHitResults; + if (!this.useScrolling) { + // 1. get results from first search , put in set + // 2. get reults from second search + // 2.1 for each result remove from set + comperableHitResults = simpleOneTimeQueryEach(); + } else { + // if scrolling + // 1. get all results in scrolls (till some limit) . put on set + // 2. scroll on second table + // 3. on each scroll result remove items from set + comperableHitResults = runWithScrollings(); + } + fillMinusHitsFromResults(comperableHitResults); + return; + } else { + // if scrolling and optimization + // 0. save the original second table where , init set + // 1. 
on each scroll on first table , create miniSet + // 1.1 build where from all results (terms filter) , and run query + // 1.1.1 on each result remove from miniSet + // 1.1.2 add all results left from miniset to bigset + Select firstSelect = this.builder.getOriginalSelect(true); + MinusOneFieldAndOptimizationResult optimizationResult = + runWithScrollingAndAddFilter(fieldsOrderFirstTable[0], fieldsOrderSecondTable[0]); + String fieldName = getFieldName(firstSelect.getFields().get(0)); + Set results = optimizationResult.getFieldValues(); + SearchHit someHit = optimizationResult.getSomeHit(); + fillMinusHitsFromOneField(fieldName, results, someHit); + } + } + + @Override + public SearchHits getHits() { + return this.minusHits; + } + + private void fillMinusHitsFromOneField( + String fieldName, Set fieldValues, SearchHit someHit) { + List minusHitsList = new ArrayList<>(); + int currentId = 1; + for (Object result : fieldValues) { + Map fields = new HashMap<>(); + ArrayList values = new ArrayList<>(); + values.add(result); + fields.put(fieldName, new DocumentField(fieldName, values)); + Map documentFields = new HashMap<>(); + Map metaFields = new HashMap<>(); + someHit + .getFields() + .forEach( + (field, docField) -> + (MapperService.META_FIELDS_BEFORE_7DOT8.contains(field) + ? 
metaFields + : documentFields) + .put(field, docField)); + SearchHit searchHit = new SearchHit(currentId, currentId + "", documentFields, metaFields); + searchHit.sourceRef(someHit.getSourceRef()); + searchHit.getSourceAsMap().clear(); + Map sourceAsMap = new HashMap<>(); + sourceAsMap.put(fieldName, result); + searchHit.getSourceAsMap().putAll(sourceAsMap); + currentId++; + minusHitsList.add(searchHit); + } + int totalSize = currentId - 1; + SearchHit[] unionHitsArr = minusHitsList.toArray(new SearchHit[totalSize]); + this.minusHits = + new SearchHits(unionHitsArr, new TotalHits(totalSize, Relation.EQUAL_TO), 1.0f); + } + + private void fillMinusHitsFromResults(Set comperableHitResults) { + int currentId = 1; + List minusHitsList = new ArrayList<>(); + for (ComperableHitResult result : comperableHitResults) { + ArrayList values = new ArrayList<>(); + values.add(result); + SearchHit originalHit = result.getOriginalHit(); + Map documentFields = new HashMap<>(); + Map metaFields = new HashMap<>(); + originalHit + .getFields() + .forEach( + (fieldName, docField) -> + (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) + ? 
metaFields + : documentFields) + .put(fieldName, docField)); + SearchHit searchHit = + new SearchHit(currentId, originalHit.getId(), documentFields, metaFields); + searchHit.sourceRef(originalHit.getSourceRef()); + searchHit.getSourceAsMap().clear(); + Map sourceAsMap = result.getFlattenMap(); + for (Map.Entry entry : this.builder.getFirstTableFieldToAlias().entrySet()) { + if (sourceAsMap.containsKey(entry.getKey())) { + Object value = sourceAsMap.get(entry.getKey()); + sourceAsMap.remove(entry.getKey()); + sourceAsMap.put(entry.getValue(), value); } + } + searchHit.getSourceAsMap().putAll(sourceAsMap); + currentId++; + minusHitsList.add(searchHit); } - - - @Override - public SearchHits getHits() { - return this.minusHits; + int totalSize = currentId - 1; + SearchHit[] unionHitsArr = minusHitsList.toArray(new SearchHit[totalSize]); + this.minusHits = + new SearchHits(unionHitsArr, new TotalHits(totalSize, Relation.EQUAL_TO), 1.0f); + } + + private Set runWithScrollings() { + + SearchResponse scrollResp = + ElasticUtils.scrollOneTimeWithHits( + this.client, + this.builder.getFirstSearchRequest(), + builder.getOriginalSelect(true), + this.maxDocsToFetchOnEachScrollShard); + Set results = new HashSet<>(); + + SearchHit[] hits = scrollResp.getHits().getHits(); + if (hits == null || hits.length == 0) { + return new HashSet<>(); } - - private void fillMinusHitsFromOneField(String fieldName, Set fieldValues, SearchHit someHit) { - List minusHitsList = new ArrayList<>(); - int currentId = 1; - for (Object result : fieldValues) { - Map fields = new HashMap<>(); - ArrayList values = new ArrayList<>(); - values.add(result); - fields.put(fieldName, new DocumentField(fieldName, values)); - Map documentFields = new HashMap<>(); - Map metaFields = new HashMap<>(); - someHit.getFields().forEach((field, docField) -> - (MapperService.META_FIELDS_BEFORE_7DOT8.contains(field) ? 
metaFields : documentFields).put(field, docField)); - SearchHit searchHit = new SearchHit(currentId, currentId + "", - documentFields, metaFields); - searchHit.sourceRef(someHit.getSourceRef()); - searchHit.getSourceAsMap().clear(); - Map sourceAsMap = new HashMap<>(); - sourceAsMap.put(fieldName, result); - searchHit.getSourceAsMap().putAll(sourceAsMap); - currentId++; - minusHitsList.add(searchHit); - } - int totalSize = currentId - 1; - SearchHit[] unionHitsArr = minusHitsList.toArray(new SearchHit[totalSize]); - this.minusHits = new SearchHits(unionHitsArr, new TotalHits(totalSize, Relation.EQUAL_TO), 1.0f); + int totalDocsFetchedFromFirstTable = 0; + + // fetch from first table . fill set. + while (hits != null && hits.length != 0) { + totalDocsFetchedFromFirstTable += hits.length; + fillComperableSetFromHits(this.fieldsOrderFirstTable, hits, results); + if (totalDocsFetchedFromFirstTable > this.maxDocsToFetchOnFirstTable) { + break; + } + scrollResp = + client + .prepareSearchScroll(scrollResp.getScrollId()) + .setScroll(new TimeValue(600000)) + .execute() + .actionGet(); + hits = scrollResp.getHits().getHits(); } - - private void fillMinusHitsFromResults(Set comperableHitResults) { - int currentId = 1; - List minusHitsList = new ArrayList<>(); - for (ComperableHitResult result : comperableHitResults) { - ArrayList values = new ArrayList<>(); - values.add(result); - SearchHit originalHit = result.getOriginalHit(); - Map documentFields = new HashMap<>(); - Map metaFields = new HashMap<>(); - originalHit.getFields().forEach((fieldName, docField) -> - (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? 
metaFields : documentFields).put(fieldName, docField)); - SearchHit searchHit = new SearchHit(currentId, originalHit.getId(), - documentFields, metaFields); - searchHit.sourceRef(originalHit.getSourceRef()); - searchHit.getSourceAsMap().clear(); - Map sourceAsMap = result.getFlattenMap(); - for (Map.Entry entry : this.builder.getFirstTableFieldToAlias().entrySet()) { - if (sourceAsMap.containsKey(entry.getKey())) { - Object value = sourceAsMap.get(entry.getKey()); - sourceAsMap.remove(entry.getKey()); - sourceAsMap.put(entry.getValue(), value); - } - } - - searchHit.getSourceAsMap().putAll(sourceAsMap); - currentId++; - minusHitsList.add(searchHit); - } - int totalSize = currentId - 1; - SearchHit[] unionHitsArr = minusHitsList.toArray(new SearchHit[totalSize]); - this.minusHits = new SearchHits(unionHitsArr, new TotalHits(totalSize, Relation.EQUAL_TO), 1.0f); + scrollResp = + ElasticUtils.scrollOneTimeWithHits( + this.client, + this.builder.getSecondSearchRequest(), + builder.getOriginalSelect(false), + this.maxDocsToFetchOnEachScrollShard); + + hits = scrollResp.getHits().getHits(); + if (hits == null || hits.length == 0) { + return results; } - - private Set runWithScrollings() { - - SearchResponse scrollResp = ElasticUtils.scrollOneTimeWithHits(this.client, - this.builder.getFirstSearchRequest(), - builder.getOriginalSelect(true), this.maxDocsToFetchOnEachScrollShard); - Set results = new HashSet<>(); - - SearchHit[] hits = scrollResp.getHits().getHits(); - if (hits == null || hits.length == 0) { - return new HashSet<>(); - } - int totalDocsFetchedFromFirstTable = 0; - - //fetch from first table . fill set. 
- while (hits != null && hits.length != 0) { - totalDocsFetchedFromFirstTable += hits.length; - fillComperableSetFromHits(this.fieldsOrderFirstTable, hits, results); - if (totalDocsFetchedFromFirstTable > this.maxDocsToFetchOnFirstTable) { - break; - } - scrollResp = client.prepareSearchScroll(scrollResp.getScrollId()) - .setScroll(new TimeValue(600000)).execute().actionGet(); - hits = scrollResp.getHits().getHits(); - } - scrollResp = ElasticUtils.scrollOneTimeWithHits(this.client, this.builder.getSecondSearchRequest(), - builder.getOriginalSelect(false), this.maxDocsToFetchOnEachScrollShard); - - - hits = scrollResp.getHits().getHits(); - if (hits == null || hits.length == 0) { - return results; - } - int totalDocsFetchedFromSecondTable = 0; - while (hits != null && hits.length != 0) { - totalDocsFetchedFromSecondTable += hits.length; - removeValuesFromSetAccordingToHits(this.fieldsOrderSecondTable, results, hits); - if (totalDocsFetchedFromSecondTable > this.maxDocsToFetchOnSecondTable) { - break; - } - scrollResp = client.prepareSearchScroll(scrollResp.getScrollId()) - .setScroll(new TimeValue(600000)).execute().actionGet(); - hits = scrollResp.getHits().getHits(); - } - - return results; + int totalDocsFetchedFromSecondTable = 0; + while (hits != null && hits.length != 0) { + totalDocsFetchedFromSecondTable += hits.length; + removeValuesFromSetAccordingToHits(this.fieldsOrderSecondTable, results, hits); + if (totalDocsFetchedFromSecondTable > this.maxDocsToFetchOnSecondTable) { + break; + } + scrollResp = + client + .prepareSearchScroll(scrollResp.getScrollId()) + .setScroll(new TimeValue(600000)) + .execute() + .actionGet(); + hits = scrollResp.getHits().getHits(); } - private Set simpleOneTimeQueryEach() { - SearchHit[] firstTableHits = this.builder.getFirstSearchRequest().get().getHits().getHits(); - if (firstTableHits == null || firstTableHits.length == 0) { - return new HashSet<>(); - } + return results; + } - Set result = new HashSet<>(); - 
fillComperableSetFromHits(this.fieldsOrderFirstTable, firstTableHits, result); - SearchHit[] secondTableHits = this.builder.getSecondSearchRequest().get().getHits().getHits(); - if (secondTableHits == null || secondTableHits.length == 0) { - return result; - } - removeValuesFromSetAccordingToHits(this.fieldsOrderSecondTable, result, secondTableHits); - return result; + private Set simpleOneTimeQueryEach() { + SearchHit[] firstTableHits = this.builder.getFirstSearchRequest().get().getHits().getHits(); + if (firstTableHits == null || firstTableHits.length == 0) { + return new HashSet<>(); } - private void removeValuesFromSetAccordingToHits(String[] fieldsOrder, - Set set, SearchHit[] hits) { - for (SearchHit hit : hits) { - ComperableHitResult comperableHitResult = new ComperableHitResult(hit, fieldsOrder, this.seperator); - if (!comperableHitResult.isAllNull()) { - set.remove(comperableHitResult); - } - } + Set result = new HashSet<>(); + fillComperableSetFromHits(this.fieldsOrderFirstTable, firstTableHits, result); + SearchHit[] secondTableHits = this.builder.getSecondSearchRequest().get().getHits().getHits(); + if (secondTableHits == null || secondTableHits.length == 0) { + return result; } - - private void fillComperableSetFromHits(String[] fieldsOrder, SearchHit[] hits, Set setToFill) { - for (SearchHit hit : hits) { - ComperableHitResult comperableHitResult = new ComperableHitResult(hit, fieldsOrder, this.seperator); - if (!comperableHitResult.isAllNull()) { - setToFill.add(comperableHitResult); - } - } + removeValuesFromSetAccordingToHits(this.fieldsOrderSecondTable, result, secondTableHits); + return result; + } + + private void removeValuesFromSetAccordingToHits( + String[] fieldsOrder, Set set, SearchHit[] hits) { + for (SearchHit hit : hits) { + ComperableHitResult comperableHitResult = + new ComperableHitResult(hit, fieldsOrder, this.seperator); + if (!comperableHitResult.isAllNull()) { + set.remove(comperableHitResult); + } } - - private String 
getFieldName(Field field) { - String alias = field.getAlias(); - if (alias != null && !alias.isEmpty()) { - return alias; - } - return field.getName(); + } + + private void fillComperableSetFromHits( + String[] fieldsOrder, SearchHit[] hits, Set setToFill) { + for (SearchHit hit : hits) { + ComperableHitResult comperableHitResult = + new ComperableHitResult(hit, fieldsOrder, this.seperator); + if (!comperableHitResult.isAllNull()) { + setToFill.add(comperableHitResult); + } } + } - private boolean checkIfOnlyOneField(Select firstSelect, Select secondSelect) { - return firstSelect.getFields().size() == 1 && secondSelect.getFields().size() == 1; + private String getFieldName(Field field) { + String alias = field.getAlias(); + if (alias != null && !alias.isEmpty()) { + return alias; } - - - // 0. save the original second table where , init set - // 1. on each scroll on first table , create miniSet - //1.1 build where from all results (terms filter) , and run query - //1.1.1 on each result remove from miniSet - //1.1.2 add all results left from miniset to bigset - private MinusOneFieldAndOptimizationResult runWithScrollingAndAddFilter(String firstFieldName, - String secondFieldName) - throws SqlParseException { - SearchResponse scrollResp = ElasticUtils.scrollOneTimeWithHits(this.client, - this.builder.getFirstSearchRequest(), - builder.getOriginalSelect(true), this.maxDocsToFetchOnEachScrollShard); - Set results = new HashSet<>(); - int currentNumOfResults = 0; - SearchHit[] hits = scrollResp.getHits().getHits(); - SearchHit someHit = null; - if (hits.length != 0) { - //we need some hit for creating InnerResults. 
- someHit = hits[0]; - } - int totalDocsFetchedFromFirstTable = 0; - int totalDocsFetchedFromSecondTable = 0; - Where originalWhereSecondTable = this.builder.getOriginalSelect(false).getWhere(); - while (hits.length != 0) { - totalDocsFetchedFromFirstTable += hits.length; - Set currentSetFromResults = new HashSet<>(); - fillSetFromHits(firstFieldName, hits, currentSetFromResults); - //fetch from second - Select secondQuerySelect = this.builder.getOriginalSelect(false); - Where where = createWhereWithOrigianlAndTermsFilter(secondFieldName, originalWhereSecondTable, - currentSetFromResults); - secondQuerySelect.setWhere(where); - DefaultQueryAction queryAction = new DefaultQueryAction(this.client, secondQuerySelect); - queryAction.explain(); - if (totalDocsFetchedFromSecondTable > this.maxDocsToFetchOnSecondTable) { - break; - } - SearchResponse responseForSecondTable = ElasticUtils.scrollOneTimeWithHits(this.client, - queryAction.getRequestBuilder(), secondQuerySelect, this.maxDocsToFetchOnEachScrollShard); - SearchHits secondQuerySearchHits = responseForSecondTable.getHits(); - - SearchHit[] secondQueryHits = secondQuerySearchHits.getHits(); - while (secondQueryHits.length > 0) { - totalDocsFetchedFromSecondTable += secondQueryHits.length; - removeValuesFromSetAccordingToHits(secondFieldName, currentSetFromResults, secondQueryHits); - if (totalDocsFetchedFromSecondTable > this.maxDocsToFetchOnSecondTable) { - break; - } - responseForSecondTable = client.prepareSearchScroll(responseForSecondTable.getScrollId()) - .setScroll(new TimeValue(600000)).execute().actionGet(); - secondQueryHits = responseForSecondTable.getHits().getHits(); - } - results.addAll(currentSetFromResults); - if (totalDocsFetchedFromFirstTable > this.maxDocsToFetchOnFirstTable) { - System.out.println("too many results for first table, stoping at:" + totalDocsFetchedFromFirstTable); - break; - } - - scrollResp = client.prepareSearchScroll(scrollResp.getScrollId()) - .setScroll(new 
TimeValue(600000)).execute().actionGet(); - hits = scrollResp.getHits().getHits(); - } - return new MinusOneFieldAndOptimizationResult(results, someHit); - - + return field.getName(); + } + + private boolean checkIfOnlyOneField(Select firstSelect, Select secondSelect) { + return firstSelect.getFields().size() == 1 && secondSelect.getFields().size() == 1; + } + + // 0. save the original second table where , init set + // 1. on each scroll on first table , create miniSet + // 1.1 build where from all results (terms filter) , and run query + // 1.1.1 on each result remove from miniSet + // 1.1.2 add all results left from miniset to bigset + private MinusOneFieldAndOptimizationResult runWithScrollingAndAddFilter( + String firstFieldName, String secondFieldName) throws SqlParseException { + SearchResponse scrollResp = + ElasticUtils.scrollOneTimeWithHits( + this.client, + this.builder.getFirstSearchRequest(), + builder.getOriginalSelect(true), + this.maxDocsToFetchOnEachScrollShard); + Set results = new HashSet<>(); + int currentNumOfResults = 0; + SearchHit[] hits = scrollResp.getHits().getHits(); + SearchHit someHit = null; + if (hits.length != 0) { + // we need some hit for creating InnerResults. 
+ someHit = hits[0]; } - - private void removeValuesFromSetAccordingToHits(String fieldName, Set setToRemoveFrom, SearchHit[] hits) { - for (SearchHit hit : hits) { - Object fieldValue = getFieldValue(hit, fieldName); - if (fieldValue != null) { - if (setToRemoveFrom.contains(fieldValue)) { - setToRemoveFrom.remove(fieldValue); - } - } + int totalDocsFetchedFromFirstTable = 0; + int totalDocsFetchedFromSecondTable = 0; + Where originalWhereSecondTable = this.builder.getOriginalSelect(false).getWhere(); + while (hits.length != 0) { + totalDocsFetchedFromFirstTable += hits.length; + Set currentSetFromResults = new HashSet<>(); + fillSetFromHits(firstFieldName, hits, currentSetFromResults); + // fetch from second + Select secondQuerySelect = this.builder.getOriginalSelect(false); + Where where = + createWhereWithOrigianlAndTermsFilter( + secondFieldName, originalWhereSecondTable, currentSetFromResults); + secondQuerySelect.setWhere(where); + DefaultQueryAction queryAction = new DefaultQueryAction(this.client, secondQuerySelect); + queryAction.explain(); + if (totalDocsFetchedFromSecondTable > this.maxDocsToFetchOnSecondTable) { + break; + } + SearchResponse responseForSecondTable = + ElasticUtils.scrollOneTimeWithHits( + this.client, + queryAction.getRequestBuilder(), + secondQuerySelect, + this.maxDocsToFetchOnEachScrollShard); + SearchHits secondQuerySearchHits = responseForSecondTable.getHits(); + + SearchHit[] secondQueryHits = secondQuerySearchHits.getHits(); + while (secondQueryHits.length > 0) { + totalDocsFetchedFromSecondTable += secondQueryHits.length; + removeValuesFromSetAccordingToHits(secondFieldName, currentSetFromResults, secondQueryHits); + if (totalDocsFetchedFromSecondTable > this.maxDocsToFetchOnSecondTable) { + break; } + responseForSecondTable = + client + .prepareSearchScroll(responseForSecondTable.getScrollId()) + .setScroll(new TimeValue(600000)) + .execute() + .actionGet(); + secondQueryHits = responseForSecondTable.getHits().getHits(); + } + 
results.addAll(currentSetFromResults); + if (totalDocsFetchedFromFirstTable > this.maxDocsToFetchOnFirstTable) { + System.out.println( + "too many results for first table, stoping at:" + totalDocsFetchedFromFirstTable); + break; + } + + scrollResp = + client + .prepareSearchScroll(scrollResp.getScrollId()) + .setScroll(new TimeValue(600000)) + .execute() + .actionGet(); + hits = scrollResp.getHits().getHits(); } - - private void fillSetFromHits(String fieldName, SearchHit[] hits, Set setToFill) { - for (SearchHit hit : hits) { - Object fieldValue = getFieldValue(hit, fieldName); - if (fieldValue != null) { - setToFill.add(fieldValue); - } + return new MinusOneFieldAndOptimizationResult(results, someHit); + } + + private void removeValuesFromSetAccordingToHits( + String fieldName, Set setToRemoveFrom, SearchHit[] hits) { + for (SearchHit hit : hits) { + Object fieldValue = getFieldValue(hit, fieldName); + if (fieldValue != null) { + if (setToRemoveFrom.contains(fieldValue)) { + setToRemoveFrom.remove(fieldValue); } + } } - - private Where createWhereWithOrigianlAndTermsFilter(String secondFieldName, Where originalWhereSecondTable, - Set currentSetFromResults) throws SqlParseException { - Where where = Where.newInstance(); - where.setConn(Where.CONN.AND); - where.addWhere(originalWhereSecondTable); - where.addWhere(buildTermsFilterFromResults(currentSetFromResults, secondFieldName)); - return where; + } + + private void fillSetFromHits(String fieldName, SearchHit[] hits, Set setToFill) { + for (SearchHit hit : hits) { + Object fieldValue = getFieldValue(hit, fieldName); + if (fieldValue != null) { + setToFill.add(fieldValue); + } } - - private Where buildTermsFilterFromResults(Set results, String fieldName) throws SqlParseException { - return new Condition(Where.CONN.AND, fieldName, null, Condition.OPERATOR.IN_TERMS, results.toArray(), null); + } + + private Where createWhereWithOrigianlAndTermsFilter( + String secondFieldName, Where originalWhereSecondTable, Set 
currentSetFromResults) + throws SqlParseException { + Where where = Where.newInstance(); + where.setConn(Where.CONN.AND); + where.addWhere(originalWhereSecondTable); + where.addWhere(buildTermsFilterFromResults(currentSetFromResults, secondFieldName)); + return where; + } + + private Where buildTermsFilterFromResults(Set results, String fieldName) + throws SqlParseException { + return new Condition( + Where.CONN.AND, fieldName, null, Condition.OPERATOR.IN_TERMS, results.toArray(), null); + } + + private Object getFieldValue(SearchHit hit, String fieldName) { + Map sourceAsMap = hit.getSourceAsMap(); + if (fieldName.contains(".")) { + String[] split = fieldName.split("\\."); + return Util.searchPathInMap(sourceAsMap, split); + } else if (sourceAsMap.containsKey(fieldName)) { + return sourceAsMap.get(fieldName); } - - private Object getFieldValue(SearchHit hit, String fieldName) { - Map sourceAsMap = hit.getSourceAsMap(); - if (fieldName.contains(".")) { - String[] split = fieldName.split("\\."); - return Util.searchPathInMap(sourceAsMap, split); - } else if (sourceAsMap.containsKey(fieldName)) { - return sourceAsMap.get(fieldName); - } - return null; + return null; + } + + private void fillFieldsOrder() { + List fieldsOrAliases = new ArrayList<>(); + Map firstTableFieldToAlias = this.builder.getFirstTableFieldToAlias(); + List firstTableFields = this.builder.getOriginalSelect(true).getFields(); + + for (Field field : firstTableFields) { + if (firstTableFieldToAlias.containsKey(field.getName())) { + fieldsOrAliases.add(field.getAlias()); + } else { + fieldsOrAliases.add(field.getName()); + } } - - private void fillFieldsOrder() { - List fieldsOrAliases = new ArrayList<>(); - Map firstTableFieldToAlias = this.builder.getFirstTableFieldToAlias(); - List firstTableFields = this.builder.getOriginalSelect(true).getFields(); - - for (Field field : firstTableFields) { - if (firstTableFieldToAlias.containsKey(field.getName())) { - fieldsOrAliases.add(field.getAlias()); - } 
else { - fieldsOrAliases.add(field.getName()); - } - } - Collections.sort(fieldsOrAliases); - - int fieldsSize = fieldsOrAliases.size(); - this.fieldsOrderFirstTable = new String[fieldsSize]; - fillFieldsArray(fieldsOrAliases, firstTableFieldToAlias, this.fieldsOrderFirstTable); - this.fieldsOrderSecondTable = new String[fieldsSize]; - fillFieldsArray(fieldsOrAliases, this.builder.getSecondTableFieldToAlias(), this.fieldsOrderSecondTable); + Collections.sort(fieldsOrAliases); + + int fieldsSize = fieldsOrAliases.size(); + this.fieldsOrderFirstTable = new String[fieldsSize]; + fillFieldsArray(fieldsOrAliases, firstTableFieldToAlias, this.fieldsOrderFirstTable); + this.fieldsOrderSecondTable = new String[fieldsSize]; + fillFieldsArray( + fieldsOrAliases, this.builder.getSecondTableFieldToAlias(), this.fieldsOrderSecondTable); + } + + private void fillFieldsArray( + List fieldsOrAliases, Map fieldsToAlias, String[] fields) { + Map aliasToField = inverseMap(fieldsToAlias); + for (int i = 0; i < fields.length; i++) { + String field = fieldsOrAliases.get(i); + if (aliasToField.containsKey(field)) { + field = aliasToField.get(field); + } + fields[i] = field; } + } - private void fillFieldsArray(List fieldsOrAliases, Map fieldsToAlias, String[] fields) { - Map aliasToField = inverseMap(fieldsToAlias); - for (int i = 0; i < fields.length; i++) { - String field = fieldsOrAliases.get(i); - if (aliasToField.containsKey(field)) { - field = aliasToField.get(field); - } - fields[i] = field; - } + private Map inverseMap(Map mapToInverse) { + Map inversedMap = new HashMap<>(); + for (Map.Entry entry : mapToInverse.entrySet()) { + inversedMap.put(entry.getValue(), entry.getKey()); } + return inversedMap; + } - private Map inverseMap(Map mapToInverse) { - Map inversedMap = new HashMap<>(); - for (Map.Entry entry : mapToInverse.entrySet()) { - inversedMap.put(entry.getValue(), entry.getKey()); - } - return inversedMap; + private void parseHintsIfAny(List hints) { + if (hints == null) 
{ + return; } - - private void parseHintsIfAny(List hints) { - if (hints == null) { - return; - } - for (Hint hint : hints) { - if (hint.getType() == HintType.MINUS_USE_TERMS_OPTIMIZATION) { - Object[] params = hint.getParams(); - if (params != null && params.length == 1) { - this.termsOptimizationWithToLower = (boolean) params[0]; - } - } else if (hint.getType() == HintType.MINUS_FETCH_AND_RESULT_LIMITS) { - Object[] params = hint.getParams(); - this.useScrolling = true; - this.maxDocsToFetchOnFirstTable = (int) params[0]; - this.maxDocsToFetchOnSecondTable = (int) params[1]; - this.maxDocsToFetchOnEachScrollShard = (int) params[2]; - } + for (Hint hint : hints) { + if (hint.getType() == HintType.MINUS_USE_TERMS_OPTIMIZATION) { + Object[] params = hint.getParams(); + if (params != null && params.length == 1) { + this.termsOptimizationWithToLower = (boolean) params[0]; } + } else if (hint.getType() == HintType.MINUS_FETCH_AND_RESULT_LIMITS) { + Object[] params = hint.getParams(); + this.useScrolling = true; + this.maxDocsToFetchOnFirstTable = (int) params[0]; + this.maxDocsToFetchOnSecondTable = (int) params[1]; + this.maxDocsToFetchOnEachScrollShard = (int) params[2]; + } } - + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MinusOneFieldAndOptimizationResult.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MinusOneFieldAndOptimizationResult.java index 3b4696bc1e..3d7206ab13 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MinusOneFieldAndOptimizationResult.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MinusOneFieldAndOptimizationResult.java @@ -3,30 +3,26 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.multi; import java.util.Set; import org.opensearch.search.SearchHit; - -/** - * Created by Eliran on 26/8/2016. - */ +/** Created by Eliran on 26/8/2016. 
*/ class MinusOneFieldAndOptimizationResult { - private Set fieldValues; - private SearchHit someHit; + private Set fieldValues; + private SearchHit someHit; - MinusOneFieldAndOptimizationResult(Set fieldValues, SearchHit someHit) { - this.fieldValues = fieldValues; - this.someHit = someHit; - } + MinusOneFieldAndOptimizationResult(Set fieldValues, SearchHit someHit) { + this.fieldValues = fieldValues; + this.someHit = someHit; + } - public Set getFieldValues() { - return fieldValues; - } + public Set getFieldValues() { + return fieldValues; + } - public SearchHit getSomeHit() { - return someHit; - } + public SearchHit getSomeHit() { + return someHit; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MultiRequestExecutorFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MultiRequestExecutorFactory.java index 239bc98772..03c6958076 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MultiRequestExecutorFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/MultiRequestExecutorFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.multi; import com.alibaba.druid.sql.ast.statement.SQLUnionOperator; @@ -12,20 +11,19 @@ import org.opensearch.sql.legacy.executor.ElasticHitsExecutor; import org.opensearch.sql.legacy.query.multi.MultiQueryRequestBuilder; -/** - * Created by Eliran on 21/8/2016. - */ +/** Created by Eliran on 21/8/2016. 
*/ public class MultiRequestExecutorFactory { - public static ElasticHitsExecutor createExecutor(Client client, MultiQueryRequestBuilder builder) { - SQLUnionOperator relation = builder.getRelation(); - switch (relation) { - case UNION_ALL: - case UNION: - return new UnionExecutor(client, builder); - case MINUS: - return new MinusExecutor(client, builder); - default: - throw new SemanticAnalysisException("Unsupported operator: " + relation); - } + public static ElasticHitsExecutor createExecutor( + Client client, MultiQueryRequestBuilder builder) { + SQLUnionOperator relation = builder.getRelation(); + switch (relation) { + case UNION_ALL: + case UNION: + return new UnionExecutor(client, builder); + case MINUS: + return new MinusExecutor(client, builder); + default: + throw new SemanticAnalysisException("Unsupported operator: " + relation); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/UnionExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/UnionExecutor.java index 4b4080156d..6b8b64c4e8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/UnionExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/executor/multi/UnionExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.multi; import java.util.ArrayList; @@ -23,87 +22,92 @@ import org.opensearch.sql.legacy.query.multi.MultiQueryRequestBuilder; import org.opensearch.sql.legacy.utils.Util; -/** - * Created by Eliran on 21/8/2016. - */ +/** Created by Eliran on 21/8/2016. 
*/ public class UnionExecutor implements ElasticHitsExecutor { - private MultiQueryRequestBuilder multiQueryBuilder; - private SearchHits results; - private Client client; - private int currentId; + private MultiQueryRequestBuilder multiQueryBuilder; + private SearchHits results; + private Client client; + private int currentId; - public UnionExecutor(Client client, MultiQueryRequestBuilder builder) { - multiQueryBuilder = builder; - this.client = client; - currentId = 0; - } + public UnionExecutor(Client client, MultiQueryRequestBuilder builder) { + multiQueryBuilder = builder; + this.client = client; + currentId = 0; + } - @Override - public void run() { - SearchResponse firstResponse = this.multiQueryBuilder.getFirstSearchRequest().get(); - SearchHit[] hits = firstResponse.getHits().getHits(); - List unionHits = new ArrayList<>(hits.length); - fillInternalSearchHits(unionHits, hits, this.multiQueryBuilder.getFirstTableFieldToAlias()); - SearchResponse secondResponse = this.multiQueryBuilder.getSecondSearchRequest().get(); - fillInternalSearchHits(unionHits, secondResponse.getHits().getHits(), - this.multiQueryBuilder.getSecondTableFieldToAlias()); - int totalSize = unionHits.size(); - SearchHit[] unionHitsArr = unionHits.toArray(new SearchHit[totalSize]); - this.results = new SearchHits(unionHitsArr, new TotalHits(totalSize, Relation.EQUAL_TO), 1.0f); - } + @Override + public void run() { + SearchResponse firstResponse = this.multiQueryBuilder.getFirstSearchRequest().get(); + SearchHit[] hits = firstResponse.getHits().getHits(); + List unionHits = new ArrayList<>(hits.length); + fillInternalSearchHits(unionHits, hits, this.multiQueryBuilder.getFirstTableFieldToAlias()); + SearchResponse secondResponse = this.multiQueryBuilder.getSecondSearchRequest().get(); + fillInternalSearchHits( + unionHits, + secondResponse.getHits().getHits(), + this.multiQueryBuilder.getSecondTableFieldToAlias()); + int totalSize = unionHits.size(); + SearchHit[] unionHitsArr = 
unionHits.toArray(new SearchHit[totalSize]); + this.results = new SearchHits(unionHitsArr, new TotalHits(totalSize, Relation.EQUAL_TO), 1.0f); + } - private void fillInternalSearchHits(List unionHits, SearchHit[] hits, - Map fieldNameToAlias) { - for (SearchHit hit : hits) { - Map documentFields = new HashMap<>(); - Map metaFields = new HashMap<>(); - hit.getFields().forEach((fieldName, docField) -> - (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? metaFields : documentFields).put(fieldName, docField)); - SearchHit searchHit = new SearchHit(currentId, hit.getId(), documentFields, metaFields); - searchHit.sourceRef(hit.getSourceRef()); - searchHit.getSourceAsMap().clear(); - Map sourceAsMap = hit.getSourceAsMap(); - if (!fieldNameToAlias.isEmpty()) { - updateFieldNamesToAlias(sourceAsMap, fieldNameToAlias); - } - searchHit.getSourceAsMap().putAll(sourceAsMap); - currentId++; - unionHits.add(searchHit); - } + private void fillInternalSearchHits( + List unionHits, SearchHit[] hits, Map fieldNameToAlias) { + for (SearchHit hit : hits) { + Map documentFields = new HashMap<>(); + Map metaFields = new HashMap<>(); + hit.getFields() + .forEach( + (fieldName, docField) -> + (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) + ? 
metaFields + : documentFields) + .put(fieldName, docField)); + SearchHit searchHit = new SearchHit(currentId, hit.getId(), documentFields, metaFields); + searchHit.sourceRef(hit.getSourceRef()); + searchHit.getSourceAsMap().clear(); + Map sourceAsMap = hit.getSourceAsMap(); + if (!fieldNameToAlias.isEmpty()) { + updateFieldNamesToAlias(sourceAsMap, fieldNameToAlias); + } + searchHit.getSourceAsMap().putAll(sourceAsMap); + currentId++; + unionHits.add(searchHit); } + } - - private void updateFieldNamesToAlias(Map sourceAsMap, Map fieldNameToAlias) { - for (Map.Entry fieldToAlias : fieldNameToAlias.entrySet()) { - String fieldName = fieldToAlias.getKey(); - Object value = null; - Map deleteFrom = null; - if (fieldName.contains(".")) { - String[] split = fieldName.split("\\."); - String[] path = Arrays.copyOf(split, split.length - 1); - Object placeInMap = Util.searchPathInMap(sourceAsMap, path); - if (placeInMap != null) { - if (!Map.class.isAssignableFrom(placeInMap.getClass())) { - continue; - } - } - deleteFrom = (Map) placeInMap; - value = deleteFrom.get(split[split.length - 1]); - } else if (sourceAsMap.containsKey(fieldName)) { - value = sourceAsMap.get(fieldName); - deleteFrom = sourceAsMap; - } - if (value != null) { - sourceAsMap.put(fieldToAlias.getValue(), value); - deleteFrom.remove(fieldName); - } + private void updateFieldNamesToAlias( + Map sourceAsMap, Map fieldNameToAlias) { + for (Map.Entry fieldToAlias : fieldNameToAlias.entrySet()) { + String fieldName = fieldToAlias.getKey(); + Object value = null; + Map deleteFrom = null; + if (fieldName.contains(".")) { + String[] split = fieldName.split("\\."); + String[] path = Arrays.copyOf(split, split.length - 1); + Object placeInMap = Util.searchPathInMap(sourceAsMap, path); + if (placeInMap != null) { + if (!Map.class.isAssignableFrom(placeInMap.getClass())) { + continue; + } } - Util.clearEmptyPaths(sourceAsMap); + deleteFrom = (Map) placeInMap; + value = deleteFrom.get(split[split.length - 1]); + } 
else if (sourceAsMap.containsKey(fieldName)) { + value = sourceAsMap.get(fieldName); + deleteFrom = sourceAsMap; + } + if (value != null) { + sourceAsMap.put(fieldToAlias.getValue(), value); + deleteFrom.remove(fieldName); + } } + Util.clearEmptyPaths(sourceAsMap); + } - @Override - public SearchHits getHits() { - return results; - } + @Override + public SearchHits getHits() { + return results; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/Expression.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/Expression.java index a858d99d3f..3a9ac5a66d 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/Expression.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/Expression.java @@ -3,21 +3,18 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core; - import org.opensearch.sql.legacy.expression.domain.BindingTuple; import org.opensearch.sql.legacy.expression.model.ExprValue; -/** - * The definition of the Expression. - */ +/** The definition of the Expression. */ public interface Expression { - /** - * Evaluate the result on the BindingTuple context. - * @param tuple BindingTuple - * @return ExprValue - */ - ExprValue valueOf(BindingTuple tuple); + /** + * Evaluate the result on the BindingTuple context. 
+ * + * @param tuple BindingTuple + * @return ExprValue + */ + ExprValue valueOf(BindingTuple tuple); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/ExpressionFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/ExpressionFactory.java index cf5fd4627f..1df81e34b3 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/ExpressionFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/ExpressionFactory.java @@ -3,10 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core; - import static org.opensearch.sql.legacy.expression.core.operator.ScalarOperation.ABS; import static org.opensearch.sql.legacy.expression.core.operator.ScalarOperation.ACOS; import static org.opensearch.sql.legacy.expression.core.operator.ScalarOperation.ADD; @@ -38,89 +36,80 @@ import org.opensearch.sql.legacy.expression.domain.BindingTuple; import org.opensearch.sql.legacy.expression.model.ExprValue; - -/** - * The definition of Expression factory. - */ +/** The definition of Expression factory. 
*/ public class ExpressionFactory { - private static final Map operationExpressionBuilderMap = - new ImmutableMap.Builder() - .put(ADD, ArithmeticFunctionFactory.add()) - .put(SUBTRACT, ArithmeticFunctionFactory.subtract()) - .put(MULTIPLY, ArithmeticFunctionFactory.multiply()) - .put(DIVIDE, ArithmeticFunctionFactory.divide()) - .put(MODULES, ArithmeticFunctionFactory.modules()) - .put(ABS, ArithmeticFunctionFactory.abs()) - .put(ACOS, ArithmeticFunctionFactory.acos()) - .put(ASIN, ArithmeticFunctionFactory.asin()) - .put(ATAN, ArithmeticFunctionFactory.atan()) - .put(ATAN2, ArithmeticFunctionFactory.atan2()) - .put(TAN, ArithmeticFunctionFactory.tan()) - .put(CBRT, ArithmeticFunctionFactory.cbrt()) - .put(CEIL, ArithmeticFunctionFactory.ceil()) - .put(COS, ArithmeticFunctionFactory.cos()) - .put(COSH, ArithmeticFunctionFactory.cosh()) - .put(EXP, ArithmeticFunctionFactory.exp()) - .put(FLOOR, ArithmeticFunctionFactory.floor()) - .put(LN, ArithmeticFunctionFactory.ln()) - .put(LOG, ArithmeticFunctionFactory.log()) - .put(LOG2, ArithmeticFunctionFactory.log2()) - .put(LOG10, ArithmeticFunctionFactory.log10()) - .build(); + private static final Map operationExpressionBuilderMap = + new ImmutableMap.Builder() + .put(ADD, ArithmeticFunctionFactory.add()) + .put(SUBTRACT, ArithmeticFunctionFactory.subtract()) + .put(MULTIPLY, ArithmeticFunctionFactory.multiply()) + .put(DIVIDE, ArithmeticFunctionFactory.divide()) + .put(MODULES, ArithmeticFunctionFactory.modules()) + .put(ABS, ArithmeticFunctionFactory.abs()) + .put(ACOS, ArithmeticFunctionFactory.acos()) + .put(ASIN, ArithmeticFunctionFactory.asin()) + .put(ATAN, ArithmeticFunctionFactory.atan()) + .put(ATAN2, ArithmeticFunctionFactory.atan2()) + .put(TAN, ArithmeticFunctionFactory.tan()) + .put(CBRT, ArithmeticFunctionFactory.cbrt()) + .put(CEIL, ArithmeticFunctionFactory.ceil()) + .put(COS, ArithmeticFunctionFactory.cos()) + .put(COSH, ArithmeticFunctionFactory.cosh()) + .put(EXP, ArithmeticFunctionFactory.exp()) + 
.put(FLOOR, ArithmeticFunctionFactory.floor()) + .put(LN, ArithmeticFunctionFactory.ln()) + .put(LOG, ArithmeticFunctionFactory.log()) + .put(LOG2, ArithmeticFunctionFactory.log2()) + .put(LOG10, ArithmeticFunctionFactory.log10()) + .build(); - public static Expression of(ScalarOperation op, List expressions) { - return operationExpressionBuilderMap.get(op).build(expressions); - } + public static Expression of(ScalarOperation op, List expressions) { + return operationExpressionBuilderMap.get(op).build(expressions); + } - /** - * Ref Expression. Define the binding name which could be resolved in {@link BindingTuple} - */ - public static Expression ref(String bindingName) { - return new Expression() { - @Override - public ExprValue valueOf(BindingTuple tuple) { - return tuple.resolve(bindingName); - } + /** Ref Expression. Define the binding name which could be resolved in {@link BindingTuple} */ + public static Expression ref(String bindingName) { + return new Expression() { + @Override + public ExprValue valueOf(BindingTuple tuple) { + return tuple.resolve(bindingName); + } - @Override - public String toString() { - return String.format("%s", bindingName); - } - }; - } + @Override + public String toString() { + return String.format("%s", bindingName); + } + }; + } - /** - * Literal Expression. - */ - public static Expression literal(ExprValue value) { - return new Expression() { - @Override - public ExprValue valueOf(BindingTuple tuple) { - return value; - } + /** Literal Expression. */ + public static Expression literal(ExprValue value) { + return new Expression() { + @Override + public ExprValue valueOf(BindingTuple tuple) { + return value; + } - @Override - public String toString() { - return String.format("%s", value); - } - }; - } + @Override + public String toString() { + return String.format("%s", value); + } + }; + } - /** - * Cast Expression. 
- */ - public static Expression cast(Expression expr) { - return new Expression() { - @Override - public ExprValue valueOf(BindingTuple tuple) { - return expr.valueOf(tuple); - } + /** Cast Expression. */ + public static Expression cast(Expression expr) { + return new Expression() { + @Override + public ExprValue valueOf(BindingTuple tuple) { + return expr.valueOf(tuple); + } - @Override - public String toString() { - return String.format("cast(%s)", expr); - } - }; - } + @Override + public String toString() { + return String.format("cast(%s)", expr); + } + }; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/ArithmeticFunctionFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/ArithmeticFunctionFactory.java index afa6f6c439..c1de63fe88 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/ArithmeticFunctionFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/ArithmeticFunctionFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core.builder; import org.opensearch.sql.legacy.expression.core.operator.BinaryScalarOperator; @@ -12,205 +11,130 @@ import org.opensearch.sql.legacy.expression.core.operator.ScalarOperation; import org.opensearch.sql.legacy.expression.core.operator.UnaryScalarOperator; -/** - * The definition of arithmetic function builder factory. - */ +/** The definition of arithmetic function builder factory. 
*/ public class ArithmeticFunctionFactory { - public static ExpressionBuilder add() { - return new BinaryExpressionBuilder( - new BinaryScalarOperator( - ScalarOperation.ADD, - Math::addExact, - Math::addExact, - Double::sum, - Float::sum)); - } - - public static ExpressionBuilder subtract() { - return new BinaryExpressionBuilder( - new BinaryScalarOperator( - ScalarOperation.ADD, - Math::subtractExact, - Math::subtractExact, - (v1, v2) -> v1 - v2, - (v1, v2) -> v1 - v2)); - } - - public static ExpressionBuilder multiply() { - return new BinaryExpressionBuilder( - new BinaryScalarOperator( - ScalarOperation.MULTIPLY, - Math::multiplyExact, - Math::multiplyExact, - (v1, v2) -> v1 * v2, - (v1, v2) -> v1 * v2 - )); - } - - public static ExpressionBuilder divide() { - return new BinaryExpressionBuilder( - new BinaryScalarOperator( - ScalarOperation.DIVIDE, - (v1, v2) -> v1 / v2, - (v1, v2) -> v1 / v2, - (v1, v2) -> v1 / v2, - (v1, v2) -> v1 / v2 - )); - } - - public static ExpressionBuilder modules() { - return new BinaryExpressionBuilder( - new BinaryScalarOperator( - ScalarOperation.MODULES, - (v1, v2) -> v1 % v2, - (v1, v2) -> v1 % v2, - (v1, v2) -> v1 % v2, - (v1, v2) -> v1 % v2 - )); - } - - public static ExpressionBuilder abs() { - return new UnaryExpressionBuilder( - new UnaryScalarOperator( - ScalarOperation.ABS, - Math::abs, - Math::abs, - Math::abs, - Math::abs - )); - } - - public static ExpressionBuilder acos() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.ACOS, - Math::acos - )); - } - - public static ExpressionBuilder asin() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.ASIN, - Math::asin - ) - ); - } - - public static ExpressionBuilder atan() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.ATAN, - Math::atan - ) - ); - } - - public static ExpressionBuilder atan2() { - return new BinaryExpressionBuilder( - new 
DoubleBinaryScalarOperator( - ScalarOperation.ATAN2, - Math::atan2 - ) - ); - } - - public static ExpressionBuilder tan() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.TAN, - Math::tan - ) - ); - } - - public static ExpressionBuilder cbrt() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.CBRT, - Math::cbrt - ) - ); - } - - public static ExpressionBuilder ceil() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.CEIL, - Math::ceil - ) - ); - } - - public static ExpressionBuilder cos() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.COS, - Math::cos - ) - ); - } - - public static ExpressionBuilder cosh() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.COSH, - Math::cosh - ) - ); - } - - public static ExpressionBuilder exp() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.EXP, - Math::exp - ) - ); - } - - public static ExpressionBuilder floor() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.FLOOR, - Math::floor - ) - ); - } - - public static ExpressionBuilder ln() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.LN, - Math::log - ) - ); - } - - public static ExpressionBuilder log() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.LOG, - Math::log - ) - ); - } - - public static ExpressionBuilder log2() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.LOG2, - (x) -> Math.log(x) / Math.log(2d) - ) - ); - } - - public static ExpressionBuilder log10() { - return new UnaryExpressionBuilder( - new DoubleUnaryScalarOperator( - ScalarOperation.LOG10, - Math::log10 - ) - ); - } + public static ExpressionBuilder add() { + return new BinaryExpressionBuilder( + new 
BinaryScalarOperator( + ScalarOperation.ADD, Math::addExact, Math::addExact, Double::sum, Float::sum)); + } + + public static ExpressionBuilder subtract() { + return new BinaryExpressionBuilder( + new BinaryScalarOperator( + ScalarOperation.ADD, + Math::subtractExact, + Math::subtractExact, + (v1, v2) -> v1 - v2, + (v1, v2) -> v1 - v2)); + } + + public static ExpressionBuilder multiply() { + return new BinaryExpressionBuilder( + new BinaryScalarOperator( + ScalarOperation.MULTIPLY, + Math::multiplyExact, + Math::multiplyExact, + (v1, v2) -> v1 * v2, + (v1, v2) -> v1 * v2)); + } + + public static ExpressionBuilder divide() { + return new BinaryExpressionBuilder( + new BinaryScalarOperator( + ScalarOperation.DIVIDE, + (v1, v2) -> v1 / v2, + (v1, v2) -> v1 / v2, + (v1, v2) -> v1 / v2, + (v1, v2) -> v1 / v2)); + } + + public static ExpressionBuilder modules() { + return new BinaryExpressionBuilder( + new BinaryScalarOperator( + ScalarOperation.MODULES, + (v1, v2) -> v1 % v2, + (v1, v2) -> v1 % v2, + (v1, v2) -> v1 % v2, + (v1, v2) -> v1 % v2)); + } + + public static ExpressionBuilder abs() { + return new UnaryExpressionBuilder( + new UnaryScalarOperator(ScalarOperation.ABS, Math::abs, Math::abs, Math::abs, Math::abs)); + } + + public static ExpressionBuilder acos() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.ACOS, Math::acos)); + } + + public static ExpressionBuilder asin() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.ASIN, Math::asin)); + } + + public static ExpressionBuilder atan() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.ATAN, Math::atan)); + } + + public static ExpressionBuilder atan2() { + return new BinaryExpressionBuilder( + new DoubleBinaryScalarOperator(ScalarOperation.ATAN2, Math::atan2)); + } + + public static ExpressionBuilder tan() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.TAN, 
Math::tan)); + } + + public static ExpressionBuilder cbrt() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.CBRT, Math::cbrt)); + } + + public static ExpressionBuilder ceil() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.CEIL, Math::ceil)); + } + + public static ExpressionBuilder cos() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.COS, Math::cos)); + } + + public static ExpressionBuilder cosh() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.COSH, Math::cosh)); + } + + public static ExpressionBuilder exp() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.EXP, Math::exp)); + } + + public static ExpressionBuilder floor() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.FLOOR, Math::floor)); + } + + public static ExpressionBuilder ln() { + return new UnaryExpressionBuilder(new DoubleUnaryScalarOperator(ScalarOperation.LN, Math::log)); + } + + public static ExpressionBuilder log() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.LOG, Math::log)); + } + + public static ExpressionBuilder log2() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.LOG2, (x) -> Math.log(x) / Math.log(2d))); + } + + public static ExpressionBuilder log10() { + return new UnaryExpressionBuilder( + new DoubleUnaryScalarOperator(ScalarOperation.LOG10, Math::log10)); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/BinaryExpressionBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/BinaryExpressionBuilder.java index 99ddd50248..fcf08180a5 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/BinaryExpressionBuilder.java +++ 
b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/BinaryExpressionBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core.builder; import java.util.Arrays; @@ -14,33 +13,32 @@ import org.opensearch.sql.legacy.expression.domain.BindingTuple; import org.opensearch.sql.legacy.expression.model.ExprValue; -/** - * The definition of the Expression Builder which has two arguments. - */ +/** The definition of the Expression Builder which has two arguments. */ @RequiredArgsConstructor public class BinaryExpressionBuilder implements ExpressionBuilder { - private final ScalarOperator op; + private final ScalarOperator op; - /** - * Build the expression with two {@link Expression} as arguments. - * @param expressionList expression list. - * @return expression. - */ - @Override - public Expression build(List expressionList) { - Expression e1 = expressionList.get(0); - Expression e2 = expressionList.get(1); + /** + * Build the expression with two {@link Expression} as arguments. + * + * @param expressionList expression list. + * @return expression. 
+ */ + @Override + public Expression build(List expressionList) { + Expression e1 = expressionList.get(0); + Expression e2 = expressionList.get(1); - return new Expression() { - @Override - public ExprValue valueOf(BindingTuple tuple) { - return op.apply(Arrays.asList(e1.valueOf(tuple), e2.valueOf(tuple))); - } + return new Expression() { + @Override + public ExprValue valueOf(BindingTuple tuple) { + return op.apply(Arrays.asList(e1.valueOf(tuple), e2.valueOf(tuple))); + } - @Override - public String toString() { - return String.format("%s(%s,%s)", op.name(), e1, e2); - } - }; - } + @Override + public String toString() { + return String.format("%s(%s,%s)", op.name(), e1, e2); + } + }; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/ExpressionBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/ExpressionBuilder.java index 5f2cbb5776..76744d7d34 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/ExpressionBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/ExpressionBuilder.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core.builder; import java.util.List; import org.opensearch.sql.legacy.expression.core.Expression; -/** - * The definition of the {@link Expression} builder. - */ +/** The definition of the {@link Expression} builder. 
*/ public interface ExpressionBuilder { - Expression build(List expressionList); + Expression build(List expressionList); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/UnaryExpressionBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/UnaryExpressionBuilder.java index f9bdce8ce4..3d40c3a527 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/UnaryExpressionBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/builder/UnaryExpressionBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core.builder; import java.util.Arrays; @@ -14,32 +13,31 @@ import org.opensearch.sql.legacy.expression.domain.BindingTuple; import org.opensearch.sql.legacy.expression.model.ExprValue; -/** - * The definition of the Expression Builder which has one argument. - */ +/** The definition of the Expression Builder which has one argument. */ @RequiredArgsConstructor public class UnaryExpressionBuilder implements ExpressionBuilder { - private final ScalarOperator op; + private final ScalarOperator op; - /** - * Build the expression with two {@link Expression} as arguments. - * @param expressionList expression list. - * @return expression. - */ - @Override - public Expression build(List expressionList) { - Expression expression = expressionList.get(0); + /** + * Build the expression with two {@link Expression} as arguments. + * + * @param expressionList expression list. + * @return expression. 
+ */ + @Override + public Expression build(List expressionList) { + Expression expression = expressionList.get(0); - return new Expression() { - @Override - public ExprValue valueOf(BindingTuple tuple) { - return op.apply(Arrays.asList(expression.valueOf(tuple))); - } + return new Expression() { + @Override + public ExprValue valueOf(BindingTuple tuple) { + return op.apply(Arrays.asList(expression.valueOf(tuple))); + } - @Override - public String toString() { - return String.format("%s(%s)", op.name(), expression); - } - }; - } + @Override + public String toString() { + return String.format("%s(%s)", op.name(), expression); + } + }; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/BinaryScalarOperator.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/BinaryScalarOperator.java index 70d47a3e83..02d29e1ed9 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/BinaryScalarOperator.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/BinaryScalarOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core.operator; import static org.opensearch.sql.legacy.expression.model.ExprValue.ExprValueKind.DOUBLE_VALUE; @@ -24,54 +23,53 @@ import org.opensearch.sql.legacy.expression.model.ExprValueFactory; /** - * Binary Scalar Operator take two {@link ExprValue} as arguments ans return one {@link ExprValue} as result. + * Binary Scalar Operator take two {@link ExprValue} as arguments ans return one {@link ExprValue} + * as result. 
*/ @RequiredArgsConstructor public class BinaryScalarOperator implements ScalarOperator { - private static final Map numberTypeOrder = - new ImmutableMap.Builder() - .put(INTEGER_VALUE, 0) - .put(LONG_VALUE, 1) - .put(DOUBLE_VALUE, 2) - .put(FLOAT_VALUE, 3) - .build(); + private static final Map numberTypeOrder = + new ImmutableMap.Builder() + .put(INTEGER_VALUE, 0) + .put(LONG_VALUE, 1) + .put(DOUBLE_VALUE, 2) + .put(FLOAT_VALUE, 3) + .build(); - private final ScalarOperation op; - private final BiFunction integerFunc; - private final BiFunction longFunc; - private final BiFunction doubleFunc; - private final BiFunction floatFunc; + private final ScalarOperation op; + private final BiFunction integerFunc; + private final BiFunction longFunc; + private final BiFunction doubleFunc; + private final BiFunction floatFunc; - @Override - public ExprValue apply(List valueList) { - ExprValue v1 = valueList.get(0); - ExprValue v2 = valueList.get(1); - if (!numberTypeOrder.containsKey(v1.kind()) || !numberTypeOrder.containsKey(v2.kind())) { - throw new RuntimeException( - String.format("unexpected operation type: %s(%s, %s) ", op.name(), v1.kind(), v2.kind())); - } - ExprValue.ExprValueKind expectedType = numberTypeOrder.get(v1.kind()) > numberTypeOrder.get(v2.kind()) - ? 
v1.kind() : v2.kind(); - switch (expectedType) { - case DOUBLE_VALUE: - return ExprValueFactory.from(doubleFunc.apply(getDoubleValue(v1), getDoubleValue(v2))); - case INTEGER_VALUE: - return ExprValueFactory - .from(integerFunc.apply(getIntegerValue(v1), getIntegerValue(v2))); - case LONG_VALUE: - return ExprValueFactory - .from(longFunc.apply(getLongValue(v1), getLongValue(v2))); - case FLOAT_VALUE: - return ExprValueFactory - .from(floatFunc.apply(getFloatValue(v1), getFloatValue(v2))); - default: - throw new RuntimeException(String.format("unexpected operation type: %s(%s, %s)", op.name(), v1.kind(), - v2.kind())); - } + @Override + public ExprValue apply(List valueList) { + ExprValue v1 = valueList.get(0); + ExprValue v2 = valueList.get(1); + if (!numberTypeOrder.containsKey(v1.kind()) || !numberTypeOrder.containsKey(v2.kind())) { + throw new RuntimeException( + String.format("unexpected operation type: %s(%s, %s) ", op.name(), v1.kind(), v2.kind())); } - - @Override - public String name() { - return op.name(); + ExprValue.ExprValueKind expectedType = + numberTypeOrder.get(v1.kind()) > numberTypeOrder.get(v2.kind()) ? 
v1.kind() : v2.kind(); + switch (expectedType) { + case DOUBLE_VALUE: + return ExprValueFactory.from(doubleFunc.apply(getDoubleValue(v1), getDoubleValue(v2))); + case INTEGER_VALUE: + return ExprValueFactory.from(integerFunc.apply(getIntegerValue(v1), getIntegerValue(v2))); + case LONG_VALUE: + return ExprValueFactory.from(longFunc.apply(getLongValue(v1), getLongValue(v2))); + case FLOAT_VALUE: + return ExprValueFactory.from(floatFunc.apply(getFloatValue(v1), getFloatValue(v2))); + default: + throw new RuntimeException( + String.format( + "unexpected operation type: %s(%s, %s)", op.name(), v1.kind(), v2.kind())); } + } + + @Override + public String name() { + return op.name(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/DoubleBinaryScalarOperator.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/DoubleBinaryScalarOperator.java index 2555b2a53c..12e7aacbaa 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/DoubleBinaryScalarOperator.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/DoubleBinaryScalarOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core.operator; import static org.opensearch.sql.legacy.expression.model.ExprValueUtils.getDoubleValue; @@ -16,37 +15,41 @@ import org.opensearch.sql.legacy.expression.model.ExprValueFactory; /** - * Double Binary Scalar Operator take two {@link ExprValue} which have double value as arguments ans return one - * {@link ExprDoubleValue} as result. + * Double Binary Scalar Operator take two {@link ExprValue} which have double value as arguments ans + * return one {@link ExprDoubleValue} as result. 
*/ @RequiredArgsConstructor public class DoubleBinaryScalarOperator implements ScalarOperator { - private final ScalarOperation op; - private final BiFunction doubleFunc; + private final ScalarOperation op; + private final BiFunction doubleFunc; - @Override - public ExprValue apply(List exprValues) { - ExprValue exprValue1 = exprValues.get(0); - ExprValue exprValue2 = exprValues.get(1); - if (exprValue1.kind() != exprValue2.kind()) { - throw new RuntimeException(String.format("unexpected operation type: %s(%s,%s)", op.name(), - exprValue1.kind(), exprValue2.kind())); - } - switch (exprValue1.kind()) { - case DOUBLE_VALUE: - case INTEGER_VALUE: - case LONG_VALUE: - case FLOAT_VALUE: - return ExprValueFactory.from(doubleFunc.apply(getDoubleValue(exprValue1), - getDoubleValue(exprValue2))); - default: - throw new RuntimeException(String.format("unexpected operation type: %s(%s,%s)", op.name(), - exprValue1.kind(), exprValue2.kind())); - } + @Override + public ExprValue apply(List exprValues) { + ExprValue exprValue1 = exprValues.get(0); + ExprValue exprValue2 = exprValues.get(1); + if (exprValue1.kind() != exprValue2.kind()) { + throw new RuntimeException( + String.format( + "unexpected operation type: %s(%s,%s)", + op.name(), exprValue1.kind(), exprValue2.kind())); } - - @Override - public String name() { - return op.name(); + switch (exprValue1.kind()) { + case DOUBLE_VALUE: + case INTEGER_VALUE: + case LONG_VALUE: + case FLOAT_VALUE: + return ExprValueFactory.from( + doubleFunc.apply(getDoubleValue(exprValue1), getDoubleValue(exprValue2))); + default: + throw new RuntimeException( + String.format( + "unexpected operation type: %s(%s,%s)", + op.name(), exprValue1.kind(), exprValue2.kind())); } + } + + @Override + public String name() { + return op.name(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/DoubleUnaryScalarOperator.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/DoubleUnaryScalarOperator.java index 736216472f..8242eee8a6 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/DoubleUnaryScalarOperator.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/DoubleUnaryScalarOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core.operator; import static org.opensearch.sql.legacy.expression.model.ExprValueUtils.getDoubleValue; @@ -16,31 +15,31 @@ import org.opensearch.sql.legacy.expression.model.ExprValueFactory; /** - * Unary Binary Scalar Operator take one {@link ExprValue} which have double value as arguments ans return one - * {@link ExprDoubleValue} as result. + * Unary Binary Scalar Operator take one {@link ExprValue} which have double value as arguments ans + * return one {@link ExprDoubleValue} as result. */ @RequiredArgsConstructor public class DoubleUnaryScalarOperator implements ScalarOperator { - private final ScalarOperation op; - private final Function doubleFunc; + private final ScalarOperation op; + private final Function doubleFunc; - @Override - public ExprValue apply(List exprValues) { - ExprValue exprValue = exprValues.get(0); - switch (exprValue.kind()) { - case DOUBLE_VALUE: - case INTEGER_VALUE: - case LONG_VALUE: - case FLOAT_VALUE: - return ExprValueFactory.from(doubleFunc.apply(getDoubleValue(exprValue))); - default: - throw new RuntimeException(String.format("unexpected operation type: %s(%s)", - op.name(), exprValue.kind())); - } + @Override + public ExprValue apply(List exprValues) { + ExprValue exprValue = exprValues.get(0); + switch (exprValue.kind()) { + case DOUBLE_VALUE: + case INTEGER_VALUE: + case LONG_VALUE: + case FLOAT_VALUE: + return ExprValueFactory.from(doubleFunc.apply(getDoubleValue(exprValue))); + default: + throw new RuntimeException( + String.format("unexpected operation type: %s(%s)", 
op.name(), exprValue.kind())); } + } - @Override - public String name() { - return op.name(); - } + @Override + public String name() { + return op.name(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/ScalarOperation.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/ScalarOperation.java index 0be4dfa786..ea2a698921 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/ScalarOperation.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/ScalarOperation.java @@ -3,39 +3,36 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core.operator; import lombok.Getter; import lombok.RequiredArgsConstructor; -/** - * The definition of the Scalar Operation. - */ +/** The definition of the Scalar Operation. */ @Getter @RequiredArgsConstructor public enum ScalarOperation { - ADD("add"), - SUBTRACT("subtract"), - MULTIPLY("multiply"), - DIVIDE("divide"), - MODULES("modules"), - ABS("abs"), - ACOS("acos"), - ASIN("asin"), - ATAN("atan"), - ATAN2("atan2"), - TAN("tan"), - CBRT("cbrt"), - CEIL("ceil"), - COS("cos"), - COSH("cosh"), - EXP("exp"), - FLOOR("floor"), - LN("ln"), - LOG("log"), - LOG2("log2"), - LOG10("log10"); + ADD("add"), + SUBTRACT("subtract"), + MULTIPLY("multiply"), + DIVIDE("divide"), + MODULES("modules"), + ABS("abs"), + ACOS("acos"), + ASIN("asin"), + ATAN("atan"), + ATAN2("atan2"), + TAN("tan"), + CBRT("cbrt"), + CEIL("ceil"), + COS("cos"), + COSH("cosh"), + EXP("exp"), + FLOOR("floor"), + LN("ln"), + LOG("log"), + LOG2("log2"), + LOG10("log10"); - private final String name; + private final String name; } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/ScalarOperator.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/ScalarOperator.java index bfb3a75afb..c0c3360afc 100644 --- 
a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/ScalarOperator.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/ScalarOperator.java @@ -3,26 +3,25 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core.operator; import java.util.List; import org.opensearch.sql.legacy.expression.model.ExprValue; -/** - * Scalar Operator is a function has one or more arguments and return a single value. - */ +/** Scalar Operator is a function has one or more arguments and return a single value. */ public interface ScalarOperator { - /** - * Apply the operator to the input arguments. - * @param valueList argument list. - * @return result. - */ - ExprValue apply(List valueList); + /** + * Apply the operator to the input arguments. + * + * @param valueList argument list. + * @return result. + */ + ExprValue apply(List valueList); - /** - * The name of the operator. - * @return name. - */ - String name(); + /** + * The name of the operator. + * + * @return name. + */ + String name(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/UnaryScalarOperator.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/UnaryScalarOperator.java index a6bfc48a1a..deb979f767 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/UnaryScalarOperator.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/core/operator/UnaryScalarOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.core.operator; import static org.opensearch.sql.legacy.expression.model.ExprValueUtils.getDoubleValue; @@ -18,36 +17,37 @@ import org.opensearch.sql.legacy.expression.model.ExprValueFactory; /** - * Unary Scalar Operator take one {@link ExprValue} as arguments ans return one {@link ExprValue} as result. 
+ * Unary Scalar Operator take one {@link ExprValue} as arguments ans return one {@link ExprValue} as + * result. */ @RequiredArgsConstructor public class UnaryScalarOperator implements ScalarOperator { - private final ScalarOperation op; - private final Function integerFunc; - private final Function longFunc; - private final Function doubleFunc; - private final Function floatFunc; + private final ScalarOperation op; + private final Function integerFunc; + private final Function longFunc; + private final Function doubleFunc; + private final Function floatFunc; - @Override - public ExprValue apply(List exprValues) { - ExprValue exprValue = exprValues.get(0); - switch (exprValue.kind()) { - case DOUBLE_VALUE: - return ExprValueFactory.from(doubleFunc.apply(getDoubleValue(exprValue))); - case INTEGER_VALUE: - return ExprValueFactory.from(integerFunc.apply(getIntegerValue(exprValue))); - case LONG_VALUE: - return ExprValueFactory.from(longFunc.apply(getLongValue(exprValue))); - case FLOAT_VALUE: - return ExprValueFactory.from(floatFunc.apply(getFloatValue(exprValue))); - default: - throw new RuntimeException(String.format("unexpected operation type: %s(%s)", op.name(), - exprValue.kind())); - } + @Override + public ExprValue apply(List exprValues) { + ExprValue exprValue = exprValues.get(0); + switch (exprValue.kind()) { + case DOUBLE_VALUE: + return ExprValueFactory.from(doubleFunc.apply(getDoubleValue(exprValue))); + case INTEGER_VALUE: + return ExprValueFactory.from(integerFunc.apply(getIntegerValue(exprValue))); + case LONG_VALUE: + return ExprValueFactory.from(longFunc.apply(getLongValue(exprValue))); + case FLOAT_VALUE: + return ExprValueFactory.from(floatFunc.apply(getFloatValue(exprValue))); + default: + throw new RuntimeException( + String.format("unexpected operation type: %s(%s)", op.name(), exprValue.kind())); } + } - @Override - public String name() { - return op.name(); - } + @Override + public String name() { + return op.name(); + } } diff --git 
a/legacy/src/main/java/org/opensearch/sql/legacy/expression/domain/BindingTuple.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/domain/BindingTuple.java index badc7c8355..328f63b7ca 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/domain/BindingTuple.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/domain/BindingTuple.java @@ -3,10 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.domain; - import java.util.Map; import java.util.stream.Collectors; import lombok.Builder; @@ -19,42 +17,41 @@ import org.opensearch.sql.legacy.expression.model.ExprValueFactory; /** - * BindingTuple represents the a relationship between bindingName and ExprValue. - * e.g. The operation output column name is bindingName, the value is the ExprValue. + * BindingTuple represents the a relationship between bindingName and ExprValue. e.g. The operation + * output column name is bindingName, the value is the ExprValue. */ @Builder @Getter @EqualsAndHashCode public class BindingTuple { - @Singular("binding") - private final Map bindingMap; - - /** - * Resolve the Binding Name in BindingTuple context. - * - * @param bindingName binding name. - * @return binding value. 
- */ - public ExprValue resolve(String bindingName) { - return bindingMap.getOrDefault(bindingName, new ExprMissingValue()); - } - - @Override - public String toString() { - return bindingMap.entrySet() - .stream() - .map(entry -> String.format("%s:%s", entry.getKey(), entry.getValue())) - .collect(Collectors.joining(",", "<", ">")); - } - - public static BindingTuple from(Map map) { - return from(new JSONObject(map)); - } - - public static BindingTuple from(JSONObject json) { - Map map = json.toMap(); - BindingTupleBuilder bindingTupleBuilder = BindingTuple.builder(); - map.forEach((key, value) -> bindingTupleBuilder.binding(key, ExprValueFactory.from(value))); - return bindingTupleBuilder.build(); - } + @Singular("binding") + private final Map bindingMap; + + /** + * Resolve the Binding Name in BindingTuple context. + * + * @param bindingName binding name. + * @return binding value. + */ + public ExprValue resolve(String bindingName) { + return bindingMap.getOrDefault(bindingName, new ExprMissingValue()); + } + + @Override + public String toString() { + return bindingMap.entrySet().stream() + .map(entry -> String.format("%s:%s", entry.getKey(), entry.getValue())) + .collect(Collectors.joining(",", "<", ">")); + } + + public static BindingTuple from(Map map) { + return from(new JSONObject(map)); + } + + public static BindingTuple from(JSONObject json) { + Map map = json.toMap(); + BindingTupleBuilder bindingTupleBuilder = BindingTuple.builder(); + map.forEach((key, value) -> bindingTupleBuilder.binding(key, ExprValueFactory.from(value))); + return bindingTupleBuilder.build(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprBooleanValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprBooleanValue.java index 50b1523497..ce7c1a8fca 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprBooleanValue.java +++ 
b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprBooleanValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; import lombok.EqualsAndHashCode; @@ -12,23 +11,23 @@ @EqualsAndHashCode @RequiredArgsConstructor public class ExprBooleanValue implements ExprValue { - private final Boolean value; + private final Boolean value; - @Override - public Object value() { - return value; - } + @Override + public Object value() { + return value; + } - @Override - public ExprValueKind kind() { - return ExprValueKind.BOOLEAN_VALUE; - } + @Override + public ExprValueKind kind() { + return ExprValueKind.BOOLEAN_VALUE; + } - @Override - public String toString() { - final StringBuffer sb = new StringBuffer("SSBooleanValue{"); - sb.append("value=").append(value); - sb.append('}'); - return sb.toString(); - } + @Override + public String toString() { + final StringBuffer sb = new StringBuffer("SSBooleanValue{"); + sb.append("value=").append(value); + sb.append('}'); + return sb.toString(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprCollectionValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprCollectionValue.java index 99eb35272d..f6200a2bea 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprCollectionValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprCollectionValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; import static org.opensearch.sql.legacy.expression.model.ExprValue.ExprValueKind.COLLECTION_VALUE; @@ -16,22 +15,20 @@ @EqualsAndHashCode @RequiredArgsConstructor public class ExprCollectionValue implements ExprValue { - private final List valueList; - - @Override - public Object value() { - return valueList; - } - - @Override - public ExprValueKind kind() { - return COLLECTION_VALUE; - } - 
- @Override - public String toString() { - return valueList.stream() - .map(Object::toString) - .collect(Collectors.joining(",", "[", "]")); - } + private final List valueList; + + @Override + public Object value() { + return valueList; + } + + @Override + public ExprValueKind kind() { + return COLLECTION_VALUE; + } + + @Override + public String toString() { + return valueList.stream().map(Object::toString).collect(Collectors.joining(",", "[", "]")); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprDoubleValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprDoubleValue.java index fdfacc4c55..16c607cae5 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprDoubleValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprDoubleValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; import lombok.EqualsAndHashCode; @@ -12,20 +11,20 @@ @EqualsAndHashCode @RequiredArgsConstructor public class ExprDoubleValue implements ExprValue { - private final Double value; + private final Double value; - @Override - public Object value() { - return value; - } + @Override + public Object value() { + return value; + } - @Override - public ExprValueKind kind() { - return ExprValueKind.DOUBLE_VALUE; - } + @Override + public ExprValueKind kind() { + return ExprValueKind.DOUBLE_VALUE; + } - @Override - public String toString() { - return value.toString(); - } + @Override + public String toString() { + return value.toString(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprFloatValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprFloatValue.java index f4d4dfc1b3..478ca645a7 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprFloatValue.java +++ 
b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprFloatValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; import lombok.EqualsAndHashCode; @@ -12,20 +11,20 @@ @EqualsAndHashCode @RequiredArgsConstructor public class ExprFloatValue implements ExprValue { - private final Float value; + private final Float value; - @Override - public Object value() { - return value; - } + @Override + public Object value() { + return value; + } - @Override - public ExprValueKind kind() { - return ExprValueKind.DOUBLE_VALUE; - } + @Override + public ExprValueKind kind() { + return ExprValueKind.DOUBLE_VALUE; + } - @Override - public String toString() { - return value.toString(); - } + @Override + public String toString() { + return value.toString(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprIntegerValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprIntegerValue.java index 3285934280..92c4d38a4c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprIntegerValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprIntegerValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; import lombok.EqualsAndHashCode; @@ -12,20 +11,20 @@ @EqualsAndHashCode @RequiredArgsConstructor public class ExprIntegerValue implements ExprValue { - private final Integer value; + private final Integer value; - @Override - public Object value() { - return value; - } + @Override + public Object value() { + return value; + } - @Override - public ExprValueKind kind() { - return ExprValueKind.INTEGER_VALUE; - } + @Override + public ExprValueKind kind() { + return ExprValueKind.INTEGER_VALUE; + } - @Override - public String toString() { - return value.toString(); - } + @Override + public String toString() { + return value.toString(); + } } 
diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprLongValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprLongValue.java index b50a0088db..4f96ecf89a 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprLongValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprLongValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; import lombok.EqualsAndHashCode; @@ -12,20 +11,20 @@ @EqualsAndHashCode @RequiredArgsConstructor public class ExprLongValue implements ExprValue { - private final Long value; + private final Long value; - @Override - public Object value() { - return value; - } + @Override + public Object value() { + return value; + } - @Override - public ExprValueKind kind() { - return ExprValueKind.LONG_VALUE; - } + @Override + public ExprValueKind kind() { + return ExprValueKind.LONG_VALUE; + } - @Override - public String toString() { - return value.toString(); - } + @Override + public String toString() { + return value.toString(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprMissingValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprMissingValue.java index e05e32b920..c2b70537c9 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprMissingValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprMissingValue.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; -/** - * The definition of the missing value. - */ +/** The definition of the missing value. 
*/ public class ExprMissingValue implements ExprValue { - @Override - public ExprValueKind kind() { - return ExprValueKind.MISSING_VALUE; - } + @Override + public ExprValueKind kind() { + return ExprValueKind.MISSING_VALUE; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprStringValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprStringValue.java index dcdec6117f..3c93b82187 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprStringValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprStringValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; import lombok.EqualsAndHashCode; @@ -12,20 +11,20 @@ @EqualsAndHashCode @RequiredArgsConstructor public class ExprStringValue implements ExprValue { - private final String value; + private final String value; - @Override - public Object value() { - return value; - } + @Override + public Object value() { + return value; + } - @Override - public ExprValueKind kind() { - return ExprValueKind.STRING_VALUE; - } + @Override + public ExprValueKind kind() { + return ExprValueKind.STRING_VALUE; + } - @Override - public String toString() { - return value; - } + @Override + public String toString() { + return value; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprTupleValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprTupleValue.java index 7debcef864..5f690a6b3e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprTupleValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprTupleValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; import java.util.Map; @@ -14,23 +13,22 @@ @EqualsAndHashCode @RequiredArgsConstructor public class ExprTupleValue implements 
ExprValue { - private final Map valueMap; + private final Map valueMap; - @Override - public Object value() { - return valueMap; - } + @Override + public Object value() { + return valueMap; + } - @Override - public ExprValueKind kind() { - return ExprValueKind.TUPLE_VALUE; - } + @Override + public ExprValueKind kind() { + return ExprValueKind.TUPLE_VALUE; + } - @Override - public String toString() { - return valueMap.entrySet() - .stream() - .map(entry -> String.format("%s:%s", entry.getKey(), entry.getValue())) - .collect(Collectors.joining(",", "{", "}")); - } + @Override + public String toString() { + return valueMap.entrySet().stream() + .map(entry -> String.format("%s:%s", entry.getKey(), entry.getValue())) + .collect(Collectors.joining(",", "{", "}")); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValue.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValue.java index d15cb39270..aae1973d4b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValue.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValue.java @@ -3,31 +3,28 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; -/** - * The definition of the Expression Value. - */ +/** The definition of the Expression Value. 
*/ public interface ExprValue { - default Object value() { - throw new IllegalStateException("invalid value operation on " + kind()); - } + default Object value() { + throw new IllegalStateException("invalid value operation on " + kind()); + } - default ExprValueKind kind() { - throw new IllegalStateException("invalid kind operation"); - } + default ExprValueKind kind() { + throw new IllegalStateException("invalid kind operation"); + } - enum ExprValueKind { - TUPLE_VALUE, - COLLECTION_VALUE, - MISSING_VALUE, + enum ExprValueKind { + TUPLE_VALUE, + COLLECTION_VALUE, + MISSING_VALUE, - BOOLEAN_VALUE, - INTEGER_VALUE, - DOUBLE_VALUE, - LONG_VALUE, - FLOAT_VALUE, - STRING_VALUE - } + BOOLEAN_VALUE, + INTEGER_VALUE, + DOUBLE_VALUE, + LONG_VALUE, + FLOAT_VALUE, + STRING_VALUE + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValueFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValueFactory.java index 5dc2b5b50a..28f4c70293 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValueFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValueFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; import java.math.BigDecimal; @@ -12,62 +11,60 @@ import java.util.List; import java.util.Map; -/** - * The definition of {@link ExprValue} factory. - */ +/** The definition of {@link ExprValue} factory. 
*/ public class ExprValueFactory { - public static ExprValue booleanValue(Boolean value) { - return new ExprBooleanValue(value); - } + public static ExprValue booleanValue(Boolean value) { + return new ExprBooleanValue(value); + } - public static ExprValue integerValue(Integer value) { - return new ExprIntegerValue(value); - } + public static ExprValue integerValue(Integer value) { + return new ExprIntegerValue(value); + } - public static ExprValue doubleValue(Double value) { - return new ExprDoubleValue(value); - } + public static ExprValue doubleValue(Double value) { + return new ExprDoubleValue(value); + } - public static ExprValue stringValue(String value) { - return new ExprStringValue(value); - } + public static ExprValue stringValue(String value) { + return new ExprStringValue(value); + } - public static ExprValue longValue(Long value) { - return new ExprLongValue(value); - } + public static ExprValue longValue(Long value) { + return new ExprLongValue(value); + } - public static ExprValue tupleValue(Map map) { - Map valueMap = new HashMap<>(); - map.forEach((k, v) -> valueMap.put(k, from(v))); - return new ExprTupleValue(valueMap); - } + public static ExprValue tupleValue(Map map) { + Map valueMap = new HashMap<>(); + map.forEach((k, v) -> valueMap.put(k, from(v))); + return new ExprTupleValue(valueMap); + } - public static ExprValue collectionValue(List list) { - List valueList = new ArrayList<>(); - list.forEach(o -> valueList.add(from(o))); - return new ExprCollectionValue(valueList); - } + public static ExprValue collectionValue(List list) { + List valueList = new ArrayList<>(); + list.forEach(o -> valueList.add(from(o))); + return new ExprCollectionValue(valueList); + } - public static ExprValue from(Object o) { - if (o instanceof Map) { - return tupleValue((Map) o); - } else if (o instanceof List) { - return collectionValue(((List) o)); - } else if (o instanceof Integer) { - return integerValue((Integer) o); - } else if (o instanceof Long) { - return 
longValue(((Long) o)); - } else if (o instanceof Boolean) { - return booleanValue((Boolean) o); - } else if (o instanceof Double) { - return doubleValue((Double) o); - } else if (o instanceof BigDecimal) { - return doubleValue(((BigDecimal) o).doubleValue()); - } else if (o instanceof String) { - return stringValue((String) o); - } else { - throw new IllegalStateException("unsupported type " + o.getClass()); - } + public static ExprValue from(Object o) { + if (o instanceof Map) { + return tupleValue((Map) o); + } else if (o instanceof List) { + return collectionValue(((List) o)); + } else if (o instanceof Integer) { + return integerValue((Integer) o); + } else if (o instanceof Long) { + return longValue(((Long) o)); + } else if (o instanceof Boolean) { + return booleanValue((Boolean) o); + } else if (o instanceof Double) { + return doubleValue((Double) o); + } else if (o instanceof BigDecimal) { + return doubleValue(((BigDecimal) o).doubleValue()); + } else if (o instanceof String) { + return stringValue((String) o); + } else { + throw new IllegalStateException("unsupported type " + o.getClass()); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValueUtils.java b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValueUtils.java index 4688e74b6a..9873c72886 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValueUtils.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/expression/model/ExprValueUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.expression.model; import static org.opensearch.sql.legacy.expression.model.ExprValue.ExprValueKind.BOOLEAN_VALUE; @@ -15,64 +14,62 @@ import java.util.List; import java.util.Map; -/** - * The definition of ExprValue Utils. - */ +/** The definition of ExprValue Utils. 
*/ public class ExprValueUtils { - public static Integer getIntegerValue(ExprValue exprValue) { - return getNumberValue(exprValue).intValue(); - } + public static Integer getIntegerValue(ExprValue exprValue) { + return getNumberValue(exprValue).intValue(); + } - public static Double getDoubleValue(ExprValue exprValue) { - return getNumberValue(exprValue).doubleValue(); - } + public static Double getDoubleValue(ExprValue exprValue) { + return getNumberValue(exprValue).doubleValue(); + } - public static Long getLongValue(ExprValue exprValue) { - return getNumberValue(exprValue).longValue(); - } + public static Long getLongValue(ExprValue exprValue) { + return getNumberValue(exprValue).longValue(); + } - public static Float getFloatValue(ExprValue exprValue) { - return getNumberValue(exprValue).floatValue(); - } + public static Float getFloatValue(ExprValue exprValue) { + return getNumberValue(exprValue).floatValue(); + } - public static String getStringValue(ExprValue exprValue) { - return convert(exprValue, STRING_VALUE); - } + public static String getStringValue(ExprValue exprValue) { + return convert(exprValue, STRING_VALUE); + } - public static List getCollectionValue(ExprValue exprValue) { - return convert(exprValue, COLLECTION_VALUE); - } + public static List getCollectionValue(ExprValue exprValue) { + return convert(exprValue, COLLECTION_VALUE); + } - public static Map getTupleValue(ExprValue exprValue) { - return convert(exprValue, TUPLE_VALUE); - } + public static Map getTupleValue(ExprValue exprValue) { + return convert(exprValue, TUPLE_VALUE); + } - public static Boolean getBooleanValue(ExprValue exprValue) { - return convert(exprValue, BOOLEAN_VALUE); - } + public static Boolean getBooleanValue(ExprValue exprValue) { + return convert(exprValue, BOOLEAN_VALUE); + } - @VisibleForTesting - public static Number getNumberValue(ExprValue exprValue) { - switch (exprValue.kind()) { - case INTEGER_VALUE: - case DOUBLE_VALUE: - case LONG_VALUE: - case FLOAT_VALUE: 
- return (Number) exprValue.value(); - default: - break; - } - throw new IllegalStateException( - String.format("invalid to get NUMBER_VALUE from expr type of %s", exprValue.kind())); + @VisibleForTesting + public static Number getNumberValue(ExprValue exprValue) { + switch (exprValue.kind()) { + case INTEGER_VALUE: + case DOUBLE_VALUE: + case LONG_VALUE: + case FLOAT_VALUE: + return (Number) exprValue.value(); + default: + break; } + throw new IllegalStateException( + String.format("invalid to get NUMBER_VALUE from expr type of %s", exprValue.kind())); + } - @SuppressWarnings("unchecked") - private static T convert(ExprValue exprValue, ExprValue.ExprValueKind toType) { - if (exprValue.kind() == toType) { - return (T) exprValue.value(); - } else { - throw new IllegalStateException( - String.format("invalid to get %s from expr type of %s", toType, exprValue.kind())); - } + @SuppressWarnings("unchecked") + private static T convert(ExprValue exprValue, ExprValue.ExprValueKind toType) { + if (exprValue.kind() == toType) { + return (T) exprValue.value(); + } else { + throw new IllegalStateException( + String.format("invalid to get %s from expr type of %s", toType, exprValue.kind())); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/BasicCounter.java b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/BasicCounter.java index 8bb15eeb74..88d5f817e8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/BasicCounter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/BasicCounter.java @@ -3,32 +3,31 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.metrics; import java.util.concurrent.atomic.LongAdder; public class BasicCounter implements Counter { - private LongAdder count = new LongAdder(); + private LongAdder count = new LongAdder(); - @Override - public void increment() { - count.increment(); - } + @Override + public void increment() { + count.increment(); + } - @Override - public 
void add(long n) { - count.add(n); - } + @Override + public void add(long n) { + count.add(n); + } - @Override - public Long getValue() { - return count.longValue(); - } + @Override + public Long getValue() { + return count.longValue(); + } - @Override - public void reset() { - count.reset(); - } + @Override + public void reset() { + count.reset(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Counter.java b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Counter.java index 7d490704e8..f91731ab0e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Counter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Counter.java @@ -3,16 +3,15 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.metrics; public interface Counter { - void increment(); + void increment(); - void add(long n); + void add(long n); - T getValue(); + T getValue(); - void reset(); + void reset(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/GaugeMetric.java b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/GaugeMetric.java index 5752927952..2f7c269351 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/GaugeMetric.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/GaugeMetric.java @@ -3,29 +3,25 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.metrics; import java.util.function.Supplier; -/** - * Gauge metric, an instant value like cpu usage, state and so on - */ +/** Gauge metric, an instant value like cpu usage, state and so on */ public class GaugeMetric extends Metric { - private Supplier loadValue; - - public GaugeMetric(String name, Supplier supplier) { - super(name); - this.loadValue = supplier; - } + private Supplier loadValue; - public String getName() { - return super.getName(); - } + public GaugeMetric(String name, Supplier supplier) { + super(name); + this.loadValue = supplier; + } - public T getValue() { 
- return loadValue.get(); - } + public String getName() { + return super.getName(); + } + public T getValue() { + return loadValue.get(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Metric.java b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Metric.java index 9e31b0d9cd..956e0f558c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Metric.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Metric.java @@ -3,23 +3,21 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.metrics; public abstract class Metric implements java.io.Serializable { - private static final long serialVersionUID = 1L; - - private String name; + private static final long serialVersionUID = 1L; - public Metric(String name) { - this.name = name; - } + private String name; - public String getName() { - return name; - } + public Metric(String name) { + this.name = name; + } - public abstract T getValue(); + public String getName() { + return name; + } + public abstract T getValue(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/MetricFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/MetricFactory.java index 9319b77644..e4fbd173c9 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/MetricFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/MetricFactory.java @@ -3,34 +3,33 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.metrics; import org.opensearch.sql.legacy.query.join.BackOffRetryStrategy; public class MetricFactory { - public static Metric createMetric(MetricName name) { + public static Metric createMetric(MetricName name) { - switch (name) { - case REQ_TOTAL: - case DEFAULT_CURSOR_REQUEST_TOTAL: - case DEFAULT: - case PPL_REQ_TOTAL: - return new NumericMetric<>(name.getName(), new BasicCounter()); - case CIRCUIT_BREAKER: - return new GaugeMetric<>(name.getName(), 
BackOffRetryStrategy.GET_CB_STATE); - case REQ_COUNT_TOTAL: - case DEFAULT_CURSOR_REQUEST_COUNT_TOTAL: - case FAILED_REQ_COUNT_CUS: - case FAILED_REQ_COUNT_SYS: - case FAILED_REQ_COUNT_CB: - case PPL_REQ_COUNT_TOTAL: - case PPL_FAILED_REQ_COUNT_CUS: - case PPL_FAILED_REQ_COUNT_SYS: - return new NumericMetric<>(name.getName(), new RollingCounter()); - default: - return new NumericMetric<>(name.getName(), new BasicCounter()); - } + switch (name) { + case REQ_TOTAL: + case DEFAULT_CURSOR_REQUEST_TOTAL: + case DEFAULT: + case PPL_REQ_TOTAL: + return new NumericMetric<>(name.getName(), new BasicCounter()); + case CIRCUIT_BREAKER: + return new GaugeMetric<>(name.getName(), BackOffRetryStrategy.GET_CB_STATE); + case REQ_COUNT_TOTAL: + case DEFAULT_CURSOR_REQUEST_COUNT_TOTAL: + case FAILED_REQ_COUNT_CUS: + case FAILED_REQ_COUNT_SYS: + case FAILED_REQ_COUNT_CB: + case PPL_REQ_COUNT_TOTAL: + case PPL_FAILED_REQ_COUNT_CUS: + case PPL_FAILED_REQ_COUNT_SYS: + return new NumericMetric<>(name.getName(), new RollingCounter()); + default: + return new NumericMetric<>(name.getName(), new BasicCounter()); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/MetricName.java b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/MetricName.java index 16a719b97e..1c895f5d69 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/MetricName.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/MetricName.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.metrics; import com.google.common.collect.ImmutableSet; @@ -13,51 +12,55 @@ import java.util.stream.Collectors; public enum MetricName { + REQ_TOTAL("request_total"), + REQ_COUNT_TOTAL("request_count"), + FAILED_REQ_COUNT_SYS("failed_request_count_syserr"), + FAILED_REQ_COUNT_CUS("failed_request_count_cuserr"), + FAILED_REQ_COUNT_CB("failed_request_count_cb"), + DEFAULT_CURSOR_REQUEST_TOTAL("default_cursor_request_total"), + 
DEFAULT_CURSOR_REQUEST_COUNT_TOTAL("default_cursor_request_count"), + CIRCUIT_BREAKER("circuit_breaker"), + DEFAULT("default"), + + PPL_REQ_TOTAL("ppl_request_total"), + PPL_REQ_COUNT_TOTAL("ppl_request_count"), + PPL_FAILED_REQ_COUNT_SYS("ppl_failed_request_count_syserr"), + PPL_FAILED_REQ_COUNT_CUS("ppl_failed_request_count_cuserr"), + DATASOURCE_REQ_COUNT("datasource_request_count"), + DATASOURCE_FAILED_REQ_COUNT_SYS("datasource_failed_request_count_syserr"), + DATASOURCE_FAILED_REQ_COUNT_CUS("datasource_failed_request_count_cuserr"); + + private String name; + + MetricName(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + public static List getNames() { + return Arrays.stream(MetricName.values()).map(v -> v.name).collect(Collectors.toList()); + } + + private static Set NUMERICAL_METRIC = + new ImmutableSet.Builder() + .add(PPL_REQ_TOTAL) + .add(PPL_REQ_COUNT_TOTAL) + .add(PPL_FAILED_REQ_COUNT_SYS) + .add(PPL_FAILED_REQ_COUNT_CUS) + .build(); - REQ_TOTAL("request_total"), - REQ_COUNT_TOTAL("request_count"), - FAILED_REQ_COUNT_SYS("failed_request_count_syserr"), - FAILED_REQ_COUNT_CUS("failed_request_count_cuserr"), - FAILED_REQ_COUNT_CB("failed_request_count_cb"), - DEFAULT_CURSOR_REQUEST_TOTAL("default_cursor_request_total"), - DEFAULT_CURSOR_REQUEST_COUNT_TOTAL("default_cursor_request_count"), - CIRCUIT_BREAKER("circuit_breaker"), - DEFAULT("default"), - - PPL_REQ_TOTAL("ppl_request_total"), - PPL_REQ_COUNT_TOTAL("ppl_request_count"), - PPL_FAILED_REQ_COUNT_SYS("ppl_failed_request_count_syserr"), - PPL_FAILED_REQ_COUNT_CUS("ppl_failed_request_count_cuserr"), - DATASOURCE_REQ_COUNT("datasource_request_count"), - DATASOURCE_FAILED_REQ_COUNT_SYS("datasource_failed_request_count_syserr"), - DATASOURCE_FAILED_REQ_COUNT_CUS("datasource_failed_request_count_cuserr"); - - private String name; - - MetricName(String name) { - this.name = name; - } - - public String getName() { - return name; - } - - public static List getNames() 
{ - return Arrays.stream(MetricName.values()).map(v -> v.name).collect(Collectors.toList()); - } - - - private static Set NUMERICAL_METRIC = new ImmutableSet.Builder() - .add(PPL_REQ_TOTAL) - .add(PPL_REQ_COUNT_TOTAL) - .add(PPL_FAILED_REQ_COUNT_SYS) - .add(PPL_FAILED_REQ_COUNT_CUS) - .build(); - - public boolean isNumerical() { - return this == REQ_TOTAL || this == REQ_COUNT_TOTAL || this == FAILED_REQ_COUNT_SYS - || this == FAILED_REQ_COUNT_CUS || this == FAILED_REQ_COUNT_CB || this == DEFAULT - || this == DEFAULT_CURSOR_REQUEST_TOTAL || this == DEFAULT_CURSOR_REQUEST_COUNT_TOTAL - || NUMERICAL_METRIC.contains(this); - } + public boolean isNumerical() { + return this == REQ_TOTAL + || this == REQ_COUNT_TOTAL + || this == FAILED_REQ_COUNT_SYS + || this == FAILED_REQ_COUNT_CUS + || this == FAILED_REQ_COUNT_CB + || this == DEFAULT + || this == DEFAULT_CURSOR_REQUEST_TOTAL + || this == DEFAULT_CURSOR_REQUEST_COUNT_TOTAL + || NUMERICAL_METRIC.contains(this); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Metrics.java b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Metrics.java index e53dfa6804..858f9e5cef 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Metrics.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/Metrics.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.metrics; import java.util.ArrayList; @@ -13,69 +12,68 @@ public class Metrics { - private static Metrics metrics = new Metrics(); - private ConcurrentHashMap registeredMetricsByName = new ConcurrentHashMap<>(); - - public static Metrics getInstance() { - return metrics; - } + private static Metrics metrics = new Metrics(); + private ConcurrentHashMap registeredMetricsByName = new ConcurrentHashMap<>(); - private Metrics() { - } + public static Metrics getInstance() { + return metrics; + } - public void registerDefaultMetrics() { - for (MetricName metricName : MetricName.values()) { - 
registerMetric(MetricFactory.createMetric(metricName)); - } - } + private Metrics() {} - public void registerMetric(Metric metric) { - registeredMetricsByName.put(metric.getName(), metric); + public void registerDefaultMetrics() { + for (MetricName metricName : MetricName.values()) { + registerMetric(MetricFactory.createMetric(metricName)); } + } - public void unregisterMetric(String name) { - if (name == null) { - return; - } + public void registerMetric(Metric metric) { + registeredMetricsByName.put(metric.getName(), metric); + } - registeredMetricsByName.remove(name); + public void unregisterMetric(String name) { + if (name == null) { + return; } - public Metric getMetric(String name) { - if (name == null) { - return null; - } + registeredMetricsByName.remove(name); + } - return registeredMetricsByName.get(name); + public Metric getMetric(String name) { + if (name == null) { + return null; } - public NumericMetric getNumericalMetric(MetricName metricName) { - String name = metricName.getName(); - if (!metricName.isNumerical()) { - name = MetricName.DEFAULT.getName(); - } + return registeredMetricsByName.get(name); + } - return (NumericMetric) registeredMetricsByName.get(name); + public NumericMetric getNumericalMetric(MetricName metricName) { + String name = metricName.getName(); + if (!metricName.isNumerical()) { + name = MetricName.DEFAULT.getName(); } - public List getAllMetrics() { - return new ArrayList<>(registeredMetricsByName.values()); - } + return (NumericMetric) registeredMetricsByName.get(name); + } - public String collectToJSON() { - JSONObject metricsJSONObject = new JSONObject(); + public List getAllMetrics() { + return new ArrayList<>(registeredMetricsByName.values()); + } - for (Metric metric : registeredMetricsByName.values()) { - if (metric.getName().equals("default")) { - continue; - } - metricsJSONObject.put(metric.getName(), metric.getValue()); - } + public String collectToJSON() { + JSONObject metricsJSONObject = new JSONObject(); - return 
metricsJSONObject.toString(); + for (Metric metric : registeredMetricsByName.values()) { + if (metric.getName().equals("default")) { + continue; + } + metricsJSONObject.put(metric.getName(), metric.getValue()); } - public void clear() { - registeredMetricsByName.clear(); - } + return metricsJSONObject.toString(); + } + + public void clear() { + registeredMetricsByName.clear(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/NumericMetric.java b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/NumericMetric.java index 085034bcd2..ee6d373f8f 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/NumericMetric.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/NumericMetric.java @@ -3,40 +3,38 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.metrics; public class NumericMetric extends Metric { - private Counter counter; - - public NumericMetric(String name, Counter counter) { - super(name); - this.counter = counter; - } + private Counter counter; - public String getName() { - return super.getName(); - } + public NumericMetric(String name, Counter counter) { + super(name); + this.counter = counter; + } - public Counter getCounter() { - return counter; - } + public String getName() { + return super.getName(); + } - public void increment() { - counter.increment(); - } + public Counter getCounter() { + return counter; + } - public void increment(long n) { - counter.add(n); - } + public void increment() { + counter.increment(); + } - public T getValue() { - return counter.getValue(); - } + public void increment(long n) { + counter.add(n); + } - public void clear() { - counter.reset(); - } + public T getValue() { + return counter.getValue(); + } + public void clear() { + counter.reset(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/RollingCounter.java b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/RollingCounter.java index 1c624d7ffe..c7b9ec56ec 
100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/metrics/RollingCounter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/metrics/RollingCounter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.metrics; import java.time.Clock; @@ -13,87 +12,85 @@ import org.opensearch.sql.legacy.esdomain.LocalClusterState; /** - * Rolling counter. The count is refreshed every interval. In every interval the count is cumulative. + * Rolling counter. The count is refreshed every interval. In every interval the count is + * cumulative. */ public class RollingCounter implements Counter { - private final long capacity; - private final long window; - private final long interval; - private final Clock clock; - private final ConcurrentSkipListMap time2CountWin; - private final LongAdder count; - - public RollingCounter() { - this( - LocalClusterState.state().getSettingValue( - Settings.Key.METRICS_ROLLING_WINDOW), - LocalClusterState.state().getSettingValue( - Settings.Key.METRICS_ROLLING_INTERVAL)); - } - - public RollingCounter(long window, long interval, Clock clock) { - this.window = window; - this.interval = interval; - this.clock = clock; - time2CountWin = new ConcurrentSkipListMap<>(); - count = new LongAdder(); - capacity = window / interval * 2; - } - - public RollingCounter(long window, long interval) { - this(window, interval, Clock.systemDefaultZone()); + private final long capacity; + private final long window; + private final long interval; + private final Clock clock; + private final ConcurrentSkipListMap time2CountWin; + private final LongAdder count; + + public RollingCounter() { + this( + LocalClusterState.state().getSettingValue(Settings.Key.METRICS_ROLLING_WINDOW), + LocalClusterState.state().getSettingValue(Settings.Key.METRICS_ROLLING_INTERVAL)); + } + + public RollingCounter(long window, long interval, Clock clock) { + this.window = window; + this.interval = interval; + this.clock = clock; + 
time2CountWin = new ConcurrentSkipListMap<>(); + count = new LongAdder(); + capacity = window / interval * 2; + } + + public RollingCounter(long window, long interval) { + this(window, interval, Clock.systemDefaultZone()); + } + + @Override + public void increment() { + add(1L); + } + + @Override + public void add(long n) { + trim(); + time2CountWin.compute(getKey(clock.millis()), (k, v) -> (v == null) ? n : v + n); + } + + @Override + public Long getValue() { + return getValue(getPreKey(clock.millis())); + } + + public long getValue(long key) { + Long res = time2CountWin.get(key); + if (res == null) { + return 0; } - @Override - public void increment() { - add(1L); - } + return res; + } - @Override - public void add(long n) { - trim(); - time2CountWin.compute(getKey(clock.millis()), (k, v) -> (v == null) ? n : v + n); - } + public long getSum() { + return count.longValue(); + } - @Override - public Long getValue() { - return getValue(getPreKey(clock.millis())); + private void trim() { + if (time2CountWin.size() > capacity) { + time2CountWin.headMap(getKey(clock.millis() - window * 1000)).clear(); } + } - public long getValue(long key) { - Long res = time2CountWin.get(key); - if (res == null) { - return 0; - } + private long getKey(long millis) { + return millis / 1000 / this.interval; + } - return res; - } - - public long getSum() { - return count.longValue(); - } + private long getPreKey(long millis) { + return getKey(millis) - 1; + } - private void trim() { - if (time2CountWin.size() > capacity) { - time2CountWin.headMap(getKey(clock.millis() - window * 1000)).clear(); - } - } - - private long getKey(long millis) { - return millis / 1000 / this.interval; - } - - private long getPreKey(long millis) { - return getKey(millis) - 1; - } - - public int size() { - return time2CountWin.size(); - } - - public void reset() { - time2CountWin.clear(); - } + public int size() { + return time2CountWin.size(); + } + public void reset() { + time2CountWin.clear(); + } } diff 
--git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/CaseWhenParser.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/CaseWhenParser.java index c711ee2929..d55ee64601 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/CaseWhenParser.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/CaseWhenParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import com.alibaba.druid.sql.ast.SQLExpr; @@ -19,101 +18,119 @@ import org.opensearch.sql.legacy.exception.SqlParseException; import org.opensearch.sql.legacy.utils.Util; -/** - * Created by allwefantasy on 9/3/16. - */ +/** Created by allwefantasy on 9/3/16. */ public class CaseWhenParser { - private SQLCaseExpr caseExpr; - private String alias; - private String tableAlias; - - public CaseWhenParser(SQLCaseExpr caseExpr, String alias, String tableAlias) { - this.alias = alias; - this.tableAlias = tableAlias; - this.caseExpr = caseExpr; + private SQLCaseExpr caseExpr; + private String alias; + private String tableAlias; + + public CaseWhenParser(SQLCaseExpr caseExpr, String alias, String tableAlias) { + this.alias = alias; + this.tableAlias = tableAlias; + this.caseExpr = caseExpr; + } + + public String parse() throws SqlParseException { + List result = new ArrayList<>(); + + if (caseExpr.getValueExpr() != null) { + for (SQLCaseExpr.Item item : caseExpr.getItems()) { + SQLExpr left = caseExpr.getValueExpr(); + SQLExpr right = item.getConditionExpr(); + SQLBinaryOpExpr conditionExpr = + new SQLBinaryOpExpr(left, SQLBinaryOperator.Equality, right); + item.setConditionExpr(conditionExpr); + } + caseExpr.setValueExpr(null); } - public String parse() throws SqlParseException { - List result = new ArrayList<>(); - - if (caseExpr.getValueExpr() != null) { - for (SQLCaseExpr.Item item : caseExpr.getItems()) { - SQLExpr left = caseExpr.getValueExpr(); - SQLExpr right = item.getConditionExpr(); - SQLBinaryOpExpr conditionExpr = 
new SQLBinaryOpExpr(left, SQLBinaryOperator.Equality, right); - item.setConditionExpr(conditionExpr); - } - caseExpr.setValueExpr(null); - } - - for (SQLCaseExpr.Item item : caseExpr.getItems()) { - SQLExpr conditionExpr = item.getConditionExpr(); - - WhereParser parser = new WhereParser(new SqlParser(), conditionExpr); - String scriptCode = explain(parser.findWhere()); - if (scriptCode.startsWith(" &&")) { - scriptCode = scriptCode.substring(3); - } - if (result.size() == 0) { - result.add("if(" + scriptCode + ")" + "{" + Util.getScriptValueWithQuote(item.getValueExpr(), - "'") + "}"); - } else { - result.add("else if(" + scriptCode + ")" + "{" + Util.getScriptValueWithQuote(item.getValueExpr(), - "'") + "}"); - } - - } - SQLExpr elseExpr = caseExpr.getElseExpr(); - if (elseExpr == null) { - result.add("else { null }"); - } else { - result.add("else {" + Util.getScriptValueWithQuote(elseExpr, "'") + "}"); - } - - - return Joiner.on(" ").join(result); + for (SQLCaseExpr.Item item : caseExpr.getItems()) { + SQLExpr conditionExpr = item.getConditionExpr(); + + WhereParser parser = new WhereParser(new SqlParser(), conditionExpr); + String scriptCode = explain(parser.findWhere()); + if (scriptCode.startsWith(" &&")) { + scriptCode = scriptCode.substring(3); + } + if (result.size() == 0) { + result.add( + "if(" + + scriptCode + + ")" + + "{" + + Util.getScriptValueWithQuote(item.getValueExpr(), "'") + + "}"); + } else { + result.add( + "else if(" + + scriptCode + + ")" + + "{" + + Util.getScriptValueWithQuote(item.getValueExpr(), "'") + + "}"); + } } - - public String explain(Where where) throws SqlParseException { - List codes = new ArrayList<>(); - while (where.getWheres().size() == 1) { - where = where.getWheres().getFirst(); - } - explainWhere(codes, where); - String relation = where.getConn().name().equals("AND") ? 
" && " : " || "; - return Joiner.on(relation).join(codes); + SQLExpr elseExpr = caseExpr.getElseExpr(); + if (elseExpr == null) { + result.add("else { null }"); + } else { + result.add("else {" + Util.getScriptValueWithQuote(elseExpr, "'") + "}"); } + return Joiner.on(" ").join(result); + } - private void explainWhere(List codes, Where where) throws SqlParseException { - if (where instanceof Condition) { - Condition condition = (Condition) where; - - if (condition.getValue() instanceof ScriptFilter) { - codes.add("(" + ((ScriptFilter) condition.getValue()).getScript() + ")"); - } else if (condition.getOPERATOR() == Condition.OPERATOR.BETWEEN) { - Object[] objs = (Object[]) condition.getValue(); - codes.add("(" + "doc['" + condition.getName() + "'].value >= " + objs[0] + " && doc['" - + condition.getName() + "'].value <=" + objs[1] + ")"); - } else { - SQLExpr nameExpr = condition.getNameExpr(); - SQLExpr valueExpr = condition.getValueExpr(); - if (valueExpr instanceof SQLNullExpr) { - codes.add("(" + "doc['" + nameExpr.toString() + "']" + ".empty)"); - } else { - codes.add("(" + Util.getScriptValueWithQuote(nameExpr, "'") + condition.getOpertatorSymbol() - + Util.getScriptValueWithQuote(valueExpr, "'") + ")"); - } - } + public String explain(Where where) throws SqlParseException { + List codes = new ArrayList<>(); + while (where.getWheres().size() == 1) { + where = where.getWheres().getFirst(); + } + explainWhere(codes, where); + String relation = where.getConn().name().equals("AND") ? 
" && " : " || "; + return Joiner.on(relation).join(codes); + } + + private void explainWhere(List codes, Where where) throws SqlParseException { + if (where instanceof Condition) { + Condition condition = (Condition) where; + + if (condition.getValue() instanceof ScriptFilter) { + codes.add("(" + ((ScriptFilter) condition.getValue()).getScript() + ")"); + } else if (condition.getOPERATOR() == Condition.OPERATOR.BETWEEN) { + Object[] objs = (Object[]) condition.getValue(); + codes.add( + "(" + + "doc['" + + condition.getName() + + "'].value >= " + + objs[0] + + " && doc['" + + condition.getName() + + "'].value <=" + + objs[1] + + ")"); + } else { + SQLExpr nameExpr = condition.getNameExpr(); + SQLExpr valueExpr = condition.getValueExpr(); + if (valueExpr instanceof SQLNullExpr) { + codes.add("(" + "doc['" + nameExpr.toString() + "']" + ".empty)"); } else { - for (Where subWhere : where.getWheres()) { - List subCodes = new ArrayList<>(); - explainWhere(subCodes, subWhere); - String relation = subWhere.getConn().name().equals("AND") ? "&&" : "||"; - codes.add(Joiner.on(relation).join(subCodes)); - } + codes.add( + "(" + + Util.getScriptValueWithQuote(nameExpr, "'") + + condition.getOpertatorSymbol() + + Util.getScriptValueWithQuote(valueExpr, "'") + + ")"); } + } + } else { + for (Where subWhere : where.getWheres()) { + List subCodes = new ArrayList<>(); + explainWhere(subCodes, subWhere); + String relation = subWhere.getConn().name().equals("AND") ? 
"&&" : "||"; + codes.add(Joiner.on(relation).join(subCodes)); + } } - + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/ChildrenType.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/ChildrenType.java index 74945cb94f..27374849df 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/ChildrenType.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/ChildrenType.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import com.alibaba.druid.sql.ast.SQLExpr; @@ -16,56 +15,55 @@ import org.opensearch.sql.legacy.exception.SqlParseException; import org.opensearch.sql.legacy.utils.Util; -/** - * Created by Razma Tazz on 14/04/2016. - */ +/** Created by Razma Tazz on 14/04/2016. */ public class ChildrenType { - public String field; - public String childType; - public Where where; - private boolean simple; + public String field; + public String childType; + public Where where; + private boolean simple; - public boolean tryFillFromExpr(SQLExpr expr) throws SqlParseException { - if (!(expr instanceof SQLMethodInvokeExpr)) { - return false; - } - SQLMethodInvokeExpr method = (SQLMethodInvokeExpr) expr; - - String methodName = method.getMethodName(); + public boolean tryFillFromExpr(SQLExpr expr) throws SqlParseException { + if (!(expr instanceof SQLMethodInvokeExpr)) { + return false; + } + SQLMethodInvokeExpr method = (SQLMethodInvokeExpr) expr; - if (!methodName.toLowerCase().equals("children")) { - return false; - } + String methodName = method.getMethodName(); - List parameters = method.getParameters(); + if (!methodName.toLowerCase().equals("children")) { + return false; + } - if (parameters.size() != 2) { - throw new SqlParseException( - "on children object only allowed 2 parameters (type, field)/(type, conditions...) 
"); - } + List parameters = method.getParameters(); - String type = Util.extendedToString(parameters.get(0)); - this.childType = type; + if (parameters.size() != 2) { + throw new SqlParseException( + "on children object only allowed 2 parameters (type, field)/(type, conditions...) "); + } - SQLExpr secondParameter = parameters.get(1); - if (secondParameter instanceof SQLTextLiteralExpr || secondParameter instanceof SQLIdentifierExpr - || secondParameter instanceof SQLPropertyExpr) { - this.field = Util.extendedToString(secondParameter); - this.simple = true; - } else { - Where where = Where.newInstance(); - new WhereParser(new SqlParser()).parseWhere(secondParameter, where); - if (where.getWheres().size() == 0) { - throw new SqlParseException("Failed to parse filter condition"); - } - this.where = where; - simple = false; - } + String type = Util.extendedToString(parameters.get(0)); + this.childType = type; - return true; + SQLExpr secondParameter = parameters.get(1); + if (secondParameter instanceof SQLTextLiteralExpr + || secondParameter instanceof SQLIdentifierExpr + || secondParameter instanceof SQLPropertyExpr) { + this.field = Util.extendedToString(secondParameter); + this.simple = true; + } else { + Where where = Where.newInstance(); + new WhereParser(new SqlParser()).parseWhere(secondParameter, where); + if (where.getWheres().size() == 0) { + throw new SqlParseException("Failed to parse filter condition"); + } + this.where = where; + simple = false; } - public boolean isSimple() { - return simple; - } + return true; + } + + public boolean isSimple() { + return simple; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticLexer.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticLexer.java index 8720c3ba85..67b49fb4ad 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticLexer.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticLexer.java @@ -3,7 +3,6 @@ * 
SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import static com.alibaba.druid.sql.parser.CharTypes.isFirstIdentifierChar; @@ -14,86 +13,82 @@ import com.alibaba.druid.sql.parser.ParserException; import com.alibaba.druid.sql.parser.Token; -/** - * Created by Eliran on 18/8/2015. - */ +/** Created by Eliran on 18/8/2015. */ public class ElasticLexer extends MySqlLexer { - public ElasticLexer(String input) { - super(input); - } + public ElasticLexer(String input) { + super(input); + } + public ElasticLexer(char[] input, int inputLength, boolean skipComment) { + super(input, inputLength, skipComment); + } - public ElasticLexer(char[] input, int inputLength, boolean skipComment) { - super(input, inputLength, skipComment); - } + public void scanIdentifier() { + final char first = ch; + + if (ch == '`') { - public void scanIdentifier() { - final char first = ch; + mark = pos; + bufPos = 1; + char ch; + for (; ; ) { + ch = charAt(++pos); if (ch == '`') { + bufPos++; + ch = charAt(++pos); + break; + } else if (ch == EOI) { + throw new ParserException("illegal identifier"); + } - mark = pos; - bufPos = 1; - char ch; - for (; ; ) { - ch = charAt(++pos); - - if (ch == '`') { - bufPos++; - ch = charAt(++pos); - break; - } else if (ch == EOI) { - throw new ParserException("illegal identifier"); - } - - bufPos++; - continue; - } - - this.ch = charAt(pos); - - stringVal = subString(mark, bufPos); - Token tok = keywods.getKeyword(stringVal); - if (tok != null) { - token = tok; - } else { - token = Token.IDENTIFIER; - } - } else { - - final boolean firstFlag = isFirstIdentifierChar(first); - if (!firstFlag) { - throw new ParserException("illegal identifier"); - } - - mark = pos; - bufPos = 1; - char ch; - for (; ; ) { - ch = charAt(++pos); - - if (!isElasticIdentifierChar(ch)) { - break; - } - - bufPos++; - continue; - } - - this.ch = charAt(pos); - - stringVal = addSymbol(); - Token tok = keywods.getKeyword(stringVal); - if (tok != null) { - 
token = tok; - } else { - token = Token.IDENTIFIER; - } + bufPos++; + continue; + } + + this.ch = charAt(pos); + + stringVal = subString(mark, bufPos); + Token tok = keywods.getKeyword(stringVal); + if (tok != null) { + token = tok; + } else { + token = Token.IDENTIFIER; + } + } else { + + final boolean firstFlag = isFirstIdentifierChar(first); + if (!firstFlag) { + throw new ParserException("illegal identifier"); + } + + mark = pos; + bufPos = 1; + char ch; + for (; ; ) { + ch = charAt(++pos); + + if (!isElasticIdentifierChar(ch)) { + break; } - } + bufPos++; + continue; + } - private boolean isElasticIdentifierChar(char ch) { - return ch == '*' || ch == ':' || ch == '-' || ch == '.' || ch == ';' || isIdentifierChar(ch); + this.ch = charAt(pos); + + stringVal = addSymbol(); + Token tok = keywods.getKeyword(stringVal); + if (tok != null) { + token = tok; + } else { + token = Token.IDENTIFIER; + } } + } + + private boolean isElasticIdentifierChar(char ch) { + return ch == '*' || ch == ':' || ch == '-' || ch == '.' || ch == ';' || isIdentifierChar(ch); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticSqlExprParser.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticSqlExprParser.java index 5f6d03f0ac..be9c2f9652 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticSqlExprParser.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticSqlExprParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import static org.opensearch.sql.legacy.utils.StringUtils.isQuoted; @@ -53,987 +52,1002 @@ import com.alibaba.druid.util.JdbcConstants; import java.util.List; -/** - * Created by Eliran on 18/8/2015. - */ +/** Created by Eliran on 18/8/2015. 
*/ public class ElasticSqlExprParser extends SQLExprParser { - public ElasticSqlExprParser(Lexer lexer) { - super(lexer); - this.aggregateFunctions = AGGREGATE_FUNCTIONS; + public ElasticSqlExprParser(Lexer lexer) { + super(lexer); + this.aggregateFunctions = AGGREGATE_FUNCTIONS; + } + + public ElasticSqlExprParser(String sql) { + this(new ElasticLexer(sql)); + this.lexer.nextToken(); + } + + @SuppressWarnings({"unchecked", "rawtypes"}) + @Override + public void parseHints(List hints) { + while (lexer.token() == Token.HINT) { + hints.add(new SQLCommentHint(lexer.stringVal())); + lexer.nextToken(); } + } - public ElasticSqlExprParser(String sql) { - this(new ElasticLexer(sql)); - this.lexer.nextToken(); + @Override + protected SQLExpr methodRest(SQLExpr expr, boolean acceptLPAREN) { + if (acceptLPAREN) { + accept(Token.LPAREN); } - @SuppressWarnings({"unchecked", "rawtypes"}) - @Override - public void parseHints(List hints) { - while (lexer.token() == Token.HINT) { - hints.add(new SQLCommentHint(lexer.stringVal())); - lexer.nextToken(); - } + if (expr instanceof SQLName || expr instanceof SQLDefaultExpr) { + String methodName; + + SQLMethodInvokeExpr methodInvokeExpr; + if (expr instanceof SQLPropertyExpr) { + methodName = ((SQLPropertyExpr) expr).getName(); + methodInvokeExpr = new SQLMethodInvokeExpr(methodName); + methodInvokeExpr.setOwner(((SQLPropertyExpr) expr).getOwner()); + } else { + methodName = expr.toString(); + methodInvokeExpr = new SQLMethodInvokeExpr(methodName); + } + + if (isAggreateFunction(methodName)) { + SQLAggregateExpr aggregateExpr = parseAggregateExpr(methodName); + + return aggregateExpr; + } + + if (lexer.token() != Token.RPAREN) { + exprList(methodInvokeExpr.getParameters(), methodInvokeExpr); + } + + accept(Token.RPAREN); + + return primaryRest(methodInvokeExpr); } - @Override - protected SQLExpr methodRest(SQLExpr expr, boolean acceptLPAREN) { - if (acceptLPAREN) { - accept(Token.LPAREN); + throw new ParserException("Syntax error: " + 
lexer.token()); + } + + public SQLExpr primary() { + + if (lexer.token() == Token.LBRACE) { + lexer.nextToken(); + boolean foundRBrace = false; + if (lexer.stringVal().equals("ts")) { + String current = lexer.stringVal(); + do { + if (current.equals(lexer.token().RBRACE.name())) { + foundRBrace = true; + break; + } + lexer.nextToken(); + current = lexer.token().name(); + } while (!foundRBrace && !current.trim().equals("")); + + if (foundRBrace) { + SQLOdbcExpr sdle = new SQLOdbcExpr(lexer.stringVal()); + + accept(Token.RBRACE); + return sdle; + } else { + throw new ParserException("Error. Unable to find closing RBRACE"); } + } else { + throw new ParserException("Error. Unable to parse ODBC Literal Timestamp"); + } + } else if (lexer.token() == Token.LBRACKET) { + StringBuilder identifier = new StringBuilder(); + lexer.nextToken(); + String prefix = ""; + while (lexer.token() != Token.RBRACKET) { + if (lexer.token() != Token.IDENTIFIER + && lexer.token() != Token.INDEX + && lexer.token() != Token.LITERAL_CHARS) { + throw new ParserException( + "All items between Brackets should be identifiers , got:" + lexer.token()); + } + identifier.append(prefix); + identifier.append(lexer.stringVal()); + prefix = " "; + lexer.nextToken(); + } + + accept(Token.RBRACKET); + return new SQLIdentifierExpr(identifier.toString()); + } else if (lexer.token() == Token.NOT) { + lexer.nextToken(); + SQLExpr sqlExpr; + if (lexer.token() == Token.EXISTS) { + lexer.nextToken(); + accept(Token.LPAREN); + sqlExpr = new SQLExistsExpr(createSelectParser().select(), true); + accept(Token.RPAREN); + } else if (lexer.token() == Token.LPAREN) { + lexer.nextToken(); - if (expr instanceof SQLName || expr instanceof SQLDefaultExpr) { - String methodName; - - SQLMethodInvokeExpr methodInvokeExpr; - if (expr instanceof SQLPropertyExpr) { - methodName = ((SQLPropertyExpr) expr).getName(); - methodInvokeExpr = new SQLMethodInvokeExpr(methodName); - methodInvokeExpr.setOwner(((SQLPropertyExpr) 
expr).getOwner()); - } else { - methodName = expr.toString(); - methodInvokeExpr = new SQLMethodInvokeExpr(methodName); - } + SQLExpr notTarget = expr(); - if (isAggreateFunction(methodName)) { - SQLAggregateExpr aggregateExpr = parseAggregateExpr(methodName); + accept(Token.RPAREN); - return aggregateExpr; - } + sqlExpr = new SQLNotExpr(notTarget); - if (lexer.token() != Token.RPAREN) { - exprList(methodInvokeExpr.getParameters(), methodInvokeExpr); - } + return primaryRest(sqlExpr); + } else { + SQLExpr restExpr = relational(); + sqlExpr = new SQLNotExpr(restExpr); + } + return sqlExpr; + } - accept(Token.RPAREN); + boolean parenWrapped = lexer.token() == Token.LPAREN; - return primaryRest(methodInvokeExpr); - } + SQLExpr expr = primary2(); - throw new ParserException("Syntax error: " + lexer.token()); + // keep track of if the identifier is wrapped in parens + if (parenWrapped && expr instanceof SQLIdentifierExpr) { + expr = new SQLParensIdentifierExpr((SQLIdentifierExpr) expr); } + return expr; + } - public SQLExpr primary() { + public static String[] AGGREGATE_FUNCTIONS = { + "AVG", "COUNT", "GROUP_CONCAT", "MAX", "MIN", "STDDEV", "SUM" + }; - if (lexer.token() == Token.LBRACE) { - lexer.nextToken(); - boolean foundRBrace = false; - if (lexer.stringVal().equals("ts")) { - String current = lexer.stringVal(); - do { - if (current.equals(lexer.token().RBRACE.name())) { - foundRBrace = true; - break; - } - lexer.nextToken(); - current = lexer.token().name(); - } while (!foundRBrace && !current.trim().equals("")); - - if (foundRBrace) { - SQLOdbcExpr sdle = new SQLOdbcExpr(lexer.stringVal()); - - accept(Token.RBRACE); - return sdle; - } else { - throw new ParserException("Error. Unable to find closing RBRACE"); - } - } else { - throw new ParserException("Error. 
Unable to parse ODBC Literal Timestamp"); - } - } else if (lexer.token() == Token.LBRACKET) { - StringBuilder identifier = new StringBuilder(); - lexer.nextToken(); - String prefix = ""; - while (lexer.token() != Token.RBRACKET) { - if (lexer.token() != Token.IDENTIFIER && lexer.token() != Token.INDEX - && lexer.token() != Token.LITERAL_CHARS) { - throw new ParserException("All items between Brackets should be identifiers , got:" - + lexer.token()); - } - identifier.append(prefix); - identifier.append(lexer.stringVal()); - prefix = " "; - lexer.nextToken(); - } + public SQLExpr relationalRest(SQLExpr expr) { + if (identifierEquals("REGEXP")) { + lexer.nextToken(); + SQLExpr rightExp = equality(); - accept(Token.RBRACKET); - return new SQLIdentifierExpr(identifier.toString()); - } else if (lexer.token() == Token.NOT) { - lexer.nextToken(); - SQLExpr sqlExpr; - if (lexer.token() == Token.EXISTS) { - lexer.nextToken(); - accept(Token.LPAREN); - sqlExpr = new SQLExistsExpr(createSelectParser().select(), true); - accept(Token.RPAREN); - } else if (lexer.token() == Token.LPAREN) { - lexer.nextToken(); + rightExp = relationalRest(rightExp); - SQLExpr notTarget = expr(); + return new SQLBinaryOpExpr(expr, SQLBinaryOperator.RegExp, rightExp, JdbcConstants.MYSQL); + } - accept(Token.RPAREN); + return super.relationalRest(expr); + } - sqlExpr = new SQLNotExpr(notTarget); + public SQLExpr multiplicativeRest(SQLExpr expr) { + if (lexer.token() == Token.IDENTIFIER && "MOD".equalsIgnoreCase(lexer.stringVal())) { + lexer.nextToken(); + SQLExpr rightExp = primary(); - return primaryRest(sqlExpr); - } else { - SQLExpr restExpr = relational(); - sqlExpr = new SQLNotExpr(restExpr); - } - return sqlExpr; - } + rightExp = relationalRest(rightExp); + + return new SQLBinaryOpExpr(expr, SQLBinaryOperator.Modulus, rightExp, JdbcConstants.MYSQL); + } - boolean parenWrapped = lexer.token() == Token.LPAREN; + return super.multiplicativeRest(expr); + } - SQLExpr expr = primary2(); + public 
SQLExpr notRationalRest(SQLExpr expr) { + if (identifierEquals("REGEXP")) { + lexer.nextToken(); + SQLExpr rightExp = primary(); - // keep track of if the identifier is wrapped in parens - if (parenWrapped && expr instanceof SQLIdentifierExpr) { - expr = new SQLParensIdentifierExpr((SQLIdentifierExpr) expr); - } + rightExp = relationalRest(rightExp); - return expr; + return new SQLBinaryOpExpr(expr, SQLBinaryOperator.NotRegExp, rightExp, JdbcConstants.MYSQL); } - public static String[] AGGREGATE_FUNCTIONS = {"AVG", "COUNT", "GROUP_CONCAT", "MAX", "MIN", "STDDEV", "SUM"}; + return super.notRationalRest(expr); + } + public SQLExpr primary2() { + final Token tok = lexer.token(); - public SQLExpr relationalRest(SQLExpr expr) { - if (identifierEquals("REGEXP")) { - lexer.nextToken(); - SQLExpr rightExp = equality(); + if (identifierEquals("outfile")) { + lexer.nextToken(); + SQLExpr file = primary(); + SQLExpr expr = new MySqlOutFileExpr(file); - rightExp = relationalRest(rightExp); + return primaryRest(expr); + } - return new SQLBinaryOpExpr(expr, SQLBinaryOperator.RegExp, rightExp, JdbcConstants.MYSQL); + switch (tok) { + case LITERAL_ALIAS: + String aliasValue = lexer.stringVal(); + lexer.nextToken(); + return primaryRest(new SQLCharExpr(aliasValue)); + case VARIANT: + SQLVariantRefExpr varRefExpr = new SQLVariantRefExpr(lexer.stringVal()); + lexer.nextToken(); + if (varRefExpr.getName().equalsIgnoreCase("@@global")) { + accept(Token.DOT); + varRefExpr = new SQLVariantRefExpr(lexer.stringVal(), true); + lexer.nextToken(); + } else if (varRefExpr.getName().equals("@") && lexer.token() == Token.LITERAL_CHARS) { + varRefExpr.setName("@'" + lexer.stringVal() + "'"); + lexer.nextToken(); + } else if (varRefExpr.getName().equals("@@") && lexer.token() == Token.LITERAL_CHARS) { + varRefExpr.setName("@@'" + lexer.stringVal() + "'"); + lexer.nextToken(); + } + return primaryRest(varRefExpr); + case VALUES: + lexer.nextToken(); + if (lexer.token() != Token.LPAREN) { + throw 
new ParserException("Syntax error: " + lexer.token()); + } + return this.methodRest(new SQLIdentifierExpr("VALUES"), true); + case BINARY: + lexer.nextToken(); + if (lexer.token() == Token.COMMA + || lexer.token() == Token.SEMI + || lexer.token() == Token.EOF) { + return new SQLIdentifierExpr("BINARY"); + } else { + SQLUnaryExpr binaryExpr = new SQLUnaryExpr(SQLUnaryOperator.BINARY, expr()); + return primaryRest(binaryExpr); } + case CACHE: + case GROUP: + lexer.nextToken(); + return primaryRest(new SQLIdentifierExpr(lexer.stringVal())); + case DOT: + lexer.nextToken(); + return primaryRest(new SQLIdentifierExpr("." + lexer.stringVal())); + default: + return super.primary(); + } + } - return super.relationalRest(expr); + public final SQLExpr primaryRest(SQLExpr expr) { + if (expr == null) { + throw new IllegalArgumentException("Illegal expression: NULL"); } - public SQLExpr multiplicativeRest(SQLExpr expr) { - if (lexer.token() == Token.IDENTIFIER && "MOD".equalsIgnoreCase(lexer.stringVal())) { + if (lexer.token() == Token.LITERAL_CHARS) { + if (expr instanceof SQLIdentifierExpr) { + SQLIdentifierExpr identExpr = (SQLIdentifierExpr) expr; + String ident = identExpr.getName(); + + if (ident.equalsIgnoreCase("x")) { + String charValue = lexer.stringVal(); + lexer.nextToken(); + expr = new SQLHexExpr(charValue); + + return primaryRest(expr); + } else if (ident.equalsIgnoreCase("b")) { + String charValue = lexer.stringVal(); + lexer.nextToken(); + expr = new SQLBinaryExpr(charValue); + + return primaryRest(expr); + } else if (ident.startsWith("_")) { + String charValue = lexer.stringVal(); + lexer.nextToken(); + + MySqlCharExpr mysqlCharExpr = new MySqlCharExpr(charValue); + mysqlCharExpr.setCharset(identExpr.getName()); + if (identifierEquals("COLLATE")) { lexer.nextToken(); - SQLExpr rightExp = primary(); - rightExp = relationalRest(rightExp); + String collate = lexer.stringVal(); + mysqlCharExpr.setCollate(collate); + accept(Token.IDENTIFIER); + } + + expr = 
mysqlCharExpr; - return new SQLBinaryOpExpr(expr, SQLBinaryOperator.Modulus, rightExp, JdbcConstants.MYSQL); + return primaryRest(expr); } + } else if (expr instanceof SQLCharExpr) { + SQLMethodInvokeExpr concat = new SQLMethodInvokeExpr("CONCAT"); + concat.addParameter(expr); + do { + String chars = lexer.stringVal(); + concat.addParameter(new SQLCharExpr(chars)); + lexer.nextToken(); + } while (lexer.token() == Token.LITERAL_CHARS || lexer.token() == Token.LITERAL_ALIAS); + expr = concat; + } + } else if (lexer.token() == Token.IDENTIFIER) { + if (expr instanceof SQLHexExpr) { + if ("USING".equalsIgnoreCase(lexer.stringVal())) { + lexer.nextToken(); + if (lexer.token() != Token.IDENTIFIER) { + throw new ParserException("Syntax error: " + lexer.token()); + } + String charSet = lexer.stringVal(); + lexer.nextToken(); + expr.getAttributes().put("USING", charSet); + + return primaryRest(expr); + } + } else if ("COLLATE".equalsIgnoreCase(lexer.stringVal())) { + lexer.nextToken(); - return super.multiplicativeRest(expr); - } + if (lexer.token() == Token.EQ) { + lexer.nextToken(); + } - public SQLExpr notRationalRest(SQLExpr expr) { - if (identifierEquals("REGEXP")) { - lexer.nextToken(); - SQLExpr rightExp = primary(); + if (lexer.token() != Token.IDENTIFIER) { + throw new ParserException("Syntax error: " + lexer.token()); + } - rightExp = relationalRest(rightExp); + String collate = lexer.stringVal(); + lexer.nextToken(); - return new SQLBinaryOpExpr(expr, SQLBinaryOperator.NotRegExp, rightExp, JdbcConstants.MYSQL); - } + expr = + new SQLBinaryOpExpr( + expr, + SQLBinaryOperator.COLLATE, + new SQLIdentifierExpr(collate), + JdbcConstants.MYSQL); - return super.notRationalRest(expr); - } + return primaryRest(expr); + } else if (expr instanceof SQLVariantRefExpr) { + if ("COLLATE".equalsIgnoreCase(lexer.stringVal())) { + lexer.nextToken(); - public SQLExpr primary2() { - final Token tok = lexer.token(); + if (lexer.token() != Token.IDENTIFIER) { + throw new 
ParserException("Syntax error: " + lexer.token()); + } - if (identifierEquals("outfile")) { - lexer.nextToken(); - SQLExpr file = primary(); - SQLExpr expr = new MySqlOutFileExpr(file); + String collate = lexer.stringVal(); + lexer.nextToken(); - return primaryRest(expr); + expr.putAttribute("COLLATE", collate); + return primaryRest(expr); } - - switch (tok) { - case LITERAL_ALIAS: - String aliasValue = lexer.stringVal(); - lexer.nextToken(); - return primaryRest(new SQLCharExpr(aliasValue)); - case VARIANT: - SQLVariantRefExpr varRefExpr = new SQLVariantRefExpr(lexer.stringVal()); - lexer.nextToken(); - if (varRefExpr.getName().equalsIgnoreCase("@@global")) { - accept(Token.DOT); - varRefExpr = new SQLVariantRefExpr(lexer.stringVal(), true); - lexer.nextToken(); - } else if (varRefExpr.getName().equals("@") && lexer.token() == Token.LITERAL_CHARS) { - varRefExpr.setName("@'" + lexer.stringVal() + "'"); - lexer.nextToken(); - } else if (varRefExpr.getName().equals("@@") && lexer.token() == Token.LITERAL_CHARS) { - varRefExpr.setName("@@'" + lexer.stringVal() + "'"); - lexer.nextToken(); - } - return primaryRest(varRefExpr); - case VALUES: - lexer.nextToken(); - if (lexer.token() != Token.LPAREN) { - throw new ParserException("Syntax error: " + lexer.token()); - } - return this.methodRest(new SQLIdentifierExpr("VALUES"), true); - case BINARY: - lexer.nextToken(); - if (lexer.token() == Token.COMMA || lexer.token() == Token.SEMI || lexer.token() == Token.EOF) { - return new SQLIdentifierExpr("BINARY"); - } else { - SQLUnaryExpr binaryExpr = new SQLUnaryExpr(SQLUnaryOperator.BINARY, expr()); - return primaryRest(binaryExpr); - } - case CACHE: - case GROUP: - lexer.nextToken(); - return primaryRest(new SQLIdentifierExpr(lexer.stringVal())); - case DOT: - lexer.nextToken(); - return primaryRest(new SQLIdentifierExpr("." 
+ lexer.stringVal())); - default: - return super.primary(); + } else if (expr instanceof SQLIntegerExpr) { + SQLIntegerExpr intExpr = (SQLIntegerExpr) expr; + String binaryString = lexer.stringVal(); + if (intExpr.getNumber().intValue() == 0 && binaryString.startsWith("b")) { + lexer.nextToken(); + expr = new SQLBinaryExpr(binaryString.substring(1)); + + return primaryRest(expr); } - + } } + if (lexer.token() == Token.LPAREN && expr instanceof SQLIdentifierExpr) { + SQLIdentifierExpr identExpr = (SQLIdentifierExpr) expr; + String ident = identExpr.getName(); - public final SQLExpr primaryRest(SQLExpr expr) { - if (expr == null) { - throw new IllegalArgumentException("Illegal expression: NULL"); - } + if ("EXTRACT".equalsIgnoreCase(ident)) { + lexer.nextToken(); - if (lexer.token() == Token.LITERAL_CHARS) { - if (expr instanceof SQLIdentifierExpr) { - SQLIdentifierExpr identExpr = (SQLIdentifierExpr) expr; - String ident = identExpr.getName(); - - if (ident.equalsIgnoreCase("x")) { - String charValue = lexer.stringVal(); - lexer.nextToken(); - expr = new SQLHexExpr(charValue); - - return primaryRest(expr); - } else if (ident.equalsIgnoreCase("b")) { - String charValue = lexer.stringVal(); - lexer.nextToken(); - expr = new SQLBinaryExpr(charValue); - - return primaryRest(expr); - } else if (ident.startsWith("_")) { - String charValue = lexer.stringVal(); - lexer.nextToken(); - - MySqlCharExpr mysqlCharExpr = new MySqlCharExpr(charValue); - mysqlCharExpr.setCharset(identExpr.getName()); - if (identifierEquals("COLLATE")) { - lexer.nextToken(); - - String collate = lexer.stringVal(); - mysqlCharExpr.setCollate(collate); - accept(Token.IDENTIFIER); - } - - expr = mysqlCharExpr; - - return primaryRest(expr); - } - } else if (expr instanceof SQLCharExpr) { - SQLMethodInvokeExpr concat = new SQLMethodInvokeExpr("CONCAT"); - concat.addParameter(expr); - do { - String chars = lexer.stringVal(); - concat.addParameter(new SQLCharExpr(chars)); - lexer.nextToken(); - } while 
(lexer.token() == Token.LITERAL_CHARS || lexer.token() == Token.LITERAL_ALIAS); - expr = concat; - } - } else if (lexer.token() == Token.IDENTIFIER) { - if (expr instanceof SQLHexExpr) { - if ("USING".equalsIgnoreCase(lexer.stringVal())) { - lexer.nextToken(); - if (lexer.token() != Token.IDENTIFIER) { - throw new ParserException("Syntax error: " + lexer.token()); - } - String charSet = lexer.stringVal(); - lexer.nextToken(); - expr.getAttributes().put("USING", charSet); - - return primaryRest(expr); - } - } else if ("COLLATE".equalsIgnoreCase(lexer.stringVal())) { - lexer.nextToken(); - - if (lexer.token() == Token.EQ) { - lexer.nextToken(); - } - - if (lexer.token() != Token.IDENTIFIER) { - throw new ParserException("Syntax error: " + lexer.token()); - } - - String collate = lexer.stringVal(); - lexer.nextToken(); - - expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.COLLATE, - new SQLIdentifierExpr(collate), JdbcConstants.MYSQL); - - return primaryRest(expr); - } else if (expr instanceof SQLVariantRefExpr) { - if ("COLLATE".equalsIgnoreCase(lexer.stringVal())) { - lexer.nextToken(); - - if (lexer.token() != Token.IDENTIFIER) { - throw new ParserException("Syntax error: " + lexer.token()); - } - - String collate = lexer.stringVal(); - lexer.nextToken(); - - expr.putAttribute("COLLATE", collate); - - return primaryRest(expr); - } - } else if (expr instanceof SQLIntegerExpr) { - SQLIntegerExpr intExpr = (SQLIntegerExpr) expr; - String binaryString = lexer.stringVal(); - if (intExpr.getNumber().intValue() == 0 && binaryString.startsWith("b")) { - lexer.nextToken(); - expr = new SQLBinaryExpr(binaryString.substring(1)); - - return primaryRest(expr); - } - } + if (lexer.token() != Token.IDENTIFIER) { + throw new ParserException("Syntax error: " + lexer.token()); } - if (lexer.token() == Token.LPAREN && expr instanceof SQLIdentifierExpr) { - SQLIdentifierExpr identExpr = (SQLIdentifierExpr) expr; - String ident = identExpr.getName(); - - if 
("EXTRACT".equalsIgnoreCase(ident)) { - lexer.nextToken(); - - if (lexer.token() != Token.IDENTIFIER) { - throw new ParserException("Syntax error: " + lexer.token()); - } - - String unitVal = lexer.stringVal(); - MySqlIntervalUnit unit = MySqlIntervalUnit.valueOf(unitVal.toUpperCase()); - lexer.nextToken(); - - accept(Token.FROM); - - SQLExpr value = expr(); - - MySqlExtractExpr extract = new MySqlExtractExpr(); - extract.setValue(value); - extract.setUnit(unit); - accept(Token.RPAREN); - - expr = extract; - - return primaryRest(expr); - } else if ("SUBSTRING".equalsIgnoreCase(ident)) { - lexer.nextToken(); - SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr(ident); - for (; ; ) { - SQLExpr param = expr(); - methodInvokeExpr.addParameter(param); - - if (lexer.token() == Token.COMMA) { - lexer.nextToken(); - continue; - } else if (lexer.token() == Token.FROM) { - lexer.nextToken(); - SQLExpr from = expr(); - methodInvokeExpr.addParameter(from); - - if (lexer.token() == Token.FOR) { - lexer.nextToken(); - SQLExpr forExpr = expr(); - methodInvokeExpr.addParameter(forExpr); - } - break; - } else if (lexer.token() == Token.RPAREN) { - break; - } else { - throw new ParserException("Syntax error: " + lexer.token()); - } - } - - accept(Token.RPAREN); - expr = methodInvokeExpr; - - return primaryRest(expr); - } else if ("TRIM".equalsIgnoreCase(ident)) { - lexer.nextToken(); - SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr(ident); - - if (lexer.token() == Token.IDENTIFIER) { - String flagVal = lexer.stringVal(); - if ("LEADING".equalsIgnoreCase(flagVal)) { - lexer.nextToken(); - methodInvokeExpr.getAttributes().put("TRIM_TYPE", "LEADING"); - } else if ("BOTH".equalsIgnoreCase(flagVal)) { - lexer.nextToken(); - methodInvokeExpr.getAttributes().put("TRIM_TYPE", "BOTH"); - } else if ("TRAILING".equalsIgnoreCase(flagVal)) { - lexer.nextToken(); - methodInvokeExpr.putAttribute("TRIM_TYPE", "TRAILING"); - } - } - - SQLExpr param = expr(); - 
methodInvokeExpr.addParameter(param); - - if (lexer.token() == Token.FROM) { - lexer.nextToken(); - SQLExpr from = expr(); - methodInvokeExpr.putAttribute("FROM", from); - } - - accept(Token.RPAREN); - expr = methodInvokeExpr; - - return primaryRest(expr); - } else if ("MATCH".equalsIgnoreCase(ident)) { - lexer.nextToken(); - MySqlMatchAgainstExpr matchAgainstExpr = new MySqlMatchAgainstExpr(); - - if (lexer.token() == Token.RPAREN) { - lexer.nextToken(); - } else { - exprList(matchAgainstExpr.getColumns(), matchAgainstExpr); - accept(Token.RPAREN); - } - - acceptIdentifier("AGAINST"); - - accept(Token.LPAREN); - SQLExpr against = primary(); - matchAgainstExpr.setAgainst(against); - - if (lexer.token() == Token.IN) { - lexer.nextToken(); - if (identifierEquals("NATURAL")) { - lexer.nextToken(); - acceptIdentifier("LANGUAGE"); - acceptIdentifier("MODE"); - if (lexer.token() == Token.WITH) { - lexer.nextToken(); - acceptIdentifier("QUERY"); - acceptIdentifier("EXPANSION"); - matchAgainstExpr.setSearchModifier( - MySqlMatchAgainstExpr.SearchModifier.IN_NATURAL_LANGUAGE_MODE_WITH_QUERY_EXPANSION); - } else { - matchAgainstExpr.setSearchModifier( - MySqlMatchAgainstExpr.SearchModifier.IN_NATURAL_LANGUAGE_MODE); - } - } else if (identifierEquals("BOOLEAN")) { - lexer.nextToken(); - acceptIdentifier("MODE"); - matchAgainstExpr.setSearchModifier(MySqlMatchAgainstExpr.SearchModifier.IN_BOOLEAN_MODE); - } else { - throw new ParserException("Syntax error: " + lexer.token()); - } - } else if (lexer.token() == Token.WITH) { - throw new ParserException("Syntax error: " + lexer.token()); - } - - accept(Token.RPAREN); - - expr = matchAgainstExpr; - - return primaryRest(expr); - } else if ("CONVERT".equalsIgnoreCase(ident)) { - lexer.nextToken(); - SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr(ident); - - if (lexer.token() != Token.RPAREN) { - exprList(methodInvokeExpr.getParameters(), methodInvokeExpr); - } - - if (identifierEquals("USING")) { - lexer.nextToken(); 
- if (lexer.token() != Token.IDENTIFIER) { - throw new ParserException("Syntax error: " + lexer.token()); - } - String charset = lexer.stringVal(); - lexer.nextToken(); - methodInvokeExpr.putAttribute("USING", charset); - } - - accept(Token.RPAREN); - - expr = methodInvokeExpr; - - return primaryRest(expr); - } else if ("POSITION".equalsIgnoreCase(ident)) { - accept(Token.LPAREN); - SQLExpr subStr = this.primary(); - accept(Token.IN); - SQLExpr str = this.expr(); - accept(Token.RPAREN); - - SQLMethodInvokeExpr locate = new SQLMethodInvokeExpr("LOCATE"); - locate.addParameter(subStr); - locate.addParameter(str); - - expr = locate; - return primaryRest(expr); - } - } + String unitVal = lexer.stringVal(); + MySqlIntervalUnit unit = MySqlIntervalUnit.valueOf(unitVal.toUpperCase()); + lexer.nextToken(); + + accept(Token.FROM); + + SQLExpr value = expr(); + + MySqlExtractExpr extract = new MySqlExtractExpr(); + extract.setValue(value); + extract.setUnit(unit); + accept(Token.RPAREN); + + expr = extract; + + return primaryRest(expr); + } else if ("SUBSTRING".equalsIgnoreCase(ident)) { + lexer.nextToken(); + SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr(ident); + for (; ; ) { + SQLExpr param = expr(); + methodInvokeExpr.addParameter(param); - if (lexer.token() == Token.VARIANT && "@".equals(lexer.stringVal())) { + if (lexer.token() == Token.COMMA) { lexer.nextToken(); - MySqlUserName userName = new MySqlUserName(); - if (expr instanceof SQLCharExpr) { - userName.setUserName(((SQLCharExpr) expr).toString()); - } else { - userName.setUserName(((SQLIdentifierExpr) expr).getName()); - } + continue; + } else if (lexer.token() == Token.FROM) { + lexer.nextToken(); + SQLExpr from = expr(); + methodInvokeExpr.addParameter(from); - if (lexer.token() == Token.LITERAL_CHARS) { - userName.setHost("'" + lexer.stringVal() + "'"); - } else { - userName.setHost(lexer.stringVal()); + if (lexer.token() == Token.FOR) { + lexer.nextToken(); + SQLExpr forExpr = expr(); + 
methodInvokeExpr.addParameter(forExpr); } - lexer.nextToken(); - return userName; + break; + } else if (lexer.token() == Token.RPAREN) { + break; + } else { + throw new ParserException("Syntax error: " + lexer.token()); + } } - // - if (expr instanceof SQLMethodInvokeExpr && lexer.token() == Token.LBRACKET) { + accept(Token.RPAREN); + expr = methodInvokeExpr; + + return primaryRest(expr); + } else if ("TRIM".equalsIgnoreCase(ident)) { + lexer.nextToken(); + SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr(ident); + + if (lexer.token() == Token.IDENTIFIER) { + String flagVal = lexer.stringVal(); + if ("LEADING".equalsIgnoreCase(flagVal)) { + lexer.nextToken(); + methodInvokeExpr.getAttributes().put("TRIM_TYPE", "LEADING"); + } else if ("BOTH".equalsIgnoreCase(flagVal)) { + lexer.nextToken(); + methodInvokeExpr.getAttributes().put("TRIM_TYPE", "BOTH"); + } else if ("TRAILING".equalsIgnoreCase(flagVal)) { lexer.nextToken(); - expr = bracketRest(expr); - return primaryRest(expr); + methodInvokeExpr.putAttribute("TRIM_TYPE", "TRAILING"); + } } - if (lexer.token() == Token.ERROR) { - throw new ParserException("Syntax error, token: " + lexer.token() + " " + lexer.stringVal() + ", pos: " - + lexer.pos()); - } + SQLExpr param = expr(); + methodInvokeExpr.addParameter(param); - /** - * When the druid parser parses the quoted field in SELECT clause, e.g. SELECT `b`.`lastname` FROM bank AS `b`, - * "`b`" is recognized as an identifier expr, and the token is DOT, then the next identifier "`lastname`" would - * be recognized as the property name of "`b`". The parser creates a SQLPropertyExpr with owner of "`b`" and - * property name of "`lastname`". - * - * The following block of code prevents this specific case to generate SQLPropertyExpr, but corrects the parser - * to generate a SQLIdentifierExpr with expr = "`b`.`lastname`". 
- */ - if (lexer.token() == Token.DOT && expr instanceof SQLIdentifierExpr) { - if (isQuoted(((SQLIdentifierExpr) expr).getName(), "`")) { - lexer.nextToken(); - ((SQLIdentifierExpr) expr).setName(((SQLIdentifierExpr) expr).getName() + "." + lexer.stringVal()); - lexer.nextToken(); - } + if (lexer.token() == Token.FROM) { + lexer.nextToken(); + SQLExpr from = expr(); + methodInvokeExpr.putAttribute("FROM", from); } - return super.primaryRest(expr); - } + accept(Token.RPAREN); + expr = methodInvokeExpr; - protected SQLExpr bracketRest(SQLExpr expr) { - Number index; + return primaryRest(expr); + } else if ("MATCH".equalsIgnoreCase(ident)) { + lexer.nextToken(); + MySqlMatchAgainstExpr matchAgainstExpr = new MySqlMatchAgainstExpr(); - if (lexer.token() == Token.LITERAL_INT) { - index = lexer.integerValue(); - lexer.nextToken(); + if (lexer.token() == Token.RPAREN) { + lexer.nextToken(); } else { - throw new ParserException("Syntax error : " + lexer.stringVal()); + exprList(matchAgainstExpr.getColumns(), matchAgainstExpr); + accept(Token.RPAREN); } - if (expr instanceof SQLMethodInvokeExpr) { - SQLMethodInvokeExpr methodInvokeExpr = (SQLMethodInvokeExpr) expr; - methodInvokeExpr.getParameters().add(new SQLIntegerExpr(index)); + acceptIdentifier("AGAINST"); + + accept(Token.LPAREN); + SQLExpr against = primary(); + matchAgainstExpr.setAgainst(against); + + if (lexer.token() == Token.IN) { + lexer.nextToken(); + if (identifierEquals("NATURAL")) { + lexer.nextToken(); + acceptIdentifier("LANGUAGE"); + acceptIdentifier("MODE"); + if (lexer.token() == Token.WITH) { + lexer.nextToken(); + acceptIdentifier("QUERY"); + acceptIdentifier("EXPANSION"); + matchAgainstExpr.setSearchModifier( + MySqlMatchAgainstExpr.SearchModifier + .IN_NATURAL_LANGUAGE_MODE_WITH_QUERY_EXPANSION); + } else { + matchAgainstExpr.setSearchModifier( + MySqlMatchAgainstExpr.SearchModifier.IN_NATURAL_LANGUAGE_MODE); + } + } else if (identifierEquals("BOOLEAN")) { + lexer.nextToken(); + 
acceptIdentifier("MODE"); + matchAgainstExpr.setSearchModifier( + MySqlMatchAgainstExpr.SearchModifier.IN_BOOLEAN_MODE); + } else { + throw new ParserException("Syntax error: " + lexer.token()); + } + } else if (lexer.token() == Token.WITH) { + throw new ParserException("Syntax error: " + lexer.token()); } + + accept(Token.RPAREN); + + expr = matchAgainstExpr; + + return primaryRest(expr); + } else if ("CONVERT".equalsIgnoreCase(ident)) { lexer.nextToken(); - expr = primaryRest(expr); - return expr; - } + SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr(ident); - public SQLSelectParser createSelectParser() { - return new ElasticSqlSelectParser(this); - } + if (lexer.token() != Token.RPAREN) { + exprList(methodInvokeExpr.getParameters(), methodInvokeExpr); + } - protected SQLExpr parseInterval() { - accept(Token.INTERVAL); + if (identifierEquals("USING")) { + lexer.nextToken(); + if (lexer.token() != Token.IDENTIFIER) { + throw new ParserException("Syntax error: " + lexer.token()); + } + String charset = lexer.stringVal(); + lexer.nextToken(); + methodInvokeExpr.putAttribute("USING", charset); + } - if (lexer.token() == Token.LPAREN) { - lexer.nextToken(); + accept(Token.RPAREN); - SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr("INTERVAL"); - if (lexer.token() != Token.RPAREN) { - exprList(methodInvokeExpr.getParameters(), methodInvokeExpr); - } + expr = methodInvokeExpr; - accept(Token.RPAREN); + return primaryRest(expr); + } else if ("POSITION".equalsIgnoreCase(ident)) { + accept(Token.LPAREN); + SQLExpr subStr = this.primary(); + accept(Token.IN); + SQLExpr str = this.expr(); + accept(Token.RPAREN); - return primaryRest(methodInvokeExpr); - } else { - SQLExpr value = expr(); + SQLMethodInvokeExpr locate = new SQLMethodInvokeExpr("LOCATE"); + locate.addParameter(subStr); + locate.addParameter(str); - if (lexer.token() != Token.IDENTIFIER) { - throw new ParserException("Syntax error: " + lexer.token()); - } + expr = locate; + return 
primaryRest(expr); + } + } - String unit = lexer.stringVal(); - lexer.nextToken(); + if (lexer.token() == Token.VARIANT && "@".equals(lexer.stringVal())) { + lexer.nextToken(); + MySqlUserName userName = new MySqlUserName(); + if (expr instanceof SQLCharExpr) { + userName.setUserName(((SQLCharExpr) expr).toString()); + } else { + userName.setUserName(((SQLIdentifierExpr) expr).getName()); + } + + if (lexer.token() == Token.LITERAL_CHARS) { + userName.setHost("'" + lexer.stringVal() + "'"); + } else { + userName.setHost(lexer.stringVal()); + } + lexer.nextToken(); + return userName; + } - MySqlIntervalExpr intervalExpr = new MySqlIntervalExpr(); - intervalExpr.setValue(value); - intervalExpr.setUnit(MySqlIntervalUnit.valueOf(unit.toUpperCase())); + // + if (expr instanceof SQLMethodInvokeExpr && lexer.token() == Token.LBRACKET) { + lexer.nextToken(); + expr = bracketRest(expr); + return primaryRest(expr); + } - return intervalExpr; - } + if (lexer.token() == Token.ERROR) { + throw new ParserException( + "Syntax error, token: " + + lexer.token() + + " " + + lexer.stringVal() + + ", pos: " + + lexer.pos()); + } + + /** + * When the druid parser parses the quoted field in SELECT clause, e.g. SELECT `b`.`lastname` + * FROM bank AS `b`, "`b`" is recognized as an identifier expr, and the token is DOT, then the + * next identifier "`lastname`" would be recognized as the property name of "`b`". The parser + * creates a SQLPropertyExpr with owner of "`b`" and property name of "`lastname`". + * + *

The following block of code prevents this specific case to generate SQLPropertyExpr, but + * corrects the parser to generate a SQLIdentifierExpr with expr = "`b`.`lastname`". + */ + if (lexer.token() == Token.DOT && expr instanceof SQLIdentifierExpr) { + if (isQuoted(((SQLIdentifierExpr) expr).getName(), "`")) { + lexer.nextToken(); + ((SQLIdentifierExpr) expr) + .setName(((SQLIdentifierExpr) expr).getName() + "." + lexer.stringVal()); + lexer.nextToken(); + } } - public SQLColumnDefinition parseColumn() { - MySqlSQLColumnDefinition column = new MySqlSQLColumnDefinition(); - column.setName(name()); - column.setDataType(parseDataType()); + return super.primaryRest(expr); + } + + protected SQLExpr bracketRest(SQLExpr expr) { + Number index; - return parseColumnRest(column); + if (lexer.token() == Token.LITERAL_INT) { + index = lexer.integerValue(); + lexer.nextToken(); + } else { + throw new ParserException("Syntax error : " + lexer.stringVal()); } - public SQLColumnDefinition parseColumnRest(SQLColumnDefinition column) { - if (lexer.token() == Token.ON) { - lexer.nextToken(); - accept(Token.UPDATE); - SQLExpr expr = this.expr(); - ((MySqlSQLColumnDefinition) column).setOnUpdate(expr); - } + if (expr instanceof SQLMethodInvokeExpr) { + SQLMethodInvokeExpr methodInvokeExpr = (SQLMethodInvokeExpr) expr; + methodInvokeExpr.getParameters().add(new SQLIntegerExpr(index)); + } + lexer.nextToken(); + expr = primaryRest(expr); + return expr; + } - if (identifierEquals("AUTO_INCREMENT")) { - lexer.nextToken(); - if (column instanceof MySqlSQLColumnDefinition) { - ((MySqlSQLColumnDefinition) column).setAutoIncrement(true); - } - return parseColumnRest(column); - } + public SQLSelectParser createSelectParser() { + return new ElasticSqlSelectParser(this); + } - if (identifierEquals("precision") && column.getDataType().getName().equalsIgnoreCase("double")) { - lexer.nextToken(); - } + protected SQLExpr parseInterval() { + accept(Token.INTERVAL); - if 
(identifierEquals("PARTITION")) { - throw new ParserException("syntax error " + lexer.token() + " " + lexer.stringVal()); - } + if (lexer.token() == Token.LPAREN) { + lexer.nextToken(); - if (identifierEquals("STORAGE")) { - lexer.nextToken(); - SQLExpr expr = expr(); - if (column instanceof MySqlSQLColumnDefinition) { - ((MySqlSQLColumnDefinition) column).setStorage(expr); - } - } + SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr("INTERVAL"); + if (lexer.token() != Token.RPAREN) { + exprList(methodInvokeExpr.getParameters(), methodInvokeExpr); + } - super.parseColumnRest(column); + accept(Token.RPAREN); - return column; + return primaryRest(methodInvokeExpr); + } else { + SQLExpr value = expr(); + + if (lexer.token() != Token.IDENTIFIER) { + throw new ParserException("Syntax error: " + lexer.token()); + } + + String unit = lexer.stringVal(); + lexer.nextToken(); + + MySqlIntervalExpr intervalExpr = new MySqlIntervalExpr(); + intervalExpr.setValue(value); + intervalExpr.setUnit(MySqlIntervalUnit.valueOf(unit.toUpperCase())); + + return intervalExpr; + } + } + + public SQLColumnDefinition parseColumn() { + MySqlSQLColumnDefinition column = new MySqlSQLColumnDefinition(); + column.setName(name()); + column.setDataType(parseDataType()); + + return parseColumnRest(column); + } + + public SQLColumnDefinition parseColumnRest(SQLColumnDefinition column) { + if (lexer.token() == Token.ON) { + lexer.nextToken(); + accept(Token.UPDATE); + SQLExpr expr = this.expr(); + ((MySqlSQLColumnDefinition) column).setOnUpdate(expr); } - protected SQLDataType parseDataTypeRest(SQLDataType dataType) { - super.parseDataTypeRest(dataType); + if (identifierEquals("AUTO_INCREMENT")) { + lexer.nextToken(); + if (column instanceof MySqlSQLColumnDefinition) { + ((MySqlSQLColumnDefinition) column).setAutoIncrement(true); + } + return parseColumnRest(column); + } - if (identifierEquals("UNSIGNED")) { - lexer.nextToken(); - dataType.getAttributes().put("UNSIGNED", true); - } + if 
(identifierEquals("precision") + && column.getDataType().getName().equalsIgnoreCase("double")) { + lexer.nextToken(); + } - if (identifierEquals("ZEROFILL")) { - lexer.nextToken(); - dataType.getAttributes().put("ZEROFILL", true); - } + if (identifierEquals("PARTITION")) { + throw new ParserException("syntax error " + lexer.token() + " " + lexer.stringVal()); + } - return dataType; + if (identifierEquals("STORAGE")) { + lexer.nextToken(); + SQLExpr expr = expr(); + if (column instanceof MySqlSQLColumnDefinition) { + ((MySqlSQLColumnDefinition) column).setStorage(expr); + } } - public SQLExpr orRest(SQLExpr expr) { + super.parseColumnRest(column); - for (; ; ) { - if (lexer.token() == Token.OR || lexer.token() == Token.BARBAR) { - lexer.nextToken(); - SQLExpr rightExp = and(); + return column; + } - expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.BooleanOr, rightExp, JdbcConstants.MYSQL); - } else if (lexer.token() == Token.XOR) { - lexer.nextToken(); - SQLExpr rightExp = and(); + protected SQLDataType parseDataTypeRest(SQLDataType dataType) { + super.parseDataTypeRest(dataType); - expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.BooleanXor, rightExp, JdbcConstants.MYSQL); - } else { - break; - } - } + if (identifierEquals("UNSIGNED")) { + lexer.nextToken(); + dataType.getAttributes().put("UNSIGNED", true); + } - return expr; + if (identifierEquals("ZEROFILL")) { + lexer.nextToken(); + dataType.getAttributes().put("ZEROFILL", true); } - public SQLExpr additiveRest(SQLExpr expr) { - if (lexer.token() == Token.PLUS) { - lexer.nextToken(); - SQLExpr rightExp = multiplicative(); + return dataType; + } - expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.Add, rightExp, JdbcConstants.MYSQL); - expr = additiveRest(expr); - } else if (lexer.token() == Token.SUB) { - lexer.nextToken(); - SQLExpr rightExp = multiplicative(); + public SQLExpr orRest(SQLExpr expr) { - expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.Subtract, rightExp, JdbcConstants.MYSQL); - expr = 
additiveRest(expr); - } + for (; ; ) { + if (lexer.token() == Token.OR || lexer.token() == Token.BARBAR) { + lexer.nextToken(); + SQLExpr rightExp = and(); + + expr = + new SQLBinaryOpExpr(expr, SQLBinaryOperator.BooleanOr, rightExp, JdbcConstants.MYSQL); + } else if (lexer.token() == Token.XOR) { + lexer.nextToken(); + SQLExpr rightExp = and(); - return expr; + expr = + new SQLBinaryOpExpr(expr, SQLBinaryOperator.BooleanXor, rightExp, JdbcConstants.MYSQL); + } else { + break; + } } - public SQLAssignItem parseAssignItem() { - SQLAssignItem item = new SQLAssignItem(); + return expr; + } - SQLExpr var = primary(); + public SQLExpr additiveRest(SQLExpr expr) { + if (lexer.token() == Token.PLUS) { + lexer.nextToken(); + SQLExpr rightExp = multiplicative(); - String ident = null; - if (var instanceof SQLIdentifierExpr) { - ident = ((SQLIdentifierExpr) var).getName(); + expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.Add, rightExp, JdbcConstants.MYSQL); + expr = additiveRest(expr); + } else if (lexer.token() == Token.SUB) { + lexer.nextToken(); + SQLExpr rightExp = multiplicative(); - if ("GLOBAL".equalsIgnoreCase(ident)) { - ident = lexer.stringVal(); - lexer.nextToken(); - var = new SQLVariantRefExpr(ident, true); - } else if ("SESSION".equalsIgnoreCase(ident)) { - ident = lexer.stringVal(); - lexer.nextToken(); - var = new SQLVariantRefExpr(ident, false); - } else { - var = new SQLVariantRefExpr(ident); - } - } + expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.Subtract, rightExp, JdbcConstants.MYSQL); + expr = additiveRest(expr); + } - if ("NAMES".equalsIgnoreCase(ident)) { - // skip - } else if ("CHARACTER".equalsIgnoreCase(ident)) { - var = new SQLIdentifierExpr("CHARACTER SET"); - accept(Token.SET); - if (lexer.token() == Token.EQ) { - lexer.nextToken(); - } - } else { - if (lexer.token() == Token.COLONEQ) { - lexer.nextToken(); - } else { - accept(Token.EQ); - } - } + return expr; + } + + public SQLAssignItem parseAssignItem() { + SQLAssignItem item = new 
SQLAssignItem(); - item.setValue(this.expr()); + SQLExpr var = primary(); - item.setTarget(var); - return item; + String ident = null; + if (var instanceof SQLIdentifierExpr) { + ident = ((SQLIdentifierExpr) var).getName(); + + if ("GLOBAL".equalsIgnoreCase(ident)) { + ident = lexer.stringVal(); + lexer.nextToken(); + var = new SQLVariantRefExpr(ident, true); + } else if ("SESSION".equalsIgnoreCase(ident)) { + ident = lexer.stringVal(); + lexer.nextToken(); + var = new SQLVariantRefExpr(ident, false); + } else { + var = new SQLVariantRefExpr(ident); + } } - public SQLName nameRest(SQLName name) { - if (lexer.token() == Token.VARIANT && "@".equals(lexer.stringVal())) { - lexer.nextToken(); - MySqlUserName userName = new MySqlUserName(); - userName.setUserName(((SQLIdentifierExpr) name).getName()); + if ("NAMES".equalsIgnoreCase(ident)) { + // skip + } else if ("CHARACTER".equalsIgnoreCase(ident)) { + var = new SQLIdentifierExpr("CHARACTER SET"); + accept(Token.SET); + if (lexer.token() == Token.EQ) { + lexer.nextToken(); + } + } else { + if (lexer.token() == Token.COLONEQ) { + lexer.nextToken(); + } else { + accept(Token.EQ); + } + } - if (lexer.token() == Token.LITERAL_CHARS) { - userName.setHost("'" + lexer.stringVal() + "'"); - } else { - userName.setHost(lexer.stringVal()); - } - lexer.nextToken(); - return userName; - } - return super.nameRest(name); + item.setValue(this.expr()); + + item.setTarget(var); + return item; + } + + public SQLName nameRest(SQLName name) { + if (lexer.token() == Token.VARIANT && "@".equals(lexer.stringVal())) { + lexer.nextToken(); + MySqlUserName userName = new MySqlUserName(); + userName.setUserName(((SQLIdentifierExpr) name).getName()); + + if (lexer.token() == Token.LITERAL_CHARS) { + userName.setHost("'" + lexer.stringVal() + "'"); + } else { + userName.setHost(lexer.stringVal()); + } + lexer.nextToken(); + return userName; } + return super.nameRest(name); + } - public MySqlSelectQueryBlock.Limit parseLimit() { - if 
(lexer.token() == Token.LIMIT) { - lexer.nextToken(); + public MySqlSelectQueryBlock.Limit parseLimit() { + if (lexer.token() == Token.LIMIT) { + lexer.nextToken(); - MySqlSelectQueryBlock.Limit limit = new MySqlSelectQueryBlock.Limit(); - - SQLExpr temp = this.expr(); - if (lexer.token() == (Token.COMMA)) { - limit.setOffset(temp); - lexer.nextToken(); - limit.setRowCount(this.expr()); - } else if (identifierEquals("OFFSET")) { - limit.setRowCount(temp); - lexer.nextToken(); - limit.setOffset(this.expr()); - } else { - limit.setRowCount(temp); - } - return limit; - } + MySqlSelectQueryBlock.Limit limit = new MySqlSelectQueryBlock.Limit(); - return null; + SQLExpr temp = this.expr(); + if (lexer.token() == (Token.COMMA)) { + limit.setOffset(temp); + lexer.nextToken(); + limit.setRowCount(this.expr()); + } else if (identifierEquals("OFFSET")) { + limit.setRowCount(temp); + lexer.nextToken(); + limit.setOffset(this.expr()); + } else { + limit.setRowCount(temp); + } + return limit; } - @Override - public MySqlPrimaryKey parsePrimaryKey() { - accept(Token.PRIMARY); - accept(Token.KEY); + return null; + } - MySqlPrimaryKey primaryKey = new MySqlPrimaryKey(); + @Override + public MySqlPrimaryKey parsePrimaryKey() { + accept(Token.PRIMARY); + accept(Token.KEY); - if (identifierEquals("USING")) { - lexer.nextToken(); - primaryKey.setIndexType(lexer.stringVal()); - lexer.nextToken(); - } + MySqlPrimaryKey primaryKey = new MySqlPrimaryKey(); - accept(Token.LPAREN); - for (; ; ) { - primaryKey.getColumns().add(this.expr()); - if (!(lexer.token() == (Token.COMMA))) { - break; - } else { - lexer.nextToken(); - } - } - accept(Token.RPAREN); + if (identifierEquals("USING")) { + lexer.nextToken(); + primaryKey.setIndexType(lexer.stringVal()); + lexer.nextToken(); + } - return primaryKey; + accept(Token.LPAREN); + for (; ; ) { + primaryKey.getColumns().add(this.expr()); + if (!(lexer.token() == (Token.COMMA))) { + break; + } else { + lexer.nextToken(); + } } + accept(Token.RPAREN); 
- public MySqlUnique parseUnique() { - accept(Token.UNIQUE); + return primaryKey; + } - if (lexer.token() == Token.KEY) { - lexer.nextToken(); - } + public MySqlUnique parseUnique() { + accept(Token.UNIQUE); - if (lexer.token() == Token.INDEX) { - lexer.nextToken(); - } + if (lexer.token() == Token.KEY) { + lexer.nextToken(); + } - MySqlUnique unique = new MySqlUnique(); + if (lexer.token() == Token.INDEX) { + lexer.nextToken(); + } - if (lexer.token() != Token.LPAREN) { - SQLName indexName = name(); - unique.setIndexName(indexName); - } + MySqlUnique unique = new MySqlUnique(); - accept(Token.LPAREN); - for (; ; ) { - unique.getColumns().add(this.expr()); - if (!(lexer.token() == (Token.COMMA))) { - break; - } else { - lexer.nextToken(); - } - } - accept(Token.RPAREN); + if (lexer.token() != Token.LPAREN) { + SQLName indexName = name(); + unique.setIndexName(indexName); + } - if (identifierEquals("USING")) { - lexer.nextToken(); - unique.setIndexType(lexer.stringVal()); - lexer.nextToken(); - } + accept(Token.LPAREN); + for (; ; ) { + unique.getColumns().add(this.expr()); + if (!(lexer.token() == (Token.COMMA))) { + break; + } else { + lexer.nextToken(); + } + } + accept(Token.RPAREN); - return unique; + if (identifierEquals("USING")) { + lexer.nextToken(); + unique.setIndexType(lexer.stringVal()); + lexer.nextToken(); } - public MysqlForeignKey parseForeignKey() { - accept(Token.FOREIGN); - accept(Token.KEY); + return unique; + } - MysqlForeignKey fk = new MysqlForeignKey(); + public MysqlForeignKey parseForeignKey() { + accept(Token.FOREIGN); + accept(Token.KEY); - if (lexer.token() != Token.LPAREN) { - SQLName indexName = name(); - fk.setIndexName(indexName); - } + MysqlForeignKey fk = new MysqlForeignKey(); - accept(Token.LPAREN); - this.names(fk.getReferencingColumns()); - accept(Token.RPAREN); + if (lexer.token() != Token.LPAREN) { + SQLName indexName = name(); + fk.setIndexName(indexName); + } - accept(Token.REFERENCES); + accept(Token.LPAREN); + 
this.names(fk.getReferencingColumns()); + accept(Token.RPAREN); - fk.setReferencedTableName(this.name()); + accept(Token.REFERENCES); - accept(Token.LPAREN); - this.names(fk.getReferencedColumns()); - accept(Token.RPAREN); + fk.setReferencedTableName(this.name()); - if (identifierEquals("MATCH")) { - if (identifierEquals("FULL")) { - fk.setReferenceMatch(MysqlForeignKey.Match.FULL); - } else if (identifierEquals("PARTIAL")) { - fk.setReferenceMatch(MysqlForeignKey.Match.PARTIAL); - } else if (identifierEquals("SIMPLE")) { - fk.setReferenceMatch(MysqlForeignKey.Match.SIMPLE); - } - } + accept(Token.LPAREN); + this.names(fk.getReferencedColumns()); + accept(Token.RPAREN); - if (lexer.token() == Token.ON) { - lexer.nextToken(); - if (lexer.token() == Token.DELETE) { - fk.setReferenceOn(MysqlForeignKey.On.DELETE); - } else if (lexer.token() == Token.UPDATE) { - fk.setReferenceOn(MysqlForeignKey.On.UPDATE); - } else { - throw new ParserException("Syntax error, expect DELETE or UPDATE, actual " + lexer.token() + " " - + lexer.stringVal()); - } - lexer.nextToken(); + if (identifierEquals("MATCH")) { + if (identifierEquals("FULL")) { + fk.setReferenceMatch(MysqlForeignKey.Match.FULL); + } else if (identifierEquals("PARTIAL")) { + fk.setReferenceMatch(MysqlForeignKey.Match.PARTIAL); + } else if (identifierEquals("SIMPLE")) { + fk.setReferenceMatch(MysqlForeignKey.Match.SIMPLE); + } + } - if (lexer.token() == Token.RESTRICT) { - fk.setReferenceOption(MysqlForeignKey.Option.RESTRICT); - } else if (identifierEquals("CASCADE")) { - fk.setReferenceOption(MysqlForeignKey.Option.CASCADE); - } else if (lexer.token() == Token.SET) { - accept(Token.NULL); - fk.setReferenceOption(MysqlForeignKey.Option.SET_NULL); - } else if (identifierEquals("ON")) { - lexer.nextToken(); - if (identifierEquals("ACTION")) { - fk.setReferenceOption(MysqlForeignKey.Option.NO_ACTION); - } else { - throw new ParserException("Syntax error, expect ACTION, actual " + lexer.token() + " " - + 
lexer.stringVal()); - } - } - lexer.nextToken(); + if (lexer.token() == Token.ON) { + lexer.nextToken(); + if (lexer.token() == Token.DELETE) { + fk.setReferenceOn(MysqlForeignKey.On.DELETE); + } else if (lexer.token() == Token.UPDATE) { + fk.setReferenceOn(MysqlForeignKey.On.UPDATE); + } else { + throw new ParserException( + "Syntax error, expect DELETE or UPDATE, actual " + + lexer.token() + + " " + + lexer.stringVal()); + } + lexer.nextToken(); + + if (lexer.token() == Token.RESTRICT) { + fk.setReferenceOption(MysqlForeignKey.Option.RESTRICT); + } else if (identifierEquals("CASCADE")) { + fk.setReferenceOption(MysqlForeignKey.Option.CASCADE); + } else if (lexer.token() == Token.SET) { + accept(Token.NULL); + fk.setReferenceOption(MysqlForeignKey.Option.SET_NULL); + } else if (identifierEquals("ON")) { + lexer.nextToken(); + if (identifierEquals("ACTION")) { + fk.setReferenceOption(MysqlForeignKey.Option.NO_ACTION); + } else { + throw new ParserException( + "Syntax error, expect ACTION, actual " + lexer.token() + " " + lexer.stringVal()); } - return fk; + } + lexer.nextToken(); } + return fk; + } - protected SQLAggregateExpr parseAggregateExprRest(SQLAggregateExpr aggregateExpr) { - if (lexer.token() == Token.ORDER) { - SQLOrderBy orderBy = this.parseOrderBy(); - aggregateExpr.putAttribute("ORDER BY", orderBy); - } - if (identifierEquals("SEPARATOR")) { - lexer.nextToken(); + protected SQLAggregateExpr parseAggregateExprRest(SQLAggregateExpr aggregateExpr) { + if (lexer.token() == Token.ORDER) { + SQLOrderBy orderBy = this.parseOrderBy(); + aggregateExpr.putAttribute("ORDER BY", orderBy); + } + if (identifierEquals("SEPARATOR")) { + lexer.nextToken(); - SQLExpr seperator = this.primary(); + SQLExpr seperator = this.primary(); - aggregateExpr.putAttribute("SEPARATOR", seperator); - } - return aggregateExpr; + aggregateExpr.putAttribute("SEPARATOR", seperator); } + return aggregateExpr; + } - public MySqlSelectGroupByExpr parseSelectGroupByItem() { - 
MySqlSelectGroupByExpr item = new MySqlSelectGroupByExpr(); - - item.setExpr(expr()); + public MySqlSelectGroupByExpr parseSelectGroupByItem() { + MySqlSelectGroupByExpr item = new MySqlSelectGroupByExpr(); - if (lexer.token() == Token.ASC) { - lexer.nextToken(); - item.setType(SQLOrderingSpecification.ASC); - } else if (lexer.token() == Token.DESC) { - lexer.nextToken(); - item.setType(SQLOrderingSpecification.DESC); - } + item.setExpr(expr()); - return item; + if (lexer.token() == Token.ASC) { + lexer.nextToken(); + item.setType(SQLOrderingSpecification.ASC); + } else if (lexer.token() == Token.DESC) { + lexer.nextToken(); + item.setType(SQLOrderingSpecification.DESC); } + return item; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticSqlSelectParser.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticSqlSelectParser.java index 2038aa54ef..c405d90878 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticSqlSelectParser.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/ElasticSqlSelectParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import com.alibaba.druid.sql.ast.SQLExpr; @@ -29,313 +28,311 @@ import com.alibaba.druid.sql.parser.Token; import org.opensearch.sql.legacy.exception.SqlFeatureNotImplementedException; -/** - * Created by allwefantasy on 8/19/16. - */ +/** Created by allwefantasy on 8/19/16. 
*/ public class ElasticSqlSelectParser extends SQLSelectParser { - public ElasticSqlSelectParser(SQLExprParser exprParser) { - super(exprParser); + public ElasticSqlSelectParser(SQLExprParser exprParser) { + super(exprParser); + } + + @Override + public SQLSelectQuery query() { + if (lexer.token() == (Token.LPAREN)) { + lexer.nextToken(); + + SQLSelectQuery select = query(); + accept(Token.RPAREN); + + return queryRest(select); } - @Override - public SQLSelectQuery query() { - if (lexer.token() == (Token.LPAREN)) { - lexer.nextToken(); + MySqlSelectQueryBlock queryBlock = new MySqlSelectQueryBlock(); + + if (lexer.token() == Token.SELECT) { + lexer.nextToken(); + + if (lexer.token() == Token.HINT) { + this.exprParser.parseHints(queryBlock.getHints()); + } + + if (lexer.token() == Token.COMMENT) { + lexer.nextToken(); + } + + if (lexer.token() == (Token.DISTINCT)) { + queryBlock.setDistionOption(SQLSetQuantifier.DISTINCT); + lexer.nextToken(); + } else if (identifierEquals("DISTINCTROW")) { + queryBlock.setDistionOption(SQLSetQuantifier.DISTINCTROW); + lexer.nextToken(); + } else if (lexer.token() == (Token.ALL)) { + queryBlock.setDistionOption(SQLSetQuantifier.ALL); + lexer.nextToken(); + } + + if (identifierEquals("HIGH_PRIORITY")) { + queryBlock.setHignPriority(true); + lexer.nextToken(); + } + + if (identifierEquals("STRAIGHT_JOIN")) { + queryBlock.setStraightJoin(true); + lexer.nextToken(); + } + + if (identifierEquals("SQL_SMALL_RESULT")) { + queryBlock.setSmallResult(true); + lexer.nextToken(); + } + + if (identifierEquals("SQL_BIG_RESULT")) { + queryBlock.setBigResult(true); + lexer.nextToken(); + } + + if (identifierEquals("SQL_BUFFER_RESULT")) { + queryBlock.setBufferResult(true); + lexer.nextToken(); + } + + if (identifierEquals("SQL_CACHE")) { + queryBlock.setCache(true); + lexer.nextToken(); + } + + if (identifierEquals("SQL_NO_CACHE")) { + queryBlock.setCache(false); + lexer.nextToken(); + } + + if (identifierEquals("SQL_CALC_FOUND_ROWS")) { + 
queryBlock.setCalcFoundRows(true); + lexer.nextToken(); + } + + parseSelectList(queryBlock); + + parseInto(queryBlock); + } - SQLSelectQuery select = query(); - accept(Token.RPAREN); + parseFrom(queryBlock); - return queryRest(select); - } + parseWhere(queryBlock); - MySqlSelectQueryBlock queryBlock = new MySqlSelectQueryBlock(); + parseGroupBy(queryBlock); - if (lexer.token() == Token.SELECT) { - lexer.nextToken(); + queryBlock.setOrderBy(this.exprParser.parseOrderBy()); - if (lexer.token() == Token.HINT) { - this.exprParser.parseHints(queryBlock.getHints()); - } - - if (lexer.token() == Token.COMMENT) { - lexer.nextToken(); - } - - if (lexer.token() == (Token.DISTINCT)) { - queryBlock.setDistionOption(SQLSetQuantifier.DISTINCT); - lexer.nextToken(); - } else if (identifierEquals("DISTINCTROW")) { - queryBlock.setDistionOption(SQLSetQuantifier.DISTINCTROW); - lexer.nextToken(); - } else if (lexer.token() == (Token.ALL)) { - queryBlock.setDistionOption(SQLSetQuantifier.ALL); - lexer.nextToken(); - } - - if (identifierEquals("HIGH_PRIORITY")) { - queryBlock.setHignPriority(true); - lexer.nextToken(); - } - - if (identifierEquals("STRAIGHT_JOIN")) { - queryBlock.setStraightJoin(true); - lexer.nextToken(); - } - - if (identifierEquals("SQL_SMALL_RESULT")) { - queryBlock.setSmallResult(true); - lexer.nextToken(); - } - - if (identifierEquals("SQL_BIG_RESULT")) { - queryBlock.setBigResult(true); - lexer.nextToken(); - } - - if (identifierEquals("SQL_BUFFER_RESULT")) { - queryBlock.setBufferResult(true); - lexer.nextToken(); - } - - if (identifierEquals("SQL_CACHE")) { - queryBlock.setCache(true); - lexer.nextToken(); - } - - if (identifierEquals("SQL_NO_CACHE")) { - queryBlock.setCache(false); - lexer.nextToken(); - } - - if (identifierEquals("SQL_CALC_FOUND_ROWS")) { - queryBlock.setCalcFoundRows(true); - lexer.nextToken(); - } - - parseSelectList(queryBlock); - - parseInto(queryBlock); - } + if (lexer.token() == Token.LIMIT) { + queryBlock.setLimit(parseLimit()); + } 
- parseFrom(queryBlock); + if (lexer.token() == Token.PROCEDURE) { + lexer.nextToken(); + throw new SqlFeatureNotImplementedException("Unsupported feature: " + Token.PROCEDURE.name); + } - parseWhere(queryBlock); + parseInto(queryBlock); - parseGroupBy(queryBlock); + if (lexer.token() == Token.FOR) { + lexer.nextToken(); + accept(Token.UPDATE); - queryBlock.setOrderBy(this.exprParser.parseOrderBy()); + queryBlock.setForUpdate(true); + } - if (lexer.token() == Token.LIMIT) { - queryBlock.setLimit(parseLimit()); - } + if (lexer.token() == Token.LOCK) { + lexer.nextToken(); + accept(Token.IN); + acceptIdentifier("SHARE"); + acceptIdentifier("MODE"); + queryBlock.setLockInShareMode(true); + } - if (lexer.token() == Token.PROCEDURE) { - lexer.nextToken(); - throw new SqlFeatureNotImplementedException("Unsupported feature: " + Token.PROCEDURE.name); - } + return queryRest(queryBlock); + } + + protected void parseInto(SQLSelectQueryBlock queryBlock) { + if (lexer.token() == (Token.INTO)) { + lexer.nextToken(); + + if (identifierEquals("OUTFILE")) { + lexer.nextToken(); + + MySqlOutFileExpr outFile = new MySqlOutFileExpr(); + outFile.setFile(expr()); + + queryBlock.setInto(outFile); + + if (identifierEquals("FIELDS") || identifierEquals("COLUMNS")) { + lexer.nextToken(); - parseInto(queryBlock); + if (identifierEquals("TERMINATED")) { + lexer.nextToken(); + accept(Token.BY); + } + outFile.setColumnsTerminatedBy((SQLLiteralExpr) expr()); - if (lexer.token() == Token.FOR) { + if (identifierEquals("OPTIONALLY")) { lexer.nextToken(); - accept(Token.UPDATE); + outFile.setColumnsEnclosedOptionally(true); + } - queryBlock.setForUpdate(true); - } + if (identifierEquals("ENCLOSED")) { + lexer.nextToken(); + accept(Token.BY); + outFile.setColumnsEnclosedBy((SQLLiteralExpr) expr()); + } - if (lexer.token() == Token.LOCK) { + if (identifierEquals("ESCAPED")) { lexer.nextToken(); - accept(Token.IN); - acceptIdentifier("SHARE"); - acceptIdentifier("MODE"); - 
queryBlock.setLockInShareMode(true); + accept(Token.BY); + outFile.setColumnsEscaped((SQLLiteralExpr) expr()); + } } - return queryRest(queryBlock); - } + if (identifierEquals("LINES")) { + lexer.nextToken(); - protected void parseInto(SQLSelectQueryBlock queryBlock) { - if (lexer.token() == (Token.INTO)) { + if (identifierEquals("STARTING")) { lexer.nextToken(); - - if (identifierEquals("OUTFILE")) { - lexer.nextToken(); - - MySqlOutFileExpr outFile = new MySqlOutFileExpr(); - outFile.setFile(expr()); - - queryBlock.setInto(outFile); - - if (identifierEquals("FIELDS") || identifierEquals("COLUMNS")) { - lexer.nextToken(); - - if (identifierEquals("TERMINATED")) { - lexer.nextToken(); - accept(Token.BY); - } - outFile.setColumnsTerminatedBy((SQLLiteralExpr) expr()); - - if (identifierEquals("OPTIONALLY")) { - lexer.nextToken(); - outFile.setColumnsEnclosedOptionally(true); - } - - if (identifierEquals("ENCLOSED")) { - lexer.nextToken(); - accept(Token.BY); - outFile.setColumnsEnclosedBy((SQLLiteralExpr) expr()); - } - - if (identifierEquals("ESCAPED")) { - lexer.nextToken(); - accept(Token.BY); - outFile.setColumnsEscaped((SQLLiteralExpr) expr()); - } - } - - if (identifierEquals("LINES")) { - lexer.nextToken(); - - if (identifierEquals("STARTING")) { - lexer.nextToken(); - accept(Token.BY); - outFile.setLinesStartingBy((SQLLiteralExpr) expr()); - } else { - identifierEquals("TERMINATED"); - lexer.nextToken(); - accept(Token.BY); - outFile.setLinesTerminatedBy((SQLLiteralExpr) expr()); - } - } - } else { - queryBlock.setInto(this.exprParser.name()); - } + accept(Token.BY); + outFile.setLinesStartingBy((SQLLiteralExpr) expr()); + } else { + identifierEquals("TERMINATED"); + lexer.nextToken(); + accept(Token.BY); + outFile.setLinesTerminatedBy((SQLLiteralExpr) expr()); + } } + } else { + queryBlock.setInto(this.exprParser.name()); + } } + } - protected void parseGroupBy(SQLSelectQueryBlock queryBlock) { - SQLSelectGroupByClause groupBy = null; + protected void 
parseGroupBy(SQLSelectQueryBlock queryBlock) { + SQLSelectGroupByClause groupBy = null; - if (lexer.token() == Token.GROUP) { - groupBy = new SQLSelectGroupByClause(); + if (lexer.token() == Token.GROUP) { + groupBy = new SQLSelectGroupByClause(); - lexer.nextToken(); - accept(Token.BY); + lexer.nextToken(); + accept(Token.BY); - while (true) { - groupBy.addItem(this.getExprParser().parseSelectGroupByItem()); - if (!(lexer.token() == (Token.COMMA))) { - break; - } - lexer.nextToken(); - } - - if (lexer.token() == Token.WITH) { - lexer.nextToken(); - acceptIdentifier("ROLLUP"); - - MySqlSelectGroupBy mySqlGroupBy = new MySqlSelectGroupBy(); - for (SQLExpr sqlExpr : groupBy.getItems()) { - mySqlGroupBy.addItem(sqlExpr); - } - mySqlGroupBy.setRollUp(true); - - groupBy = mySqlGroupBy; - } + while (true) { + groupBy.addItem(this.getExprParser().parseSelectGroupByItem()); + if (!(lexer.token() == (Token.COMMA))) { + break; } + lexer.nextToken(); + } - if (lexer.token() == Token.HAVING) { - lexer.nextToken(); + if (lexer.token() == Token.WITH) { + lexer.nextToken(); + acceptIdentifier("ROLLUP"); - if (groupBy == null) { - groupBy = new SQLSelectGroupByClause(); - } - groupBy.setHaving(this.exprParser.expr()); + MySqlSelectGroupBy mySqlGroupBy = new MySqlSelectGroupBy(); + for (SQLExpr sqlExpr : groupBy.getItems()) { + mySqlGroupBy.addItem(sqlExpr); } + mySqlGroupBy.setRollUp(true); - queryBlock.setGroupBy(groupBy); + groupBy = mySqlGroupBy; + } } - protected SQLTableSource parseTableSourceRest(SQLTableSource tableSource) { - if (identifierEquals("USING")) { - return tableSource; - } + if (lexer.token() == Token.HAVING) { + lexer.nextToken(); - if (lexer.token() == Token.USE) { - lexer.nextToken(); - MySqlUseIndexHint hint = new MySqlUseIndexHint(); - parseIndexHint(hint); - tableSource.getHints().add(hint); - } + if (groupBy == null) { + groupBy = new SQLSelectGroupByClause(); + } + groupBy.setHaving(this.exprParser.expr()); + } - if (identifierEquals("IGNORE")) { - 
lexer.nextToken(); - MySqlIgnoreIndexHint hint = new MySqlIgnoreIndexHint(); - parseIndexHint(hint); - tableSource.getHints().add(hint); - } + queryBlock.setGroupBy(groupBy); + } - if (identifierEquals("FORCE")) { - lexer.nextToken(); - MySqlForceIndexHint hint = new MySqlForceIndexHint(); - parseIndexHint(hint); - tableSource.getHints().add(hint); - } + protected SQLTableSource parseTableSourceRest(SQLTableSource tableSource) { + if (identifierEquals("USING")) { + return tableSource; + } - return super.parseTableSourceRest(tableSource); + if (lexer.token() == Token.USE) { + lexer.nextToken(); + MySqlUseIndexHint hint = new MySqlUseIndexHint(); + parseIndexHint(hint); + tableSource.getHints().add(hint); } - private void parseIndexHint(MySqlIndexHintImpl hint) { - if (lexer.token() == Token.INDEX) { - lexer.nextToken(); - } else { - accept(Token.KEY); - } + if (identifierEquals("IGNORE")) { + lexer.nextToken(); + MySqlIgnoreIndexHint hint = new MySqlIgnoreIndexHint(); + parseIndexHint(hint); + tableSource.getHints().add(hint); + } - if (lexer.token() == Token.FOR) { - lexer.nextToken(); + if (identifierEquals("FORCE")) { + lexer.nextToken(); + MySqlForceIndexHint hint = new MySqlForceIndexHint(); + parseIndexHint(hint); + tableSource.getHints().add(hint); + } - if (lexer.token() == Token.JOIN) { - lexer.nextToken(); - hint.setOption(MySqlIndexHint.Option.JOIN); - } else if (lexer.token() == Token.ORDER) { - lexer.nextToken(); - accept(Token.BY); - hint.setOption(MySqlIndexHint.Option.ORDER_BY); - } else { - accept(Token.GROUP); - accept(Token.BY); - hint.setOption(MySqlIndexHint.Option.GROUP_BY); - } - } + return super.parseTableSourceRest(tableSource); + } - accept(Token.LPAREN); - if (lexer.token() == Token.PRIMARY) { - lexer.nextToken(); - hint.getIndexList().add(new SQLIdentifierExpr("PRIMARY")); - } else { - this.exprParser.names(hint.getIndexList()); - } - accept(Token.RPAREN); + private void parseIndexHint(MySqlIndexHintImpl hint) { + if (lexer.token() == 
Token.INDEX) { + lexer.nextToken(); + } else { + accept(Token.KEY); } - protected MySqlUnionQuery createSQLUnionQuery() { - return new MySqlUnionQuery(); + if (lexer.token() == Token.FOR) { + lexer.nextToken(); + + if (lexer.token() == Token.JOIN) { + lexer.nextToken(); + hint.setOption(MySqlIndexHint.Option.JOIN); + } else if (lexer.token() == Token.ORDER) { + lexer.nextToken(); + accept(Token.BY); + hint.setOption(MySqlIndexHint.Option.ORDER_BY); + } else { + accept(Token.GROUP); + accept(Token.BY); + hint.setOption(MySqlIndexHint.Option.GROUP_BY); + } } - public SQLUnionQuery unionRest(SQLUnionQuery union) { - if (lexer.token() == Token.LIMIT) { - MySqlUnionQuery mysqlUnionQuery = (MySqlUnionQuery) union; - mysqlUnionQuery.setLimit(parseLimit()); - } - return super.unionRest(union); + accept(Token.LPAREN); + if (lexer.token() == Token.PRIMARY) { + lexer.nextToken(); + hint.getIndexList().add(new SQLIdentifierExpr("PRIMARY")); + } else { + this.exprParser.names(hint.getIndexList()); } + accept(Token.RPAREN); + } - public MySqlSelectQueryBlock.Limit parseLimit() { - return ((ElasticSqlExprParser) this.exprParser).parseLimit(); - } + protected MySqlUnionQuery createSQLUnionQuery() { + return new MySqlUnionQuery(); + } - public ElasticSqlExprParser getExprParser() { - return (ElasticSqlExprParser) exprParser; + public SQLUnionQuery unionRest(SQLUnionQuery union) { + if (lexer.token() == Token.LIMIT) { + MySqlUnionQuery mysqlUnionQuery = (MySqlUnionQuery) union; + mysqlUnionQuery.setLimit(parseLimit()); } + return super.unionRest(union); + } + + public MySqlSelectQueryBlock.Limit parseLimit() { + return ((ElasticSqlExprParser) this.exprParser).parseLimit(); + } + + public ElasticSqlExprParser getExprParser() { + return (ElasticSqlExprParser) exprParser; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/FieldMaker.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/FieldMaker.java index 89e9a16d1c..da08f81453 100644 --- 
a/legacy/src/main/java/org/opensearch/sql/legacy/parser/FieldMaker.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/FieldMaker.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import com.alibaba.druid.sql.ast.SQLExpr; @@ -41,369 +40,396 @@ import org.opensearch.sql.legacy.utils.Util; /** - * - * * @author ansj */ public class FieldMaker { - private SQLFunctions sqlFunctions = new SQLFunctions(); - - public Field makeField(SQLExpr expr, String alias, String tableAlias) throws SqlParseException { - Field field = makeFieldImpl(expr, alias, tableAlias); - addGroupByForDistinctFieldsInSelect(expr, field); + private SQLFunctions sqlFunctions = new SQLFunctions(); - // why we may get null as a field??? - if (field != null) { - field.setExpression(expr); - } + public Field makeField(SQLExpr expr, String alias, String tableAlias) throws SqlParseException { + Field field = makeFieldImpl(expr, alias, tableAlias); + addGroupByForDistinctFieldsInSelect(expr, field); - return field; + // why we may get null as a field??? 
+ if (field != null) { + field.setExpression(expr); } - private Field makeFieldImpl(SQLExpr expr, String alias, String tableAlias) throws SqlParseException { - if (expr instanceof SQLIdentifierExpr || expr instanceof SQLPropertyExpr || expr instanceof SQLVariantRefExpr) { - return handleIdentifier(expr, alias, tableAlias); - } else if (expr instanceof SQLQueryExpr) { - throw new SqlParseException("unknown field name : " + expr); - } else if (expr instanceof SQLBinaryOpExpr) { - //make a SCRIPT method field; - return makeFieldImpl(makeBinaryMethodField((SQLBinaryOpExpr) expr, alias, true), alias, tableAlias); - } else if (expr instanceof SQLAllColumnExpr) { - return Field.STAR; - } else if (expr instanceof SQLMethodInvokeExpr) { - SQLMethodInvokeExpr mExpr = (SQLMethodInvokeExpr) expr; - - String methodName = mExpr.getMethodName(); - - if (methodName.equalsIgnoreCase("nested") || methodName.equalsIgnoreCase("reverse_nested")) { - NestedType nestedType = new NestedType(); - if (nestedType.tryFillFromExpr(mExpr)) { - return handleIdentifier(nestedType, alias, tableAlias); - } - } else if (methodName.equalsIgnoreCase("children")) { - ChildrenType childrenType = new ChildrenType(); - if (childrenType.tryFillFromExpr(mExpr)) { - return handleIdentifier(childrenType, alias, tableAlias); - } - } else if (methodName.equalsIgnoreCase("filter")) { - return makeFilterMethodField(mExpr, alias); - } - - if ((SQLFunctions.builtInFunctions.contains(methodName.toLowerCase())) && Strings.isNullOrEmpty(alias)) { - alias = mExpr.toString(); - } - return makeMethodField(methodName, mExpr.getParameters(), null, alias, tableAlias, true); - } else if (expr instanceof SQLAggregateExpr) { - SQLAggregateExpr sExpr = (SQLAggregateExpr) expr; - return makeMethodField(sExpr.getMethodName(), sExpr.getArguments(), sExpr.getOption(), - alias, tableAlias, true); - } else if (expr instanceof SQLCaseExpr) { - String scriptCode = new CaseWhenParser((SQLCaseExpr) expr, alias, tableAlias).parse(); - 
List methodParameters = new ArrayList<>(); - methodParameters.add(new KVValue(alias)); - methodParameters.add(new KVValue(scriptCode)); - return new MethodField("script", methodParameters, null, alias); - } else if (expr instanceof SQLCastExpr) { - SQLCastExpr castExpr = (SQLCastExpr) expr; - if (alias == null) { - alias = "cast_" + castExpr.getExpr().toString(); - } - ArrayList methodParameters = new ArrayList<>(); - methodParameters.add(((SQLCastExpr) expr).getExpr()); - return makeMethodField("CAST", methodParameters, null, alias, tableAlias, true); - } else if (expr instanceof SQLNumericLiteralExpr) { - SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr("assign", null); - methodInvokeExpr.addParameter(expr); - return makeMethodField(methodInvokeExpr.getMethodName(), methodInvokeExpr.getParameters(), - null, alias, tableAlias, true); - } else { - throw new SqlParseException("unknown field name : " + expr); + return field; + } + + private Field makeFieldImpl(SQLExpr expr, String alias, String tableAlias) + throws SqlParseException { + if (expr instanceof SQLIdentifierExpr + || expr instanceof SQLPropertyExpr + || expr instanceof SQLVariantRefExpr) { + return handleIdentifier(expr, alias, tableAlias); + } else if (expr instanceof SQLQueryExpr) { + throw new SqlParseException("unknown field name : " + expr); + } else if (expr instanceof SQLBinaryOpExpr) { + // make a SCRIPT method field; + return makeFieldImpl( + makeBinaryMethodField((SQLBinaryOpExpr) expr, alias, true), alias, tableAlias); + } else if (expr instanceof SQLAllColumnExpr) { + return Field.STAR; + } else if (expr instanceof SQLMethodInvokeExpr) { + SQLMethodInvokeExpr mExpr = (SQLMethodInvokeExpr) expr; + + String methodName = mExpr.getMethodName(); + + if (methodName.equalsIgnoreCase("nested") || methodName.equalsIgnoreCase("reverse_nested")) { + NestedType nestedType = new NestedType(); + if (nestedType.tryFillFromExpr(mExpr)) { + return handleIdentifier(nestedType, alias, tableAlias); 
} - } - - private void addGroupByForDistinctFieldsInSelect(SQLExpr expr, Field field) { - if (expr.getParent() != null && expr.getParent() instanceof SQLSelectItem - && expr.getParent().getParent() != null - && expr.getParent().getParent() instanceof SQLSelectQueryBlock) { - SQLSelectQueryBlock queryBlock = (SQLSelectQueryBlock) expr.getParent().getParent(); - if (queryBlock.getDistionOption() == SQLSetQuantifier.DISTINCT) { - SQLAggregateOption option = SQLAggregateOption.DISTINCT; - field.setAggregationOption(option); - if (queryBlock.getGroupBy() == null) { - queryBlock.setGroupBy(new SQLSelectGroupByClause()); - } - SQLSelectGroupByClause groupByClause = queryBlock.getGroupBy(); - groupByClause.addItem(expr); - queryBlock.setGroupBy(groupByClause); - } + } else if (methodName.equalsIgnoreCase("children")) { + ChildrenType childrenType = new ChildrenType(); + if (childrenType.tryFillFromExpr(mExpr)) { + return handleIdentifier(childrenType, alias, tableAlias); } + } else if (methodName.equalsIgnoreCase("filter")) { + return makeFilterMethodField(mExpr, alias); + } + + if ((SQLFunctions.builtInFunctions.contains(methodName.toLowerCase())) + && Strings.isNullOrEmpty(alias)) { + alias = mExpr.toString(); + } + return makeMethodField(methodName, mExpr.getParameters(), null, alias, tableAlias, true); + } else if (expr instanceof SQLAggregateExpr) { + SQLAggregateExpr sExpr = (SQLAggregateExpr) expr; + return makeMethodField( + sExpr.getMethodName(), sExpr.getArguments(), sExpr.getOption(), alias, tableAlias, true); + } else if (expr instanceof SQLCaseExpr) { + String scriptCode = new CaseWhenParser((SQLCaseExpr) expr, alias, tableAlias).parse(); + List methodParameters = new ArrayList<>(); + methodParameters.add(new KVValue(alias)); + methodParameters.add(new KVValue(scriptCode)); + return new MethodField("script", methodParameters, null, alias); + } else if (expr instanceof SQLCastExpr) { + SQLCastExpr castExpr = (SQLCastExpr) expr; + if (alias == null) { + alias = 
"cast_" + castExpr.getExpr().toString(); + } + ArrayList methodParameters = new ArrayList<>(); + methodParameters.add(((SQLCastExpr) expr).getExpr()); + return makeMethodField("CAST", methodParameters, null, alias, tableAlias, true); + } else if (expr instanceof SQLNumericLiteralExpr) { + SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr("assign", null); + methodInvokeExpr.addParameter(expr); + return makeMethodField( + methodInvokeExpr.getMethodName(), + methodInvokeExpr.getParameters(), + null, + alias, + tableAlias, + true); + } else { + throw new SqlParseException("unknown field name : " + expr); } - - private static Object getScriptValue(SQLExpr expr) throws SqlParseException { - return Util.getScriptValue(expr); - } - - private Field makeScriptMethodField(SQLBinaryOpExpr binaryExpr, String alias, String tableAlias) - throws SqlParseException { - List params = new ArrayList<>(); - - String scriptFieldAlias; - if (alias == null || alias.equals("")) { - scriptFieldAlias = binaryExpr.toString(); - } else { - scriptFieldAlias = alias; + } + + private void addGroupByForDistinctFieldsInSelect(SQLExpr expr, Field field) { + if (expr.getParent() != null + && expr.getParent() instanceof SQLSelectItem + && expr.getParent().getParent() != null + && expr.getParent().getParent() instanceof SQLSelectQueryBlock) { + SQLSelectQueryBlock queryBlock = (SQLSelectQueryBlock) expr.getParent().getParent(); + if (queryBlock.getDistionOption() == SQLSetQuantifier.DISTINCT) { + SQLAggregateOption option = SQLAggregateOption.DISTINCT; + field.setAggregationOption(option); + if (queryBlock.getGroupBy() == null) { + queryBlock.setGroupBy(new SQLSelectGroupByClause()); } - params.add(new SQLCharExpr(scriptFieldAlias)); + SQLSelectGroupByClause groupByClause = queryBlock.getGroupBy(); + groupByClause.addItem(expr); + queryBlock.setGroupBy(groupByClause); + } + } + } - Object left = getScriptValue(binaryExpr.getLeft()); - Object right = getScriptValue(binaryExpr.getRight()); - 
String script = String.format("%s %s %s", left, binaryExpr.getOperator().getName(), right); + private static Object getScriptValue(SQLExpr expr) throws SqlParseException { + return Util.getScriptValue(expr); + } - params.add(new SQLCharExpr(script)); + private Field makeScriptMethodField(SQLBinaryOpExpr binaryExpr, String alias, String tableAlias) + throws SqlParseException { + List params = new ArrayList<>(); - return makeMethodField("script", params, null, null, tableAlias, false); + String scriptFieldAlias; + if (alias == null || alias.equals("")) { + scriptFieldAlias = binaryExpr.toString(); + } else { + scriptFieldAlias = alias; } + params.add(new SQLCharExpr(scriptFieldAlias)); + Object left = getScriptValue(binaryExpr.getLeft()); + Object right = getScriptValue(binaryExpr.getRight()); + String script = String.format("%s %s %s", left, binaryExpr.getOperator().getName(), right); - private static Field makeFilterMethodField(SQLMethodInvokeExpr filterMethod, String alias) - throws SqlParseException { - List parameters = filterMethod.getParameters(); - int parametersSize = parameters.size(); - if (parametersSize != 1 && parametersSize != 2) { - throw new SqlParseException("filter group by field should only have one or 2 parameters" - + " filter(Expr) or filter(name,Expr)"); - } - String filterAlias = filterMethod.getMethodName(); - SQLExpr exprToCheck = null; - if (parametersSize == 1) { - exprToCheck = parameters.get(0); - filterAlias = "filter(" + exprToCheck.toString().replaceAll("\n", " ") + ")"; - } - if (parametersSize == 2) { - filterAlias = Util.extendedToString(parameters.get(0)); - exprToCheck = parameters.get(1); - } - Where where = Where.newInstance(); - new WhereParser(new SqlParser()).parseWhere(exprToCheck, where); - if (where.getWheres().size() == 0) { - throw new SqlParseException("Failed to parse filter condition"); - } - List methodParameters = new ArrayList<>(); - methodParameters.add(new KVValue("where", where)); - methodParameters.add(new 
KVValue("alias", filterAlias + "@FILTER")); - return new MethodField("filter", methodParameters, null, alias); - } + params.add(new SQLCharExpr(script)); + return makeMethodField("script", params, null, null, tableAlias, false); + } - private static Field handleIdentifier(NestedType nestedType, String alias, String tableAlias) { - Field field = handleIdentifier(new SQLIdentifierExpr(nestedType.field), alias, tableAlias); - field.setNested(nestedType); - field.setChildren(null); - return field; + private static Field makeFilterMethodField(SQLMethodInvokeExpr filterMethod, String alias) + throws SqlParseException { + List parameters = filterMethod.getParameters(); + int parametersSize = parameters.size(); + if (parametersSize != 1 && parametersSize != 2) { + throw new SqlParseException( + "filter group by field should only have one or 2 parameters" + + " filter(Expr) or filter(name,Expr)"); } - - private static Field handleIdentifier(ChildrenType childrenType, String alias, String tableAlias) { - Field field = handleIdentifier(new SQLIdentifierExpr(childrenType.field), alias, tableAlias); - field.setNested(null); - field.setChildren(childrenType); - return field; + String filterAlias = filterMethod.getMethodName(); + SQLExpr exprToCheck = null; + if (parametersSize == 1) { + exprToCheck = parameters.get(0); + filterAlias = "filter(" + exprToCheck.toString().replaceAll("\n", " ") + ")"; } - - - //binary method can nested - public SQLMethodInvokeExpr makeBinaryMethodField(SQLBinaryOpExpr expr, String alias, boolean first) - throws SqlParseException { - List params = new ArrayList<>(); - - String scriptFieldAlias; - if (first && (alias == null || alias.equals(""))) { - scriptFieldAlias = sqlFunctions.nextId("field"); - } else { - scriptFieldAlias = alias; - } - params.add(new SQLCharExpr(scriptFieldAlias)); - - switch (expr.getOperator()) { - case Add: - return convertBinaryOperatorToMethod("add", expr); - case Multiply: - return 
convertBinaryOperatorToMethod("multiply", expr); - - case Divide: - return convertBinaryOperatorToMethod("divide", expr); - - case Modulus: - return convertBinaryOperatorToMethod("modulus", expr); - - case Subtract: - return convertBinaryOperatorToMethod("subtract", expr); - default: - throw new SqlParseException("Unsupported operator: " + expr.getOperator().getName()); - } + if (parametersSize == 2) { + filterAlias = Util.extendedToString(parameters.get(0)); + exprToCheck = parameters.get(1); } - - private static SQLMethodInvokeExpr convertBinaryOperatorToMethod(String operator, SQLBinaryOpExpr expr) { - SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr(operator, null); - methodInvokeExpr.addParameter(expr.getLeft()); - methodInvokeExpr.addParameter(expr.getRight()); - methodInvokeExpr.putAttribute("source", expr); - return methodInvokeExpr; + Where where = Where.newInstance(); + new WhereParser(new SqlParser()).parseWhere(exprToCheck, where); + if (where.getWheres().size() == 0) { + throw new SqlParseException("Failed to parse filter condition"); + } + List methodParameters = new ArrayList<>(); + methodParameters.add(new KVValue("where", where)); + methodParameters.add(new KVValue("alias", filterAlias + "@FILTER")); + return new MethodField("filter", methodParameters, null, alias); + } + + private static Field handleIdentifier(NestedType nestedType, String alias, String tableAlias) { + Field field = handleIdentifier(new SQLIdentifierExpr(nestedType.field), alias, tableAlias); + field.setNested(nestedType); + field.setChildren(null); + return field; + } + + private static Field handleIdentifier( + ChildrenType childrenType, String alias, String tableAlias) { + Field field = handleIdentifier(new SQLIdentifierExpr(childrenType.field), alias, tableAlias); + field.setNested(null); + field.setChildren(childrenType); + return field; + } + + // binary method can nested + public SQLMethodInvokeExpr makeBinaryMethodField( + SQLBinaryOpExpr expr, String alias, 
boolean first) throws SqlParseException { + List params = new ArrayList<>(); + + String scriptFieldAlias; + if (first && (alias == null || alias.equals(""))) { + scriptFieldAlias = sqlFunctions.nextId("field"); + } else { + scriptFieldAlias = alias; } + params.add(new SQLCharExpr(scriptFieldAlias)); + switch (expr.getOperator()) { + case Add: + return convertBinaryOperatorToMethod("add", expr); + case Multiply: + return convertBinaryOperatorToMethod("multiply", expr); - private static Field handleIdentifier(SQLExpr expr, String alias, String tableAlias) { - String name = expr.toString().replace("`", ""); - String newFieldName = name; - Field field = null; - if (tableAlias != null) { - String aliasPrefix = tableAlias + "."; - if (name.startsWith(aliasPrefix)) { - newFieldName = name.replaceFirst(aliasPrefix, ""); - field = new Field(newFieldName, alias); - } - } + case Divide: + return convertBinaryOperatorToMethod("divide", expr); - if (tableAlias == null) { - field = new Field(newFieldName, alias); - } + case Modulus: + return convertBinaryOperatorToMethod("modulus", expr); - return field; + case Subtract: + return convertBinaryOperatorToMethod("subtract", expr); + default: + throw new SqlParseException("Unsupported operator: " + expr.getOperator().getName()); + } + } + + private static SQLMethodInvokeExpr convertBinaryOperatorToMethod( + String operator, SQLBinaryOpExpr expr) { + SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr(operator, null); + methodInvokeExpr.addParameter(expr.getLeft()); + methodInvokeExpr.addParameter(expr.getRight()); + methodInvokeExpr.putAttribute("source", expr); + return methodInvokeExpr; + } + + private static Field handleIdentifier(SQLExpr expr, String alias, String tableAlias) { + String name = expr.toString().replace("`", ""); + String newFieldName = name; + Field field = null; + if (tableAlias != null) { + String aliasPrefix = tableAlias + "."; + if (name.startsWith(aliasPrefix)) { + newFieldName = 
name.replaceFirst(aliasPrefix, ""); + field = new Field(newFieldName, alias); + } } - public MethodField makeMethodField(String name, List arguments, SQLAggregateOption option, - String alias, String tableAlias, boolean first) throws SqlParseException { - List paramers = new LinkedList<>(); - - for (SQLExpr object : arguments) { - - if (object instanceof SQLBinaryOpExpr) { - - SQLBinaryOpExpr binaryOpExpr = (SQLBinaryOpExpr) object; - - if (SQLFunctions.isFunctionTranslatedToScript(binaryOpExpr.getOperator().toString())) { - SQLMethodInvokeExpr mExpr = makeBinaryMethodField(binaryOpExpr, alias, first); - MethodField abc = makeMethodField(mExpr.getMethodName(), mExpr.getParameters(), - null, null, tableAlias, false); - paramers.add(new KVValue(abc.getParams().get(0).toString(), - new SQLCharExpr(abc.getParams().get(1).toString()))); - } else { - if (!binaryOpExpr.getOperator().getName().equals("=")) { - paramers.add(new KVValue("script", makeScriptMethodField(binaryOpExpr, null, tableAlias))); - } else { - SQLExpr right = binaryOpExpr.getRight(); - Object value = Util.expr2Object(right); - paramers.add(new KVValue(binaryOpExpr.getLeft().toString(), value)); - } - } - - } else if (object instanceof SQLMethodInvokeExpr) { - SQLMethodInvokeExpr mExpr = (SQLMethodInvokeExpr) object; - String methodName = mExpr.getMethodName().toLowerCase(); - if (methodName.equals("script")) { - KVValue script = new KVValue("script", makeMethodField(mExpr.getMethodName(), mExpr.getParameters(), - null, alias, tableAlias, true)); - paramers.add(script); - } else if (methodName.equals("nested") || methodName.equals("reverse_nested")) { - NestedType nestedType = new NestedType(); - - if (!nestedType.tryFillFromExpr(object)) { - throw new SqlParseException("Failed to parse nested expression: " + object); - } - - // Fix bug: method name of reversed_nested() was set to "nested" wrongly - paramers.add(new KVValue(methodName, nestedType)); - } else if (methodName.equals("children")) { - 
ChildrenType childrenType = new ChildrenType(); - - if (!childrenType.tryFillFromExpr(object)) { - throw new SqlParseException("Failed to parse children expression: " + object); - } - - paramers.add(new KVValue("children", childrenType)); - } else if (SQLFunctions.isFunctionTranslatedToScript(methodName)) { - //throw new SqlParseException("only support script/nested as inner functions"); - MethodField abc = makeMethodField(methodName, mExpr.getParameters(), null, null, tableAlias, false); - paramers.add(new KVValue(abc.getParams().get(0).toString(), - new SQLCharExpr(abc.getParams().get(1).toString()))); - } else { - throw new SqlParseException("only support script/nested/children as inner functions"); - } - } else if (object instanceof SQLCaseExpr) { - String scriptCode = new CaseWhenParser((SQLCaseExpr) object, alias, tableAlias).parse(); - paramers.add(new KVValue("script", new SQLCharExpr(scriptCode))); - } else if (object instanceof SQLCastExpr) { - String castName = sqlFunctions.nextId("cast"); - List methodParameters = new ArrayList<>(); - methodParameters.add(new KVValue(((SQLCastExpr) object).getExpr().toString())); - String castType = ((SQLCastExpr) object).getDataType().getName(); - String scriptCode = sqlFunctions.getCastScriptStatement(castName, castType, methodParameters); - - // Parameter "first" indicates if return statement is required. Take CAST statement nested in - // aggregate function SUM(CAST...) for example, return statement is required in this case. - // Otherwise DSL with metric aggregation always returns 0 as result. And this works also because - // the caller makeFieldImpl(SQLExpr("SUM...")) does pass first=true to here. 
- if (first) { - scriptCode += "; return " + castName; - } - methodParameters.add(new KVValue(scriptCode)); - paramers.add(new KVValue("script", new SQLCharExpr(scriptCode))); - } else if (object instanceof SQLAggregateExpr) { - SQLObject parent = object.getParent(); - SQLExpr source = (SQLExpr) parent.getAttribute("source"); - - if (parent instanceof SQLMethodInvokeExpr && source == null) { - throw new SqlFeatureNotImplementedException( - "Function calls of form '" - + ((SQLMethodInvokeExpr) parent).getMethodName() - + "(" - + ((SQLAggregateExpr) object).getMethodName() - + "(...))' are not implemented yet"); - } - - throw new SqlFeatureNotImplementedException( - "The complex aggregate expressions are not implemented yet: " + source); - } else { - paramers.add(new KVValue(Util.removeTableAilasFromField(object, tableAlias))); - } + if (tableAlias == null) { + field = new Field(newFieldName, alias); + } + return field; + } + + public MethodField makeMethodField( + String name, + List arguments, + SQLAggregateOption option, + String alias, + String tableAlias, + boolean first) + throws SqlParseException { + List paramers = new LinkedList<>(); + + for (SQLExpr object : arguments) { + + if (object instanceof SQLBinaryOpExpr) { + + SQLBinaryOpExpr binaryOpExpr = (SQLBinaryOpExpr) object; + + if (SQLFunctions.isFunctionTranslatedToScript(binaryOpExpr.getOperator().toString())) { + SQLMethodInvokeExpr mExpr = makeBinaryMethodField(binaryOpExpr, alias, first); + MethodField abc = + makeMethodField( + mExpr.getMethodName(), mExpr.getParameters(), null, null, tableAlias, false); + paramers.add( + new KVValue( + abc.getParams().get(0).toString(), + new SQLCharExpr(abc.getParams().get(1).toString()))); + } else { + if (!binaryOpExpr.getOperator().getName().equals("=")) { + paramers.add( + new KVValue("script", makeScriptMethodField(binaryOpExpr, null, tableAlias))); + } else { + SQLExpr right = binaryOpExpr.getRight(); + Object value = Util.expr2Object(right); + 
paramers.add(new KVValue(binaryOpExpr.getLeft().toString(), value)); + } } - //just check we can find the function - boolean builtInScriptFunction = SQLFunctions.isFunctionTranslatedToScript(name); - if (builtInScriptFunction) { - if (alias == null && first) { - alias = sqlFunctions.nextId(name); - } - //should check if field and first . - Tuple newFunctions = sqlFunctions.function(name.toLowerCase(), paramers, - paramers.isEmpty() ? null : paramers.get(0).key, first); - paramers.clear(); - if (!first) { - //variance - paramers.add(new KVValue(newFunctions.v1())); - } else { - paramers.add(new KVValue(alias)); - } - - paramers.add(new KVValue(newFunctions.v2())); + } else if (object instanceof SQLMethodInvokeExpr) { + SQLMethodInvokeExpr mExpr = (SQLMethodInvokeExpr) object; + String methodName = mExpr.getMethodName().toLowerCase(); + if (methodName.equals("script")) { + KVValue script = + new KVValue( + "script", + makeMethodField( + mExpr.getMethodName(), mExpr.getParameters(), null, alias, tableAlias, true)); + paramers.add(script); + } else if (methodName.equals("nested") || methodName.equals("reverse_nested")) { + NestedType nestedType = new NestedType(); + + if (!nestedType.tryFillFromExpr(object)) { + throw new SqlParseException("Failed to parse nested expression: " + object); + } + + // Fix bug: method name of reversed_nested() was set to "nested" wrongly + paramers.add(new KVValue(methodName, nestedType)); + } else if (methodName.equals("children")) { + ChildrenType childrenType = new ChildrenType(); + + if (!childrenType.tryFillFromExpr(object)) { + throw new SqlParseException("Failed to parse children expression: " + object); + } + + paramers.add(new KVValue("children", childrenType)); + } else if (SQLFunctions.isFunctionTranslatedToScript(methodName)) { + // throw new SqlParseException("only support script/nested as inner functions"); + MethodField abc = + makeMethodField(methodName, mExpr.getParameters(), null, null, tableAlias, false); + paramers.add( 
+ new KVValue( + abc.getParams().get(0).toString(), + new SQLCharExpr(abc.getParams().get(1).toString()))); + } else { + throw new SqlParseException("only support script/nested/children as inner functions"); } + } else if (object instanceof SQLCaseExpr) { + String scriptCode = new CaseWhenParser((SQLCaseExpr) object, alias, tableAlias).parse(); + paramers.add(new KVValue("script", new SQLCharExpr(scriptCode))); + } else if (object instanceof SQLCastExpr) { + String castName = sqlFunctions.nextId("cast"); + List methodParameters = new ArrayList<>(); + methodParameters.add(new KVValue(((SQLCastExpr) object).getExpr().toString())); + String castType = ((SQLCastExpr) object).getDataType().getName(); + String scriptCode = + sqlFunctions.getCastScriptStatement(castName, castType, methodParameters); + + // Parameter "first" indicates if return statement is required. Take CAST statement nested + // in + // aggregate function SUM(CAST...) for example, return statement is required in this case. + // Otherwise DSL with metric aggregation always returns 0 as result. And this works also + // because + // the caller makeFieldImpl(SQLExpr("SUM...")) does pass first=true to here. 
if (first) { - List tempParamers = new LinkedList<>(); - for (KVValue temp : paramers) { - if (temp.value instanceof SQLExpr) { - tempParamers.add(new KVValue(temp.key, Util.expr2Object((SQLExpr) temp.value))); - } else { - tempParamers.add(new KVValue(temp.key, temp.value)); - } - } - paramers.clear(); - paramers.addAll(tempParamers); + scriptCode += "; return " + castName; + } + methodParameters.add(new KVValue(scriptCode)); + paramers.add(new KVValue("script", new SQLCharExpr(scriptCode))); + } else if (object instanceof SQLAggregateExpr) { + SQLObject parent = object.getParent(); + SQLExpr source = (SQLExpr) parent.getAttribute("source"); + + if (parent instanceof SQLMethodInvokeExpr && source == null) { + throw new SqlFeatureNotImplementedException( + "Function calls of form '" + + ((SQLMethodInvokeExpr) parent).getMethodName() + + "(" + + ((SQLAggregateExpr) object).getMethodName() + + "(...))' are not implemented yet"); } - if (builtInScriptFunction) { - return new ScriptMethodField(name, paramers, option, alias); + throw new SqlFeatureNotImplementedException( + "The complex aggregate expressions are not implemented yet: " + source); + } else { + paramers.add(new KVValue(Util.removeTableAilasFromField(object, tableAlias))); + } + } + + // just check we can find the function + boolean builtInScriptFunction = SQLFunctions.isFunctionTranslatedToScript(name); + if (builtInScriptFunction) { + if (alias == null && first) { + alias = sqlFunctions.nextId(name); + } + // should check if field and first . + Tuple newFunctions = + sqlFunctions.function( + name.toLowerCase(), paramers, paramers.isEmpty() ? 
null : paramers.get(0).key, first); + paramers.clear(); + if (!first) { + // variance + paramers.add(new KVValue(newFunctions.v1())); + } else { + paramers.add(new KVValue(alias)); + } + + paramers.add(new KVValue(newFunctions.v2())); + } + if (first) { + List tempParamers = new LinkedList<>(); + for (KVValue temp : paramers) { + if (temp.value instanceof SQLExpr) { + tempParamers.add(new KVValue(temp.key, Util.expr2Object((SQLExpr) temp.value))); } else { - return new MethodField(name, paramers, option, alias); + tempParamers.add(new KVValue(temp.key, temp.value)); } + } + paramers.clear(); + paramers.addAll(tempParamers); + } + + if (builtInScriptFunction) { + return new ScriptMethodField(name, paramers, option, alias); + } else { + return new MethodField(name, paramers, option, alias); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/HavingParser.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/HavingParser.java index 307d87f6e8..e0d933a405 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/HavingParser.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/HavingParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import com.alibaba.druid.sql.ast.SQLExpr; @@ -21,93 +20,89 @@ import org.opensearch.sql.legacy.domain.Where; import org.opensearch.sql.legacy.exception.SqlParseException; -/** - * Parse expression in the Having clause. - */ +/** Parse expression in the Having clause. 
*/ public class HavingParser { - private final WhereParser whereParser; - private final List havingFields; - private final HavingConditionRewriter havingConditionRewriter; + private final WhereParser whereParser; + private final List havingFields; + private final HavingConditionRewriter havingConditionRewriter; - public HavingParser(WhereParser whereParser) { - this.whereParser = whereParser; - this.havingFields = new ArrayList<>(); - this.havingConditionRewriter = new HavingConditionRewriter(); - } + public HavingParser(WhereParser whereParser) { + this.whereParser = whereParser; + this.havingFields = new ArrayList<>(); + this.havingConditionRewriter = new HavingConditionRewriter(); + } - public void parseWhere(SQLExpr expr, Where where) throws SqlParseException { - expr.accept(havingConditionRewriter); - whereParser.parseWhere(expr, where); - } + public void parseWhere(SQLExpr expr, Where where) throws SqlParseException { + expr.accept(havingConditionRewriter); + whereParser.parseWhere(expr, where); + } - public List getHavingFields() { - return havingFields; - } - - private class HavingConditionRewriter extends MySqlASTVisitorAdapter { - private int aliasSuffix = 0; + public List getHavingFields() { + return havingFields; + } - @Override - public boolean visit(SQLAggregateExpr expr) { - SQLIdentifierExpr translatedExpr = translateAggExpr(expr); - SQLObject parent = expr.getParent(); - // Rewrite {@link SQLAggregateExpr} in {@link SQLBinaryOpExpr}, e.g. HAVING AVG(age) > 30) - if (parent instanceof SQLBinaryOpExpr) { - SQLBinaryOpExpr parentOpExpr = (SQLBinaryOpExpr) parent; - if (parentOpExpr.getLeft() == expr) { - parentOpExpr.setLeft(translatedExpr); - } else { - parentOpExpr.setRight(translatedExpr); - } - // Rewrite {@link SQLAggregateExpr} in {@link SQLNotExpr}, e.g. 
HAVING NOT (AVG(a) > 30) - } else if (parent instanceof SQLNotExpr) { - SQLNotExpr parentNotExpr = (SQLNotExpr) parent; - parentNotExpr.setExpr(translatedExpr); - // Rewrite {@link SQLAggregateExpr} in {@link SQLInListExpr}, e.g. HAVING AVG(a) IN (30, 40, 50) - } else if (parent instanceof SQLInListExpr) { - SQLInListExpr parentInListExpr = (SQLInListExpr) parent; - parentInListExpr.setExpr(translatedExpr); - // Rewrite {@link SQLAggregateExpr} in {@link SQLBetweenExpr}, e.g. HAVING AVG(a) BETWEEN 30, 40 - } else if (parent instanceof SQLBetweenExpr) { - SQLBetweenExpr parentBetweenExpr = (SQLBetweenExpr) parent; - parentBetweenExpr.setTestExpr(translatedExpr); - } else { - throw new IllegalStateException("Unsupported aggregation function in having clause " - + parent.getClass()); - } + private class HavingConditionRewriter extends MySqlASTVisitorAdapter { + private int aliasSuffix = 0; - return true; + @Override + public boolean visit(SQLAggregateExpr expr) { + SQLIdentifierExpr translatedExpr = translateAggExpr(expr); + SQLObject parent = expr.getParent(); + // Rewrite {@link SQLAggregateExpr} in {@link SQLBinaryOpExpr}, e.g. HAVING AVG(age) > 30) + if (parent instanceof SQLBinaryOpExpr) { + SQLBinaryOpExpr parentOpExpr = (SQLBinaryOpExpr) parent; + if (parentOpExpr.getLeft() == expr) { + parentOpExpr.setLeft(translatedExpr); + } else { + parentOpExpr.setRight(translatedExpr); } + // Rewrite {@link SQLAggregateExpr} in {@link SQLNotExpr}, e.g. HAVING NOT (AVG(a) > 30) + } else if (parent instanceof SQLNotExpr) { + SQLNotExpr parentNotExpr = (SQLNotExpr) parent; + parentNotExpr.setExpr(translatedExpr); + // Rewrite {@link SQLAggregateExpr} in {@link SQLInListExpr}, e.g. HAVING AVG(a) IN (30, 40, + // 50) + } else if (parent instanceof SQLInListExpr) { + SQLInListExpr parentInListExpr = (SQLInListExpr) parent; + parentInListExpr.setExpr(translatedExpr); + // Rewrite {@link SQLAggregateExpr} in {@link SQLBetweenExpr}, e.g. 
HAVING AVG(a) BETWEEN + // 30, 40 + } else if (parent instanceof SQLBetweenExpr) { + SQLBetweenExpr parentBetweenExpr = (SQLBetweenExpr) parent; + parentBetweenExpr.setTestExpr(translatedExpr); + } else { + throw new IllegalStateException( + "Unsupported aggregation function in having clause " + parent.getClass()); + } - /** - * If the expr is {@link SQLAggregateExpr} - * 1) rewrite as {@link SQLIdentifierExpr} - * 2) add the {@link SQLIdentifierExpr} to the havingFields - *

- * For example, the COUNT(age) is the {@link SQLAggregateExpr} in expression COUNT(age) > 1 - * 1) parsing COUNT(age) as {@link SQLIdentifierExpr} count_1 - * 2) return {@link SQLIdentifierExpr} count_1 to the havingFields - */ - private SQLIdentifierExpr translateAggExpr(SQLAggregateExpr expr) { - String methodAlias = methodAlias(expr.getMethodName()); - SQLIdentifierExpr sqlExpr = new SQLIdentifierExpr(methodAlias); - try { - havingFields.add(new FieldMaker().makeField( - expr, - methodAlias, - null)); - return sqlExpr; - } catch (SqlParseException e) { - throw new IllegalStateException(e); - } - } + return true; + } - private String methodAlias(String methodName) { - return String.format("%s_%d", methodName.toLowerCase(), nextAlias()); - } + /** + * If the expr is {@link SQLAggregateExpr} 1) rewrite as {@link SQLIdentifierExpr} 2) add the + * {@link SQLIdentifierExpr} to the havingFields + * + *

For example, the COUNT(age) is the {@link SQLAggregateExpr} in expression COUNT(age) > 1 + * 1) parsing COUNT(age) as {@link SQLIdentifierExpr} count_1 2) return {@link + * SQLIdentifierExpr} count_1 to the havingFields + */ + private SQLIdentifierExpr translateAggExpr(SQLAggregateExpr expr) { + String methodAlias = methodAlias(expr.getMethodName()); + SQLIdentifierExpr sqlExpr = new SQLIdentifierExpr(methodAlias); + try { + havingFields.add(new FieldMaker().makeField(expr, methodAlias, null)); + return sqlExpr; + } catch (SqlParseException e) { + throw new IllegalStateException(e); + } + } - private Integer nextAlias() { - return aliasSuffix++; - } + private String methodAlias(String methodName) { + return String.format("%s_%d", methodName.toLowerCase(), nextAlias()); + } + + private Integer nextAlias() { + return aliasSuffix++; } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/NestedType.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/NestedType.java index d9b7886310..8b0cd026e2 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/NestedType.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/NestedType.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import com.alibaba.druid.sql.ast.SQLExpr; @@ -18,128 +17,112 @@ import org.opensearch.sql.legacy.exception.SqlParseException; import org.opensearch.sql.legacy.utils.Util; -/** - * Created by Eliran on 12/11/2015. - */ +/** Created by Eliran on 12/11/2015. 
*/ public class NestedType { - public String field; - public String path; - public Where where; - private boolean reverse; - private boolean simple; - private final BucketPath bucketPath = new BucketPath(); - - public boolean tryFillFromExpr(SQLExpr expr) throws SqlParseException { - if (!(expr instanceof SQLMethodInvokeExpr)) { - return false; - } - SQLMethodInvokeExpr method = (SQLMethodInvokeExpr) expr; - String methodNameLower = method.getMethodName().toLowerCase(); - if (!(methodNameLower.equals("nested") || methodNameLower.equals("reverse_nested"))) { - return false; - } - - reverse = methodNameLower.equals("reverse_nested"); - - List parameters = method.getParameters(); - if (parameters.size() != 2 && parameters.size() != 1) { - throw new IllegalArgumentException("on nested object only allowed 2 parameters " - + "(field,path)/(path,conditions..) or 1 parameter (field) "); - } - - String field = Util.extendedToString(parameters.get(0)); - this.field = field; - if (parameters.size() == 1) { - //calc path myself.. 
- if (!field.contains(".")) { - if (!reverse) { - throw new IllegalArgumentException("Illegal nested field name: " + field); - } else { - this.path = null; - this.simple = true; - } - } else { - int lastDot = field.lastIndexOf("."); - this.path = field.substring(0, lastDot); - this.simple = true; - - } - - } else if (parameters.size() == 2) { - SQLExpr secondParameter = parameters.get(1); - if (secondParameter instanceof SQLTextLiteralExpr || secondParameter instanceof SQLIdentifierExpr - || secondParameter instanceof SQLPropertyExpr) { - - String pathString = Util.extendedToString(secondParameter); - if (pathString.equals("")) { - this.path = null; - } else { - this.path = pathString; - } - this.simple = true; - } else { - this.path = field; - Where where = Where.newInstance(); - new WhereParser(new SqlParser()).parseWhere(secondParameter, where); - if (where.getWheres().size() == 0) { - throw new SqlParseException("Failed to parse filter condition"); - } - this.where = where; - simple = false; - } - } - - return true; + public String field; + public String path; + public Where where; + private boolean reverse; + private boolean simple; + private final BucketPath bucketPath = new BucketPath(); + + public boolean tryFillFromExpr(SQLExpr expr) throws SqlParseException { + if (!(expr instanceof SQLMethodInvokeExpr)) { + return false; } - - public boolean isSimple() { - return simple; - } - - public boolean isReverse() { - return reverse; - } - - /** - * Return the name of the Nested Aggregation. 
- */ - public String getNestedAggName() { - return field + "@NESTED"; + SQLMethodInvokeExpr method = (SQLMethodInvokeExpr) expr; + String methodNameLower = method.getMethodName().toLowerCase(); + if (!(methodNameLower.equals("nested") || methodNameLower.equals("reverse_nested"))) { + return false; } - /** - * Return the name of the Filter Aggregation - */ - public String getFilterAggName() { - return field + "@FILTER"; - } + reverse = methodNameLower.equals("reverse_nested"); - public void addBucketPath(Path path) { - bucketPath.add(path); + List parameters = method.getParameters(); + if (parameters.size() != 2 && parameters.size() != 1) { + throw new IllegalArgumentException( + "on nested object only allowed 2 parameters " + + "(field,path)/(path,conditions..) or 1 parameter (field) "); } - public String getBucketPath() { - return bucketPath.getBucketPath(); + String field = Util.extendedToString(parameters.get(0)); + this.field = field; + if (parameters.size() == 1) { + // calc path myself.. 
+ if (!field.contains(".")) { + if (!reverse) { + throw new IllegalArgumentException("Illegal nested field name: " + field); + } else { + this.path = null; + this.simple = true; + } + } else { + int lastDot = field.lastIndexOf("."); + this.path = field.substring(0, lastDot); + this.simple = true; + } + + } else if (parameters.size() == 2) { + SQLExpr secondParameter = parameters.get(1); + if (secondParameter instanceof SQLTextLiteralExpr + || secondParameter instanceof SQLIdentifierExpr + || secondParameter instanceof SQLPropertyExpr) { + + String pathString = Util.extendedToString(secondParameter); + if (pathString.equals("")) { + this.path = null; + } else { + this.path = pathString; + } + this.simple = true; + } else { + this.path = field; + Where where = Where.newInstance(); + new WhereParser(new SqlParser()).parseWhere(secondParameter, where); + if (where.getWheres().size() == 0) { + throw new SqlParseException("Failed to parse filter condition"); + } + this.where = where; + simple = false; + } } - /** - * Return true if the filed is the nested filed. - * For example, the mapping - * { - * "projects":{ - * "type": "nested" - * "properties": { - * "name": { - * "type": "text" - * } - * } - * } - * } - *

- * If the filed is projects, return true. - * If the filed is projects.name, return false. - */ - public boolean isNestedField() { - return !field.contains(".") && field.equalsIgnoreCase(path); - } + return true; + } + + public boolean isSimple() { + return simple; + } + + public boolean isReverse() { + return reverse; + } + + /** Return the name of the Nested Aggregation. */ + public String getNestedAggName() { + return field + "@NESTED"; + } + + /** Return the name of the Filter Aggregation */ + public String getFilterAggName() { + return field + "@FILTER"; + } + + public void addBucketPath(Path path) { + bucketPath.add(path); + } + + public String getBucketPath() { + return bucketPath.getBucketPath(); + } + + /** + * Return true if the filed is the nested filed. For example, the mapping { "projects":{ "type": + * "nested" "properties": { "name": { "type": "text" } } } } + * + *

If the filed is projects, return true. If the filed is projects.name, return false. + */ + public boolean isNestedField() { + return !field.contains(".") && field.equalsIgnoreCase(path); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/SQLOdbcExpr.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/SQLOdbcExpr.java index ed03051b66..64d1235f4d 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/SQLOdbcExpr.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/SQLOdbcExpr.java @@ -3,50 +3,44 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import com.alibaba.druid.sql.ast.expr.SQLCharExpr; import com.alibaba.druid.sql.visitor.SQLASTVisitor; - -/** - * Created by jheimbouch on 3/17/15. - */ +/** Created by jheimbouch on 3/17/15. */ public class SQLOdbcExpr extends SQLCharExpr { - private static final long serialVersionUID = 1L; - - public SQLOdbcExpr() { - - } + private static final long serialVersionUID = 1L; - public SQLOdbcExpr(String text) { - super(text); - } + public SQLOdbcExpr() {} - @Override - public void output(StringBuffer buf) { - if ((this.text == null) || (this.text.length() == 0)) { - buf.append("NULL"); - } else { - buf.append("{ts '"); - buf.append(this.text.replaceAll("'", "''")); - buf.append("'}"); - } - } - - @Override - public String getText() { - StringBuilder sb = new StringBuilder(); - sb.append("{ts '"); - sb.append(this.text); - sb.append("'}"); - return sb.toString(); - } + public SQLOdbcExpr(String text) { + super(text); + } - protected void accept0(SQLASTVisitor visitor) { - visitor.visit(this); - visitor.endVisit(this); + @Override + public void output(StringBuffer buf) { + if ((this.text == null) || (this.text.length() == 0)) { + buf.append("NULL"); + } else { + buf.append("{ts '"); + buf.append(this.text.replaceAll("'", "''")); + buf.append("'}"); } + } + + @Override + public String getText() { + StringBuilder sb = new StringBuilder(); + 
sb.append("{ts '"); + sb.append(this.text); + sb.append("'}"); + return sb.toString(); + } + + protected void accept0(SQLASTVisitor visitor) { + visitor.visit(this); + visitor.endVisit(this); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/SQLParensIdentifierExpr.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/SQLParensIdentifierExpr.java index b9682ce84a..8eaae81dca 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/SQLParensIdentifierExpr.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/SQLParensIdentifierExpr.java @@ -3,27 +3,24 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr; - /** - * An Identifier that is wrapped in parenthesis. - * This is for tracking in group bys the difference between "group by state, age" and "group by (state), (age)". - * For non group by identifiers, it acts as a normal SQLIdentifierExpr. + * An Identifier that is wrapped in parenthesis. This is for tracking in group bys the difference + * between "group by state, age" and "group by (state), (age)". For non group by identifiers, it + * acts as a normal SQLIdentifierExpr. 
*/ public class SQLParensIdentifierExpr extends SQLIdentifierExpr { - public SQLParensIdentifierExpr() { - } + public SQLParensIdentifierExpr() {} - public SQLParensIdentifierExpr(String name) { - super(name); - } + public SQLParensIdentifierExpr(String name) { + super(name); + } - public SQLParensIdentifierExpr(SQLIdentifierExpr expr) { - super(expr.getName()); - } + public SQLParensIdentifierExpr(SQLIdentifierExpr expr) { + super(expr.getName()); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/ScriptFilter.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/ScriptFilter.java index 3eb4fecf67..3f9b12ca84 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/ScriptFilter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/ScriptFilter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import com.alibaba.druid.sql.ast.SQLExpr; @@ -16,96 +15,92 @@ import org.opensearch.sql.legacy.exception.SqlParseException; import org.opensearch.sql.legacy.utils.Util; -/** - * Created by Eliran on 11/12/2015. - */ +/** Created by Eliran on 11/12/2015. 
*/ public class ScriptFilter { - private String script; - private Map args; - private ScriptType scriptType; + private String script; + private Map args; + private ScriptType scriptType; - public ScriptFilter() { + public ScriptFilter() { - args = null; - scriptType = ScriptType.INLINE; - } + args = null; + scriptType = ScriptType.INLINE; + } - public ScriptFilter(String script, Map args, ScriptType scriptType) { - this.script = script; - this.args = args; - this.scriptType = scriptType; - } + public ScriptFilter(String script, Map args, ScriptType scriptType) { + this.script = script; + this.args = args; + this.scriptType = scriptType; + } - public boolean tryParseFromMethodExpr(SQLMethodInvokeExpr expr) throws SqlParseException { - if (!expr.getMethodName().toLowerCase().equals("script")) { - return false; - } - List methodParameters = expr.getParameters(); - if (methodParameters.size() == 0) { - return false; - } - script = Util.extendedToString(methodParameters.get(0)); - - if (methodParameters.size() == 1) { - return true; - } - - args = new HashMap<>(); - for (int i = 1; i < methodParameters.size(); i++) { - - SQLExpr innerExpr = methodParameters.get(i); - if (!(innerExpr instanceof SQLBinaryOpExpr)) { - return false; - } - SQLBinaryOpExpr binaryOpExpr = (SQLBinaryOpExpr) innerExpr; - if (!binaryOpExpr.getOperator().getName().equals("=")) { - return false; - } - - SQLExpr right = binaryOpExpr.getRight(); - Object value = Util.expr2Object(right); - String key = Util.extendedToString(binaryOpExpr.getLeft()); - if (key.equals("script_type")) { - parseAndUpdateScriptType(value.toString()); - } else { - args.put(key, value); - } - - } - return true; + public boolean tryParseFromMethodExpr(SQLMethodInvokeExpr expr) throws SqlParseException { + if (!expr.getMethodName().toLowerCase().equals("script")) { + return false; } - - private void parseAndUpdateScriptType(String scriptType) { - String scriptTypeUpper = scriptType.toUpperCase(); - switch (scriptTypeUpper) { - 
case "INLINE": - this.scriptType = ScriptType.INLINE; - break; - case "INDEXED": - case "STORED": - this.scriptType = ScriptType.STORED; - break; - } + List methodParameters = expr.getParameters(); + if (methodParameters.size() == 0) { + return false; } + script = Util.extendedToString(methodParameters.get(0)); - public boolean containsParameters() { - return args != null && args.size() > 0; + if (methodParameters.size() == 1) { + return true; } - public String getScript() { - return script; + args = new HashMap<>(); + for (int i = 1; i < methodParameters.size(); i++) { + + SQLExpr innerExpr = methodParameters.get(i); + if (!(innerExpr instanceof SQLBinaryOpExpr)) { + return false; + } + SQLBinaryOpExpr binaryOpExpr = (SQLBinaryOpExpr) innerExpr; + if (!binaryOpExpr.getOperator().getName().equals("=")) { + return false; + } + + SQLExpr right = binaryOpExpr.getRight(); + Object value = Util.expr2Object(right); + String key = Util.extendedToString(binaryOpExpr.getLeft()); + if (key.equals("script_type")) { + parseAndUpdateScriptType(value.toString()); + } else { + args.put(key, value); + } } - - public ScriptType getScriptType() { - return scriptType; + return true; + } + + private void parseAndUpdateScriptType(String scriptType) { + String scriptTypeUpper = scriptType.toUpperCase(); + switch (scriptTypeUpper) { + case "INLINE": + this.scriptType = ScriptType.INLINE; + break; + case "INDEXED": + case "STORED": + this.scriptType = ScriptType.STORED; + break; } + } - public Map getArgs() { - return args; - } + public boolean containsParameters() { + return args != null && args.size() > 0; + } - public void setArgs(Map args) { - this.args = args; - } + public String getScript() { + return script; + } + + public ScriptType getScriptType() { + return scriptType; + } + + public Map getArgs() { + return args; + } + public void setArgs(Map args) { + this.args = args; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/SelectParser.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/parser/SelectParser.java index 85becdaa53..62a63b320f 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/SelectParser.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/SelectParser.java @@ -3,11 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; -/** - * Created by allwefantasy on 9/2/16. - */ -public class SelectParser { -} +/** Created by allwefantasy on 9/2/16. */ +public class SelectParser {} diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/SqlParser.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/SqlParser.java index cf184750f2..947533630b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/SqlParser.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/SqlParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import static org.opensearch.sql.legacy.utils.Util.NESTED_JOIN_TYPE; @@ -50,546 +49,571 @@ import org.opensearch.sql.legacy.exception.SqlParseException; import org.opensearch.sql.legacy.query.multi.MultiQuerySelect; - /** * OpenSearch sql support * * @author ansj */ public class SqlParser { - private FieldMaker fieldMaker = new FieldMaker(); + private FieldMaker fieldMaker = new FieldMaker(); - public SqlParser() { + public SqlParser() {} + public Select parseSelect(SQLQueryExpr mySqlExpr) throws SqlParseException { + MySqlSelectQueryBlock query = (MySqlSelectQueryBlock) mySqlExpr.getSubQuery().getQuery(); + SubQueryParser subQueryParser = new SubQueryParser(this); + if (subQueryParser.containSubqueryInFrom(query)) { + return subQueryParser.parseSubQueryInFrom(query); + } else { + return parseSelect(query); } + } - public Select parseSelect(SQLQueryExpr mySqlExpr) throws SqlParseException { - MySqlSelectQueryBlock query = (MySqlSelectQueryBlock) mySqlExpr.getSubQuery().getQuery(); - SubQueryParser subQueryParser = new 
SubQueryParser(this); - if (subQueryParser.containSubqueryInFrom(query)) { - return subQueryParser.parseSubQueryInFrom(query); - } else { - return parseSelect(query); - } - } + public Select parseSelect(MySqlSelectQueryBlock query) throws SqlParseException { - public Select parseSelect(MySqlSelectQueryBlock query) throws SqlParseException { + Select select = new Select(); + WhereParser whereParser = new WhereParser(this, query, fieldMaker); - Select select = new Select(); - WhereParser whereParser = new WhereParser(this, query, fieldMaker); + if (query.getAttribute(NESTED_JOIN_TYPE) != null) { + select.setNestedJoinType((SQLJoinTableSource.JoinType) query.getAttribute(NESTED_JOIN_TYPE)); + } - if (query.getAttribute(NESTED_JOIN_TYPE) != null) { - select.setNestedJoinType((SQLJoinTableSource.JoinType) query.getAttribute(NESTED_JOIN_TYPE)); - } + findSelect(query, select, query.getFrom().getAlias()); - findSelect(query, select, query.getFrom().getAlias()); + select.getFrom().addAll(findFrom(query.getFrom())); - select.getFrom().addAll(findFrom(query.getFrom())); + select.setWhere(whereParser.findWhere()); - select.setWhere(whereParser.findWhere()); + select.fillSubQueries(); - select.fillSubQueries(); + select.getHints().addAll(parseHints(query.getHints())); - select.getHints().addAll(parseHints(query.getHints())); + findLimit(query.getLimit(), select); - findLimit(query.getLimit(), select); + if (query.getOrderBy() != null) { + addOrderByToSelect(select, query, query.getOrderBy().getItems(), null); + } - if (query.getOrderBy() != null) { - addOrderByToSelect(select, query, query.getOrderBy().getItems(), null); - } + if (query.getGroupBy() != null) { + findGroupBy(query, select); + } - if (query.getGroupBy() != null) { - findGroupBy(query, select); - } + return select; + } - return select; - } + public Delete parseDelete(SQLDeleteStatement deleteStatement) throws SqlParseException { + Delete delete = new Delete(); + WhereParser whereParser = new WhereParser(this, 
deleteStatement); - public Delete parseDelete(SQLDeleteStatement deleteStatement) throws SqlParseException { - Delete delete = new Delete(); - WhereParser whereParser = new WhereParser(this, deleteStatement); + delete.getFrom().addAll(findFrom(deleteStatement.getTableSource())); - delete.getFrom().addAll(findFrom(deleteStatement.getTableSource())); + delete.setWhere(whereParser.findWhere()); - delete.setWhere(whereParser.findWhere()); + return delete; + } - return delete; - } + public MultiQuerySelect parseMultiSelect(SQLUnionQuery query) throws SqlParseException { + Select firstTableSelect = this.parseSelect((MySqlSelectQueryBlock) query.getLeft()); + Select secondTableSelect = this.parseSelect((MySqlSelectQueryBlock) query.getRight()); + return new MultiQuerySelect(query.getOperator(), firstTableSelect, secondTableSelect); + } - public MultiQuerySelect parseMultiSelect(SQLUnionQuery query) throws SqlParseException { - Select firstTableSelect = this.parseSelect((MySqlSelectQueryBlock) query.getLeft()); - Select secondTableSelect = this.parseSelect((MySqlSelectQueryBlock) query.getRight()); - return new MultiQuerySelect(query.getOperator(), firstTableSelect, secondTableSelect); + private void findSelect(MySqlSelectQueryBlock query, Select select, String tableAlias) + throws SqlParseException { + List selectList = query.getSelectList(); + for (SQLSelectItem sqlSelectItem : selectList) { + Field field = + fieldMaker.makeField(sqlSelectItem.getExpr(), sqlSelectItem.getAlias(), tableAlias); + select.addField(field); } - - private void findSelect(MySqlSelectQueryBlock query, Select select, String tableAlias) throws SqlParseException { - List selectList = query.getSelectList(); - for (SQLSelectItem sqlSelectItem : selectList) { - Field field = fieldMaker.makeField(sqlSelectItem.getExpr(), sqlSelectItem.getAlias(), tableAlias); - select.addField(field); - } + } + + private void findGroupBy(MySqlSelectQueryBlock query, Select select) throws SqlParseException { + Map 
aliasesToExperssions = + query.getSelectList().stream() + .filter(item -> item.getAlias() != null) + .collect(Collectors.toMap(SQLSelectItem::getAlias, SQLSelectItem::getExpr)); + + SQLSelectGroupByClause groupBy = query.getGroupBy(); + SQLTableSource sqlTableSource = query.getFrom(); + + findHaving(query, select); + + List items = groupBy.getItems(); + + List standardGroupBys = new ArrayList<>(); + for (SQLExpr sqlExpr : items) { + // todo: mysql expr patch + if (sqlExpr instanceof MySqlSelectGroupByExpr) { + MySqlSelectGroupByExpr sqlSelectGroupByExpr = (MySqlSelectGroupByExpr) sqlExpr; + sqlExpr = sqlSelectGroupByExpr.getExpr(); + } + + if ((sqlExpr instanceof SQLParensIdentifierExpr + || !(sqlExpr instanceof SQLIdentifierExpr || sqlExpr instanceof SQLMethodInvokeExpr)) + && !standardGroupBys.isEmpty()) { + // flush the standard group bys + select.addGroupBy(convertExprsToFields(standardGroupBys, sqlTableSource)); + standardGroupBys = new ArrayList<>(); + } + + if (sqlExpr instanceof SQLParensIdentifierExpr) { + // single item with parens (should get its own aggregation) + select.addGroupBy(fieldMaker.makeField(sqlExpr, null, sqlTableSource.getAlias())); + } else if (sqlExpr instanceof SQLListExpr) { + // multiple items in their own list + SQLListExpr listExpr = (SQLListExpr) sqlExpr; + select.addGroupBy(convertExprsToFields(listExpr.getItems(), sqlTableSource)); + } else { + // check if field is actually alias + if (aliasesToExperssions.containsKey(sqlExpr.toString())) { + sqlExpr = aliasesToExperssions.get(sqlExpr.toString()); + } + standardGroupBys.add(sqlExpr); + } } - - private void findGroupBy(MySqlSelectQueryBlock query, Select select) throws SqlParseException { - Map aliasesToExperssions = query - .getSelectList() - .stream() - .filter(item -> item.getAlias() != null) - .collect(Collectors.toMap(SQLSelectItem::getAlias, SQLSelectItem::getExpr)); - - SQLSelectGroupByClause groupBy = query.getGroupBy(); - SQLTableSource sqlTableSource = query.getFrom(); - 
- findHaving(query, select); - - List items = groupBy.getItems(); - - List standardGroupBys = new ArrayList<>(); - for (SQLExpr sqlExpr : items) { - //todo: mysql expr patch - if (sqlExpr instanceof MySqlSelectGroupByExpr) { - MySqlSelectGroupByExpr sqlSelectGroupByExpr = (MySqlSelectGroupByExpr) sqlExpr; - sqlExpr = sqlSelectGroupByExpr.getExpr(); - } - - if ((sqlExpr instanceof SQLParensIdentifierExpr || !(sqlExpr instanceof SQLIdentifierExpr - || sqlExpr instanceof SQLMethodInvokeExpr)) && !standardGroupBys.isEmpty()) { - // flush the standard group bys - select.addGroupBy(convertExprsToFields(standardGroupBys, sqlTableSource)); - standardGroupBys = new ArrayList<>(); - } - - if (sqlExpr instanceof SQLParensIdentifierExpr) { - // single item with parens (should get its own aggregation) - select.addGroupBy(fieldMaker.makeField(sqlExpr, null, sqlTableSource.getAlias())); - } else if (sqlExpr instanceof SQLListExpr) { - // multiple items in their own list - SQLListExpr listExpr = (SQLListExpr) sqlExpr; - select.addGroupBy(convertExprsToFields(listExpr.getItems(), sqlTableSource)); - } else { - // check if field is actually alias - if (aliasesToExperssions.containsKey(sqlExpr.toString())) { - sqlExpr = aliasesToExperssions.get(sqlExpr.toString()); - } - standardGroupBys.add(sqlExpr); - } - } - if (!standardGroupBys.isEmpty()) { - select.addGroupBy(convertExprsToFields(standardGroupBys, sqlTableSource)); - } + if (!standardGroupBys.isEmpty()) { + select.addGroupBy(convertExprsToFields(standardGroupBys, sqlTableSource)); } - - private void findHaving(MySqlSelectQueryBlock query, Select select) throws SqlParseException { - select.setHaving(new Having(query.getGroupBy(), new WhereParser(this, query, fieldMaker))); + } + + private void findHaving(MySqlSelectQueryBlock query, Select select) throws SqlParseException { + select.setHaving(new Having(query.getGroupBy(), new WhereParser(this, query, fieldMaker))); + } + + private List convertExprsToFields( + List exprs, 
SQLTableSource sqlTableSource) throws SqlParseException { + List fields = new ArrayList<>(exprs.size()); + for (SQLExpr expr : exprs) { + // here we suppose groupby field will not have alias,so set null in second parameter + fields.add(fieldMaker.makeField(expr, null, sqlTableSource.getAlias())); } + return fields; + } - private List convertExprsToFields(List exprs, SQLTableSource sqlTableSource) - throws SqlParseException { - List fields = new ArrayList<>(exprs.size()); - for (SQLExpr expr : exprs) { - //here we suppose groupby field will not have alias,so set null in second parameter - fields.add(fieldMaker.makeField(expr, null, sqlTableSource.getAlias())); - } - return fields; + private String sameAliasWhere(Where where, String... aliases) throws SqlParseException { + if (where == null) { + return null; } - private String sameAliasWhere(Where where, String... aliases) throws SqlParseException { - if (where == null) { - return null; - } - - if (where instanceof Condition) { - Condition condition = (Condition) where; - String fieldName = condition.getName(); - for (String alias : aliases) { - String prefix = alias + "."; - if (fieldName.startsWith(prefix)) { - return alias; - } - } - throw new SqlParseException(String.format("Field [%s] with condition [%s] does not contain an alias", - fieldName, condition.toString())); - } - List sameAliases = new ArrayList<>(); - if (where.getWheres() != null && where.getWheres().size() > 0) { - for (Where innerWhere : where.getWheres()) { - sameAliases.add(sameAliasWhere(innerWhere, aliases)); - } - } - - if (sameAliases.contains(null)) { - return null; - } - String firstAlias = sameAliases.get(0); - //return null if more than one alias - for (String alias : sameAliases) { - if (!alias.equals(firstAlias)) { - return null; - } - } - return firstAlias; - } - - private void addOrderByToSelect(Select select, MySqlSelectQueryBlock queryBlock, List items, - String alias) - throws SqlParseException { - - Map aliasesToExpressions = 
queryBlock - .getSelectList() - .stream() - .filter(item -> item.getAlias() != null) - .collect(Collectors.toMap(SQLSelectItem::getAlias, SQLSelectItem::getExpr)); - - for (SQLSelectOrderByItem sqlSelectOrderByItem : items) { - if (sqlSelectOrderByItem.getType() == null) { - sqlSelectOrderByItem.setType(SQLOrderingSpecification.ASC); - } - String type = sqlSelectOrderByItem.getType().toString(); - SQLExpr expr = extractExprFromOrderExpr(sqlSelectOrderByItem); - - if (expr instanceof SQLIdentifierExpr) { - if (queryBlock.getGroupBy() == null || queryBlock.getGroupBy().getItems().isEmpty()) { - if (aliasesToExpressions.containsKey(((SQLIdentifierExpr) expr).getName())) { - expr = aliasesToExpressions.get(((SQLIdentifierExpr) expr).getName()); - } - } - } - - Field field = fieldMaker.makeField(expr, null, null); - - SQLExpr sqlExpr = sqlSelectOrderByItem.getExpr(); - if (sqlExpr instanceof SQLBinaryOpExpr && hasNullOrderInBinaryOrderExpr(sqlExpr)) { - // override Field.expression to SQLBinaryOpExpr, - // which was set by FieldMaker.makeField() to SQLIdentifierExpr above - field.setExpression(sqlExpr); - } - - String orderByName; - if (field.isScriptField()) { - MethodField methodField = (MethodField) field; - - // 0 - generated field name - final int SCRIPT_CONTENT_INDEX = 1; - orderByName = methodField.getParams().get(SCRIPT_CONTENT_INDEX).toString(); - - } else { - orderByName = field.toString(); - } - - orderByName = orderByName.replace("`", ""); - if (alias != null) { - orderByName = orderByName.replaceFirst(alias + "\\.", ""); - } - select.addOrderBy(field.getNestedPath(), orderByName, type, field); - } + if (where instanceof Condition) { + Condition condition = (Condition) where; + String fieldName = condition.getName(); + for (String alias : aliases) { + String prefix = alias + "."; + if (fieldName.startsWith(prefix)) { + return alias; + } + } + throw new SqlParseException( + String.format( + "Field [%s] with condition [%s] does not contain an alias", + 
fieldName, condition.toString())); } - - private SQLExpr extractExprFromOrderExpr(SQLSelectOrderByItem sqlSelectOrderByItem) { - SQLExpr expr = sqlSelectOrderByItem.getExpr(); - - // extract SQLIdentifier from Order IS NULL/NOT NULL expression to generate Field - // else passing SQLBinaryOpExpr to FieldMaker.makeFieldImpl tries to convert to SQLMethodInvokeExpr - // and throws SQLParserException - if (hasNullOrderInBinaryOrderExpr(expr)) { - return ((SQLBinaryOpExpr) expr).getLeft(); - } - return expr; - } - - private boolean hasNullOrderInBinaryOrderExpr(SQLExpr expr) { - /** - * Valid AST that meets ORDER BY IS NULL/NOT NULL condition (true) - * - * SQLSelectOrderByItem - * | - * SQLBinaryOpExpr (Is || IsNot) - * / \ - * SQLIdentifierExpr SQLNullExpr - */ - if (!(expr instanceof SQLBinaryOpExpr)) { - return false; - } - - // check "shape of expression": - SQLBinaryOpExpr binaryExpr = (SQLBinaryOpExpr) expr; - if (!(binaryExpr.getLeft() instanceof SQLIdentifierExpr)|| !(binaryExpr.getRight() instanceof SQLNullExpr)) { - return false; - } - - // check that operator IS or IS NOT - SQLBinaryOperator operator = binaryExpr.getOperator(); - return operator == SQLBinaryOperator.Is || operator == SQLBinaryOperator.IsNot; - + List sameAliases = new ArrayList<>(); + if (where.getWheres() != null && where.getWheres().size() > 0) { + for (Where innerWhere : where.getWheres()) { + sameAliases.add(sameAliasWhere(innerWhere, aliases)); + } } - private void findLimit(MySqlSelectQueryBlock.Limit limit, Select select) { - - if (limit == null) { - return; - } + if (sameAliases.contains(null)) { + return null; + } + String firstAlias = sameAliases.get(0); + // return null if more than one alias + for (String alias : sameAliases) { + if (!alias.equals(firstAlias)) { + return null; + } + } + return firstAlias; + } + + private void addOrderByToSelect( + Select select, + MySqlSelectQueryBlock queryBlock, + List items, + String alias) + throws SqlParseException { + + Map 
aliasesToExpressions = + queryBlock.getSelectList().stream() + .filter(item -> item.getAlias() != null) + .collect(Collectors.toMap(SQLSelectItem::getAlias, SQLSelectItem::getExpr)); + + for (SQLSelectOrderByItem sqlSelectOrderByItem : items) { + if (sqlSelectOrderByItem.getType() == null) { + sqlSelectOrderByItem.setType(SQLOrderingSpecification.ASC); + } + String type = sqlSelectOrderByItem.getType().toString(); + SQLExpr expr = extractExprFromOrderExpr(sqlSelectOrderByItem); + + if (expr instanceof SQLIdentifierExpr) { + if (queryBlock.getGroupBy() == null || queryBlock.getGroupBy().getItems().isEmpty()) { + if (aliasesToExpressions.containsKey(((SQLIdentifierExpr) expr).getName())) { + expr = aliasesToExpressions.get(((SQLIdentifierExpr) expr).getName()); + } + } + } + + Field field = fieldMaker.makeField(expr, null, null); + + SQLExpr sqlExpr = sqlSelectOrderByItem.getExpr(); + if (sqlExpr instanceof SQLBinaryOpExpr && hasNullOrderInBinaryOrderExpr(sqlExpr)) { + // override Field.expression to SQLBinaryOpExpr, + // which was set by FieldMaker.makeField() to SQLIdentifierExpr above + field.setExpression(sqlExpr); + } + + String orderByName; + if (field.isScriptField()) { + MethodField methodField = (MethodField) field; + + // 0 - generated field name + final int SCRIPT_CONTENT_INDEX = 1; + orderByName = methodField.getParams().get(SCRIPT_CONTENT_INDEX).toString(); + + } else { + orderByName = field.toString(); + } + + orderByName = orderByName.replace("`", ""); + if (alias != null) { + orderByName = orderByName.replaceFirst(alias + "\\.", ""); + } + select.addOrderBy(field.getNestedPath(), orderByName, type, field); + } + } - select.setRowCount(Integer.parseInt(limit.getRowCount().toString())); + private SQLExpr extractExprFromOrderExpr(SQLSelectOrderByItem sqlSelectOrderByItem) { + SQLExpr expr = sqlSelectOrderByItem.getExpr(); - if (limit.getOffset() != null) { - select.setOffset(Integer.parseInt(limit.getOffset().toString())); - } + // extract SQLIdentifier 
from Order IS NULL/NOT NULL expression to generate Field + // else passing SQLBinaryOpExpr to FieldMaker.makeFieldImpl tries to convert to + // SQLMethodInvokeExpr + // and throws SQLParserException + if (hasNullOrderInBinaryOrderExpr(expr)) { + return ((SQLBinaryOpExpr) expr).getLeft(); } + return expr; + } + private boolean hasNullOrderInBinaryOrderExpr(SQLExpr expr) { /** - * Parse the from clause + * Valid AST that meets ORDER BY IS NULL/NOT NULL condition (true) * - * @param from the from clause. - * @return list of From objects represents all the sources. + *

SQLSelectOrderByItem | SQLBinaryOpExpr (Is || IsNot) / \ SQLIdentifierExpr SQLNullExpr */ - private List findFrom(SQLTableSource from) { - boolean isSqlExprTable = from.getClass().isAssignableFrom(SQLExprTableSource.class); - - if (isSqlExprTable) { - SQLExprTableSource fromExpr = (SQLExprTableSource) from; - String[] split = fromExpr.getExpr().toString().split(","); - - ArrayList fromList = new ArrayList<>(); - for (String source : split) { - fromList.add(new From(source.trim(), fromExpr.getAlias())); - } - return fromList; - } + if (!(expr instanceof SQLBinaryOpExpr)) { + return false; + } - SQLJoinTableSource joinTableSource = ((SQLJoinTableSource) from); - List fromList = new ArrayList<>(); - fromList.addAll(findFrom(joinTableSource.getLeft())); - fromList.addAll(findFrom(joinTableSource.getRight())); - return fromList; + // check "shape of expression": + SQLBinaryOpExpr binaryExpr = (SQLBinaryOpExpr) expr; + if (!(binaryExpr.getLeft() instanceof SQLIdentifierExpr) + || !(binaryExpr.getRight() instanceof SQLNullExpr)) { + return false; } - public JoinSelect parseJoinSelect(SQLQueryExpr sqlExpr) throws SqlParseException { + // check that operator IS or IS NOT + SQLBinaryOperator operator = binaryExpr.getOperator(); + return operator == SQLBinaryOperator.Is || operator == SQLBinaryOperator.IsNot; + } - MySqlSelectQueryBlock query = (MySqlSelectQueryBlock) sqlExpr.getSubQuery().getQuery(); + private void findLimit(MySqlSelectQueryBlock.Limit limit, Select select) { - List joinedFrom = findJoinedFrom(query.getFrom()); - if (joinedFrom.size() != 2) { - throw new RuntimeException("currently supports only 2 tables join"); - } + if (limit == null) { + return; + } - JoinSelect joinSelect = createBasicJoinSelectAccordingToTableSource((SQLJoinTableSource) query.getFrom()); - List hints = parseHints(query.getHints()); - joinSelect.setHints(hints); - String firstTableAlias = joinedFrom.get(0).getAlias(); - String secondTableAlias = joinedFrom.get(1).getAlias(); - Map 
aliasToWhere = splitAndFindWhere(query.getWhere(), firstTableAlias, secondTableAlias); - Map> aliasToOrderBy = splitAndFindOrder(query.getOrderBy(), firstTableAlias, - secondTableAlias); - List connectedConditions = getConditionsFlatten(joinSelect.getConnectedWhere()); - joinSelect.setConnectedConditions(connectedConditions); - fillTableSelectedJoin(joinSelect.getFirstTable(), query, joinedFrom.get(0), - aliasToWhere.get(firstTableAlias), aliasToOrderBy.get(firstTableAlias), connectedConditions); - fillTableSelectedJoin(joinSelect.getSecondTable(), query, joinedFrom.get(1), - aliasToWhere.get(secondTableAlias), aliasToOrderBy.get(secondTableAlias), connectedConditions); - - updateJoinLimit(query.getLimit(), joinSelect); - - //todo: throw error feature not supported: no group bys on joins ? - return joinSelect; - } - - private Map> splitAndFindOrder(SQLOrderBy orderBy, String firstTableAlias, - String secondTableAlias) - throws SqlParseException { - Map> aliasToOrderBys = new HashMap<>(); - aliasToOrderBys.put(firstTableAlias, new ArrayList<>()); - aliasToOrderBys.put(secondTableAlias, new ArrayList<>()); - if (orderBy == null) { - return aliasToOrderBys; - } - List orderByItems = orderBy.getItems(); - for (SQLSelectOrderByItem orderByItem : orderByItems) { - if (orderByItem.getExpr().toString().startsWith(firstTableAlias + ".")) { - aliasToOrderBys.get(firstTableAlias).add(orderByItem); - } else if (orderByItem.getExpr().toString().startsWith(secondTableAlias + ".")) { - aliasToOrderBys.get(secondTableAlias).add(orderByItem); - } else { - throw new SqlParseException("order by field on join request should have alias before, got " - + orderByItem.getExpr().toString()); - } + select.setRowCount(Integer.parseInt(limit.getRowCount().toString())); - } - return aliasToOrderBys; + if (limit.getOffset() != null) { + select.setOffset(Integer.parseInt(limit.getOffset().toString())); } - - private void updateJoinLimit(MySqlSelectQueryBlock.Limit limit, JoinSelect joinSelect) { 
- if (limit != null && limit.getRowCount() != null) { - int sizeLimit = Integer.parseInt(limit.getRowCount().toString()); - joinSelect.setTotalLimit(sizeLimit); - } + } + + /** + * Parse the from clause + * + * @param from the from clause. + * @return list of From objects represents all the sources. + */ + private List findFrom(SQLTableSource from) { + boolean isSqlExprTable = from.getClass().isAssignableFrom(SQLExprTableSource.class); + + if (isSqlExprTable) { + SQLExprTableSource fromExpr = (SQLExprTableSource) from; + String[] split = fromExpr.getExpr().toString().split(","); + + ArrayList fromList = new ArrayList<>(); + for (String source : split) { + fromList.add(new From(source.trim(), fromExpr.getAlias())); + } + return fromList; } - private List parseHints(List sqlHints) throws SqlParseException { - List hints = new ArrayList<>(); - for (SQLCommentHint sqlHint : sqlHints) { - Hint hint = HintFactory.getHintFromString(sqlHint.getText()); - if (hint != null) { - hints.add(hint); - } - } - return hints; - } + SQLJoinTableSource joinTableSource = ((SQLJoinTableSource) from); + List fromList = new ArrayList<>(); + fromList.addAll(findFrom(joinTableSource.getLeft())); + fromList.addAll(findFrom(joinTableSource.getRight())); + return fromList; + } - private JoinSelect createBasicJoinSelectAccordingToTableSource(SQLJoinTableSource joinTableSource) - throws SqlParseException { - JoinSelect joinSelect = new JoinSelect(); - if (joinTableSource.getCondition() != null) { - Where where = Where.newInstance(); - WhereParser whereParser = new WhereParser(this, joinTableSource.getCondition()); - whereParser.parseWhere(joinTableSource.getCondition(), where); - joinSelect.setConnectedWhere(where); - } - SQLJoinTableSource.JoinType joinType = joinTableSource.getJoinType(); - joinSelect.setJoinType(joinType); - return joinSelect; - } + public JoinSelect parseJoinSelect(SQLQueryExpr sqlExpr) throws SqlParseException { - private Map splitAndFindWhere(SQLExpr whereExpr, String 
firstTableAlias, String secondTableAlias) - throws SqlParseException { - WhereParser whereParser = new WhereParser(this, whereExpr); - Where where = whereParser.findWhere(); - return splitWheres(where, firstTableAlias, secondTableAlias); - } + MySqlSelectQueryBlock query = (MySqlSelectQueryBlock) sqlExpr.getSubQuery().getQuery(); - private void fillTableSelectedJoin(TableOnJoinSelect tableOnJoin, MySqlSelectQueryBlock query, From tableFrom, - Where where, List orderBys, List conditions) - throws SqlParseException { - String alias = tableFrom.getAlias(); - fillBasicTableSelectJoin(tableOnJoin, tableFrom, where, orderBys, query); - tableOnJoin.setConnectedFields(getConnectedFields(conditions, alias)); - tableOnJoin.setSelectedFields(new ArrayList<>(tableOnJoin.getFields())); - tableOnJoin.setAlias(alias); - tableOnJoin.fillSubQueries(); + List joinedFrom = findJoinedFrom(query.getFrom()); + if (joinedFrom.size() != 2) { + throw new RuntimeException("currently supports only 2 tables join"); } - private List getConnectedFields(List conditions, String alias) throws SqlParseException { - List fields = new ArrayList<>(); - String prefix = alias + "."; - for (Condition condition : conditions) { - if (condition.getName().startsWith(prefix)) { - fields.add(new Field(condition.getName().replaceFirst(prefix, ""), null)); - } else { - if (!((condition.getValue() instanceof SQLPropertyExpr) - || (condition.getValue() instanceof SQLIdentifierExpr) - || (condition.getValue() instanceof String))) { - throw new SqlParseException("Illegal condition content: " + condition.toString()); - } - String aliasDotValue = condition.getValue().toString(); - int indexOfDot = aliasDotValue.indexOf("."); - String owner = aliasDotValue.substring(0, indexOfDot); - if (owner.equals(alias)) { - fields.add(new Field(aliasDotValue.substring(indexOfDot + 1), null)); - } - } - } - return fields; + JoinSelect joinSelect = + createBasicJoinSelectAccordingToTableSource((SQLJoinTableSource) query.getFrom()); 
+ List hints = parseHints(query.getHints()); + joinSelect.setHints(hints); + String firstTableAlias = joinedFrom.get(0).getAlias(); + String secondTableAlias = joinedFrom.get(1).getAlias(); + Map aliasToWhere = + splitAndFindWhere(query.getWhere(), firstTableAlias, secondTableAlias); + Map> aliasToOrderBy = + splitAndFindOrder(query.getOrderBy(), firstTableAlias, secondTableAlias); + List connectedConditions = getConditionsFlatten(joinSelect.getConnectedWhere()); + joinSelect.setConnectedConditions(connectedConditions); + fillTableSelectedJoin( + joinSelect.getFirstTable(), + query, + joinedFrom.get(0), + aliasToWhere.get(firstTableAlias), + aliasToOrderBy.get(firstTableAlias), + connectedConditions); + fillTableSelectedJoin( + joinSelect.getSecondTable(), + query, + joinedFrom.get(1), + aliasToWhere.get(secondTableAlias), + aliasToOrderBy.get(secondTableAlias), + connectedConditions); + + updateJoinLimit(query.getLimit(), joinSelect); + + // todo: throw error feature not supported: no group bys on joins ? 
+ return joinSelect; + } + + private Map> splitAndFindOrder( + SQLOrderBy orderBy, String firstTableAlias, String secondTableAlias) + throws SqlParseException { + Map> aliasToOrderBys = new HashMap<>(); + aliasToOrderBys.put(firstTableAlias, new ArrayList<>()); + aliasToOrderBys.put(secondTableAlias, new ArrayList<>()); + if (orderBy == null) { + return aliasToOrderBys; } - - private void fillBasicTableSelectJoin(TableOnJoinSelect select, From from, Where where, - List orderBys, MySqlSelectQueryBlock query) - throws SqlParseException { - select.getFrom().add(from); - findSelect(query, select, from.getAlias()); - select.setWhere(where); - addOrderByToSelect(select, query, orderBys, from.getAlias()); + List orderByItems = orderBy.getItems(); + for (SQLSelectOrderByItem orderByItem : orderByItems) { + if (orderByItem.getExpr().toString().startsWith(firstTableAlias + ".")) { + aliasToOrderBys.get(firstTableAlias).add(orderByItem); + } else if (orderByItem.getExpr().toString().startsWith(secondTableAlias + ".")) { + aliasToOrderBys.get(secondTableAlias).add(orderByItem); + } else { + throw new SqlParseException( + "order by field on join request should have alias before, got " + + orderByItem.getExpr().toString()); + } } + return aliasToOrderBys; + } - private List getJoinConditionsFlatten(SQLJoinTableSource from) throws SqlParseException { - List conditions = new ArrayList<>(); - if (from.getCondition() == null) { - return conditions; - } - Where where = Where.newInstance(); - WhereParser whereParser = new WhereParser(this, from.getCondition()); - whereParser.parseWhere(from.getCondition(), where); - addIfConditionRecursive(where, conditions); - return conditions; + private void updateJoinLimit(MySqlSelectQueryBlock.Limit limit, JoinSelect joinSelect) { + if (limit != null && limit.getRowCount() != null) { + int sizeLimit = Integer.parseInt(limit.getRowCount().toString()); + joinSelect.setTotalLimit(sizeLimit); } - - private List getConditionsFlatten(Where where) 
throws SqlParseException { - List conditions = new ArrayList<>(); - if (where == null) { - return conditions; - } - addIfConditionRecursive(where, conditions); - return conditions; + } + + private List parseHints(List sqlHints) throws SqlParseException { + List hints = new ArrayList<>(); + for (SQLCommentHint sqlHint : sqlHints) { + Hint hint = HintFactory.getHintFromString(sqlHint.getText()); + if (hint != null) { + hints.add(hint); + } + } + return hints; + } + + private JoinSelect createBasicJoinSelectAccordingToTableSource(SQLJoinTableSource joinTableSource) + throws SqlParseException { + JoinSelect joinSelect = new JoinSelect(); + if (joinTableSource.getCondition() != null) { + Where where = Where.newInstance(); + WhereParser whereParser = new WhereParser(this, joinTableSource.getCondition()); + whereParser.parseWhere(joinTableSource.getCondition(), where); + joinSelect.setConnectedWhere(where); + } + SQLJoinTableSource.JoinType joinType = joinTableSource.getJoinType(); + joinSelect.setJoinType(joinType); + return joinSelect; + } + + private Map splitAndFindWhere( + SQLExpr whereExpr, String firstTableAlias, String secondTableAlias) throws SqlParseException { + WhereParser whereParser = new WhereParser(this, whereExpr); + Where where = whereParser.findWhere(); + return splitWheres(where, firstTableAlias, secondTableAlias); + } + + private void fillTableSelectedJoin( + TableOnJoinSelect tableOnJoin, + MySqlSelectQueryBlock query, + From tableFrom, + Where where, + List orderBys, + List conditions) + throws SqlParseException { + String alias = tableFrom.getAlias(); + fillBasicTableSelectJoin(tableOnJoin, tableFrom, where, orderBys, query); + tableOnJoin.setConnectedFields(getConnectedFields(conditions, alias)); + tableOnJoin.setSelectedFields(new ArrayList<>(tableOnJoin.getFields())); + tableOnJoin.setAlias(alias); + tableOnJoin.fillSubQueries(); + } + + private List getConnectedFields(List conditions, String alias) + throws SqlParseException { + List fields = 
new ArrayList<>(); + String prefix = alias + "."; + for (Condition condition : conditions) { + if (condition.getName().startsWith(prefix)) { + fields.add(new Field(condition.getName().replaceFirst(prefix, ""), null)); + } else { + if (!((condition.getValue() instanceof SQLPropertyExpr) + || (condition.getValue() instanceof SQLIdentifierExpr) + || (condition.getValue() instanceof String))) { + throw new SqlParseException("Illegal condition content: " + condition.toString()); + } + String aliasDotValue = condition.getValue().toString(); + int indexOfDot = aliasDotValue.indexOf("."); + String owner = aliasDotValue.substring(0, indexOfDot); + if (owner.equals(alias)) { + fields.add(new Field(aliasDotValue.substring(indexOfDot + 1), null)); + } + } + } + return fields; + } + + private void fillBasicTableSelectJoin( + TableOnJoinSelect select, + From from, + Where where, + List orderBys, + MySqlSelectQueryBlock query) + throws SqlParseException { + select.getFrom().add(from); + findSelect(query, select, from.getAlias()); + select.setWhere(where); + addOrderByToSelect(select, query, orderBys, from.getAlias()); + } + + private List getJoinConditionsFlatten(SQLJoinTableSource from) + throws SqlParseException { + List conditions = new ArrayList<>(); + if (from.getCondition() == null) { + return conditions; + } + Where where = Where.newInstance(); + WhereParser whereParser = new WhereParser(this, from.getCondition()); + whereParser.parseWhere(from.getCondition(), where); + addIfConditionRecursive(where, conditions); + return conditions; + } + + private List getConditionsFlatten(Where where) throws SqlParseException { + List conditions = new ArrayList<>(); + if (where == null) { + return conditions; + } + addIfConditionRecursive(where, conditions); + return conditions; + } + + private Map splitWheres(Where where, String... 
aliases) throws SqlParseException { + Map aliasToWhere = new HashMap<>(); + for (String alias : aliases) { + aliasToWhere.put(alias, null); + } + if (where == null) { + return aliasToWhere; } + String allWhereFromSameAlias = sameAliasWhere(where, aliases); + if (allWhereFromSameAlias != null) { + removeAliasPrefix(where, allWhereFromSameAlias); + aliasToWhere.put(allWhereFromSameAlias, where); + return aliasToWhere; + } + for (Where innerWhere : where.getWheres()) { + String sameAlias = sameAliasWhere(innerWhere, aliases); + if (sameAlias == null) { + throw new SqlParseException( + "Currently support only one hierarchy on different tables where"); + } + removeAliasPrefix(innerWhere, sameAlias); + Where aliasCurrentWhere = aliasToWhere.get(sameAlias); + if (aliasCurrentWhere == null) { + aliasToWhere.put(sameAlias, innerWhere); + } else { + Where andWhereContainer = Where.newInstance(); + andWhereContainer.addWhere(aliasCurrentWhere); + andWhereContainer.addWhere(innerWhere); + aliasToWhere.put(sameAlias, andWhereContainer); + } + } - private Map splitWheres(Where where, String... 
aliases) throws SqlParseException { - Map aliasToWhere = new HashMap<>(); - for (String alias : aliases) { - aliasToWhere.put(alias, null); - } - if (where == null) { - return aliasToWhere; - } + return aliasToWhere; + } - String allWhereFromSameAlias = sameAliasWhere(where, aliases); - if (allWhereFromSameAlias != null) { - removeAliasPrefix(where, allWhereFromSameAlias); - aliasToWhere.put(allWhereFromSameAlias, where); - return aliasToWhere; - } - for (Where innerWhere : where.getWheres()) { - String sameAlias = sameAliasWhere(innerWhere, aliases); - if (sameAlias == null) { - throw new SqlParseException("Currently support only one hierarchy on different tables where"); - } - removeAliasPrefix(innerWhere, sameAlias); - Where aliasCurrentWhere = aliasToWhere.get(sameAlias); - if (aliasCurrentWhere == null) { - aliasToWhere.put(sameAlias, innerWhere); - } else { - Where andWhereContainer = Where.newInstance(); - andWhereContainer.addWhere(aliasCurrentWhere); - andWhereContainer.addWhere(innerWhere); - aliasToWhere.put(sameAlias, andWhereContainer); - } - } + private void removeAliasPrefix(Where where, String alias) { - return aliasToWhere; + if (where instanceof Condition) { + Condition cond = (Condition) where; + String aliasPrefix = alias + "."; + cond.setName(cond.getName().replaceFirst(aliasPrefix, "")); + return; } - - private void removeAliasPrefix(Where where, String alias) { - - if (where instanceof Condition) { - Condition cond = (Condition) where; - String aliasPrefix = alias + "."; - cond.setName(cond.getName().replaceFirst(aliasPrefix, "")); - return; - } - for (Where innerWhere : where.getWheres()) { - removeAliasPrefix(innerWhere, alias); - } + for (Where innerWhere : where.getWheres()) { + removeAliasPrefix(innerWhere, alias); } - - private void addIfConditionRecursive(Where where, List conditions) throws SqlParseException { - if (where instanceof Condition) { - Condition cond = (Condition) where; - if (!((cond.getValue() instanceof 
SQLIdentifierExpr) || (cond.getValue() instanceof SQLPropertyExpr) - || (cond.getValue() instanceof String))) { - throw new SqlParseException("conditions on join should be one side is secondTable OPEAR firstTable, " - + "condition was:" + cond.toString()); - } - conditions.add(cond); - } - for (Where innerWhere : where.getWheres()) { - addIfConditionRecursive(innerWhere, conditions); - } + } + + private void addIfConditionRecursive(Where where, List conditions) + throws SqlParseException { + if (where instanceof Condition) { + Condition cond = (Condition) where; + if (!((cond.getValue() instanceof SQLIdentifierExpr) + || (cond.getValue() instanceof SQLPropertyExpr) + || (cond.getValue() instanceof String))) { + throw new SqlParseException( + "conditions on join should be one side is secondTable OPEAR firstTable, " + + "condition was:" + + cond.toString()); + } + conditions.add(cond); } - - private List findJoinedFrom(SQLTableSource from) { - SQLJoinTableSource joinTableSource = ((SQLJoinTableSource) from); - List fromList = new ArrayList<>(); - fromList.addAll(findFrom(joinTableSource.getLeft())); - fromList.addAll(findFrom(joinTableSource.getRight())); - return fromList; + for (Where innerWhere : where.getWheres()) { + addIfConditionRecursive(innerWhere, conditions); } - - + } + + private List findJoinedFrom(SQLTableSource from) { + SQLJoinTableSource joinTableSource = ((SQLJoinTableSource) from); + List fromList = new ArrayList<>(); + fromList.addAll(findFrom(joinTableSource.getLeft())); + fromList.addAll(findFrom(joinTableSource.getRight())); + return fromList; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/SubQueryExpression.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/SubQueryExpression.java index 168318c490..e9b0797d00 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/SubQueryExpression.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/SubQueryExpression.java @@ -3,42 +3,39 @@ * 
SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import org.opensearch.sql.legacy.domain.Select; -/** - * Created by Eliran on 3/10/2015. - */ +/** Created by Eliran on 3/10/2015. */ public class SubQueryExpression { - private Object[] values; - private Select select; - private String returnField; - - public SubQueryExpression(Select innerSelect) { - this.select = innerSelect; - this.returnField = select.getFields().get(0).getName(); - values = null; - } - - public Object[] getValues() { - return values; - } - - public void setValues(Object[] values) { - this.values = values; - } - - public Select getSelect() { - return select; - } - - public void setSelect(Select select) { - this.select = select; - } - - public String getReturnField() { - return returnField; - } + private Object[] values; + private Select select; + private String returnField; + + public SubQueryExpression(Select innerSelect) { + this.select = innerSelect; + this.returnField = select.getFields().get(0).getName(); + values = null; + } + + public Object[] getValues() { + return values; + } + + public void setValues(Object[] values) { + this.values = values; + } + + public Select getSelect() { + return select; + } + + public void setSelect(Select select) { + this.select = select; + } + + public String getReturnField() { + return returnField; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/SubQueryParser.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/SubQueryParser.java index 71b19db0cf..b6d04ddd54 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/SubQueryParser.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/SubQueryParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import com.alibaba.druid.sql.ast.SQLExpr; @@ -23,88 +22,89 @@ import org.opensearch.sql.legacy.domain.Where; import org.opensearch.sql.legacy.exception.SqlParseException; 
-/** - * Definition of SubQuery Parser - */ +/** Definition of SubQuery Parser */ public class SubQueryParser { - private final SqlParser sqlParser; + private final SqlParser sqlParser; - public SubQueryParser(SqlParser sqlParser) { - this.sqlParser = sqlParser; - } + public SubQueryParser(SqlParser sqlParser) { + this.sqlParser = sqlParser; + } - public boolean containSubqueryInFrom(MySqlSelectQueryBlock query) { - return query.getFrom() instanceof SQLSubqueryTableSource; - } + public boolean containSubqueryInFrom(MySqlSelectQueryBlock query) { + return query.getFrom() instanceof SQLSubqueryTableSource; + } - public Select parseSubQueryInFrom(MySqlSelectQueryBlock query) throws SqlParseException { - assert query.getFrom() instanceof SQLSubqueryTableSource; + public Select parseSubQueryInFrom(MySqlSelectQueryBlock query) throws SqlParseException { + assert query.getFrom() instanceof SQLSubqueryTableSource; - Select select = sqlParser.parseSelect( - (MySqlSelectQueryBlock) ((SQLSubqueryTableSource) query.getFrom()).getSelect() - .getQuery()); - String subQueryAlias = query.getFrom().getAlias(); - return pushSelect(query.getSelectList(), select, subQueryAlias); - } + Select select = + sqlParser.parseSelect( + (MySqlSelectQueryBlock) + ((SQLSubqueryTableSource) query.getFrom()).getSelect().getQuery()); + String subQueryAlias = query.getFrom().getAlias(); + return pushSelect(query.getSelectList(), select, subQueryAlias); + } - private Select pushSelect(List selectItems, Select subquerySelect, String subQueryAlias) { - Map> fieldAliasRewriter = prepareFieldAliasRewriter( - selectItems, - subQueryAlias); + private Select pushSelect( + List selectItems, Select subquerySelect, String subQueryAlias) { + Map> fieldAliasRewriter = + prepareFieldAliasRewriter(selectItems, subQueryAlias); - //1. 
rewrite field in select list - Iterator fieldIterator = subquerySelect.getFields().iterator(); - while (fieldIterator.hasNext()) { - Field field = fieldIterator.next(); - /* - * return true if the subquerySelectItem in the final select list. - * for example, subquerySelectItem is "SUM(emp.empno) as TEMP", - * and final select list is TEMP. then return true. - */ - String fieldIdentifier = Strings.isNullOrEmpty(field.getAlias()) ? field.getName() : field.getAlias(); - if (fieldAliasRewriter.containsKey(fieldIdentifier)) { - field.setAlias(fieldAliasRewriter.get(fieldIdentifier).apply(fieldIdentifier)); - } else { - fieldIterator.remove(); - } - } - - //2. rewrite field in order by - for (Order orderBy : subquerySelect.getOrderBys()) { - if (fieldAliasRewriter.containsKey(orderBy.getName())) { - String replaceOrderName = fieldAliasRewriter.get(orderBy.getName()).apply(orderBy.getName()); - orderBy.setName(replaceOrderName); - orderBy.getSortField().setName(replaceOrderName); - } - } + // 1. rewrite field in select list + Iterator fieldIterator = subquerySelect.getFields().iterator(); + while (fieldIterator.hasNext()) { + Field field = fieldIterator.next(); + /* + * return true if the subquerySelectItem in the final select list. + * for example, subquerySelectItem is "SUM(emp.empno) as TEMP", + * and final select list is TEMP. then return true. + */ + String fieldIdentifier = + Strings.isNullOrEmpty(field.getAlias()) ? field.getName() : field.getAlias(); + if (fieldAliasRewriter.containsKey(fieldIdentifier)) { + field.setAlias(fieldAliasRewriter.get(fieldIdentifier).apply(fieldIdentifier)); + } else { + fieldIterator.remove(); + } + } - // 3. 
rewrite field in having - if (subquerySelect.getHaving() != null) { - for (Where condition : subquerySelect.getHaving().getConditions()) { - Condition cond = (Condition) condition; - if (fieldAliasRewriter.containsKey(cond.getName())) { - String replaceOrderName = fieldAliasRewriter.get(cond.getName()).apply(cond.getName()); - cond.setName(replaceOrderName); - } - } - } - return subquerySelect; + // 2. rewrite field in order by + for (Order orderBy : subquerySelect.getOrderBys()) { + if (fieldAliasRewriter.containsKey(orderBy.getName())) { + String replaceOrderName = + fieldAliasRewriter.get(orderBy.getName()).apply(orderBy.getName()); + orderBy.setName(replaceOrderName); + orderBy.getSortField().setName(replaceOrderName); + } } - private Map> prepareFieldAliasRewriter(List selectItems, - String owner) { - HashMap> selectMap = new HashMap<>(); - for (SQLSelectItem item : selectItems) { - if (Strings.isNullOrEmpty(item.getAlias())) { - selectMap.put(getFieldName(item.getExpr(), owner), Function.identity()); - } else { - selectMap.put(getFieldName(item.getExpr(), owner), s -> item.getAlias()); - } + // 3. 
rewrite field in having + if (subquerySelect.getHaving() != null) { + for (Where condition : subquerySelect.getHaving().getConditions()) { + Condition cond = (Condition) condition; + if (fieldAliasRewriter.containsKey(cond.getName())) { + String replaceOrderName = fieldAliasRewriter.get(cond.getName()).apply(cond.getName()); + cond.setName(replaceOrderName); } - return selectMap; + } } + return subquerySelect; + } - private String getFieldName(SQLExpr expr, String owner) { - return expr.toString().replace(String.format("%s.", owner), ""); + private Map> prepareFieldAliasRewriter( + List selectItems, String owner) { + HashMap> selectMap = new HashMap<>(); + for (SQLSelectItem item : selectItems) { + if (Strings.isNullOrEmpty(item.getAlias())) { + selectMap.put(getFieldName(item.getExpr(), owner), Function.identity()); + } else { + selectMap.put(getFieldName(item.getExpr(), owner), s -> item.getAlias()); + } } + return selectMap; + } + + private String getFieldName(SQLExpr expr, String owner) { + return expr.toString().replace(String.format("%s.", owner), ""); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/parser/WhereParser.java b/legacy/src/main/java/org/opensearch/sql/legacy/parser/WhereParser.java index c3ea5270e3..a329d1ed52 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/parser/WhereParser.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/parser/WhereParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.parser; import com.alibaba.druid.sql.ast.SQLExpr; @@ -43,637 +42,808 @@ import org.opensearch.sql.legacy.utils.SQLFunctions; import org.opensearch.sql.legacy.utils.Util; -/** - * Created by allwefantasy on 9/2/16. - */ +/** Created by allwefantasy on 9/2/16. 
*/ public class WhereParser { - private FieldMaker fieldMaker; + private FieldMaker fieldMaker; - private MySqlSelectQueryBlock query; - private SQLDeleteStatement delete; - private SQLExpr where; - private SqlParser sqlParser; + private MySqlSelectQueryBlock query; + private SQLDeleteStatement delete; + private SQLExpr where; + private SqlParser sqlParser; - public WhereParser(SqlParser sqlParser, MySqlSelectQueryBlock query, FieldMaker fieldMaker) { - this.sqlParser = sqlParser; - this.where = query.getWhere(); - - this.query = query; - this.fieldMaker = fieldMaker; - } + public WhereParser(SqlParser sqlParser, MySqlSelectQueryBlock query, FieldMaker fieldMaker) { + this.sqlParser = sqlParser; + this.where = query.getWhere(); - public WhereParser(SqlParser sqlParser, SQLDeleteStatement delete) { - this(sqlParser, delete.getWhere()); + this.query = query; + this.fieldMaker = fieldMaker; + } - this.delete = delete; - } + public WhereParser(SqlParser sqlParser, SQLDeleteStatement delete) { + this(sqlParser, delete.getWhere()); - public WhereParser(SqlParser sqlParser, SQLExpr expr) { - this(sqlParser); - this.where = expr; - } + this.delete = delete; + } - public WhereParser(SqlParser sqlParser) { - this.sqlParser = sqlParser; - this.fieldMaker = new FieldMaker(); - } + public WhereParser(SqlParser sqlParser, SQLExpr expr) { + this(sqlParser); + this.where = expr; + } - public Where findWhere() throws SqlParseException { - if (where == null) { - return null; - } + public WhereParser(SqlParser sqlParser) { + this.sqlParser = sqlParser; + this.fieldMaker = new FieldMaker(); + } - Where myWhere = Where.newInstance(); - parseWhere(where, myWhere); - return myWhere; + public Where findWhere() throws SqlParseException { + if (where == null) { + return null; } - public void parseWhere(SQLExpr expr, Where where) throws SqlParseException { - if (expr instanceof SQLBinaryOpExpr) { - SQLBinaryOpExpr bExpr = (SQLBinaryOpExpr) expr; - if 
(explainSpecialCondWithBothSidesAreLiterals(bExpr, where)) { - return; - } - if (explainSpecialCondWithBothSidesAreProperty(bExpr, where)) { - return; - } - } - - if (expr instanceof SQLBinaryOpExpr && !isCond((SQLBinaryOpExpr) expr)) { - SQLBinaryOpExpr bExpr = (SQLBinaryOpExpr) expr; - routeCond(bExpr, bExpr.getLeft(), where); - routeCond(bExpr, bExpr.getRight(), where); - } else if (expr instanceof SQLNotExpr) { - parseWhere(((SQLNotExpr) expr).getExpr(), where); - negateWhere(where); - } else { - explainCond("AND", expr, where); - } + Where myWhere = Where.newInstance(); + parseWhere(where, myWhere); + return myWhere; + } + + public void parseWhere(SQLExpr expr, Where where) throws SqlParseException { + if (expr instanceof SQLBinaryOpExpr) { + SQLBinaryOpExpr bExpr = (SQLBinaryOpExpr) expr; + if (explainSpecialCondWithBothSidesAreLiterals(bExpr, where)) { + return; + } + if (explainSpecialCondWithBothSidesAreProperty(bExpr, where)) { + return; + } } - private void negateWhere(Where where) throws SqlParseException { - for (Where sub : where.getWheres()) { - if (sub instanceof Condition) { - Condition cond = (Condition) sub; - cond.setOPERATOR(cond.getOPERATOR().negative()); - } else { - negateWhere(sub); - } - sub.setConn(sub.getConn().negative()); - } + if (expr instanceof SQLBinaryOpExpr && !isCond((SQLBinaryOpExpr) expr)) { + SQLBinaryOpExpr bExpr = (SQLBinaryOpExpr) expr; + routeCond(bExpr, bExpr.getLeft(), where); + routeCond(bExpr, bExpr.getRight(), where); + } else if (expr instanceof SQLNotExpr) { + parseWhere(((SQLNotExpr) expr).getExpr(), where); + negateWhere(where); + } else { + explainCond("AND", expr, where); } - - //some where conditions eg. 
1=1 or 3>2 or 'a'='b' - private boolean explainSpecialCondWithBothSidesAreLiterals(SQLBinaryOpExpr bExpr, Where where) - throws SqlParseException { - if ((bExpr.getLeft() instanceof SQLNumericLiteralExpr || bExpr.getLeft() instanceof SQLCharExpr) - && (bExpr.getRight() instanceof SQLNumericLiteralExpr || bExpr.getRight() instanceof SQLCharExpr) - ) { - SQLMethodInvokeExpr sqlMethodInvokeExpr = new SQLMethodInvokeExpr("script", null); - String operator = bExpr.getOperator().getName(); - if (operator.equals("=")) { - operator = "=="; - } - sqlMethodInvokeExpr.addParameter( - new SQLCharExpr(Util.expr2Object(bExpr.getLeft(), "'") - + " " + operator + " " + Util.expr2Object(bExpr.getRight(), "'")) - ); - - explainCond("AND", sqlMethodInvokeExpr, where); - return true; - } - return false; + } + + private void negateWhere(Where where) throws SqlParseException { + for (Where sub : where.getWheres()) { + if (sub instanceof Condition) { + Condition cond = (Condition) sub; + cond.setOPERATOR(cond.getOPERATOR().negative()); + } else { + negateWhere(sub); + } + sub.setConn(sub.getConn().negative()); } - - //some where conditions eg. 
field1=field2 or field1>field2 - private boolean explainSpecialCondWithBothSidesAreProperty(SQLBinaryOpExpr bExpr, Where where) - throws SqlParseException { - //join is not support - if ((bExpr.getLeft() instanceof SQLPropertyExpr || bExpr.getLeft() instanceof SQLIdentifierExpr) - && (bExpr.getRight() instanceof SQLPropertyExpr || bExpr.getRight() instanceof SQLIdentifierExpr) - && Sets.newHashSet("=", "<", ">", ">=", "<=").contains(bExpr.getOperator().getName()) - && !Util.isFromJoinOrUnionTable(bExpr) - ) { - SQLMethodInvokeExpr sqlMethodInvokeExpr = new SQLMethodInvokeExpr("script", null); - String operator = bExpr.getOperator().getName(); - if (operator.equals("=")) { - operator = "=="; - } - - String leftProperty = Util.expr2Object(bExpr.getLeft()).toString(); - String rightProperty = Util.expr2Object(bExpr.getRight()).toString(); - if (leftProperty.split("\\.").length > 1) { - - leftProperty = leftProperty.substring(leftProperty.split("\\.")[0].length() + 1); - } - - if (rightProperty.split("\\.").length > 1) { - rightProperty = rightProperty.substring(rightProperty.split("\\.")[0].length() + 1); - } - - sqlMethodInvokeExpr.addParameter(new SQLCharExpr( - "doc['" + leftProperty + "'].value " + operator + " doc['" + rightProperty + "'].value")); - - explainCond("AND", sqlMethodInvokeExpr, where); - return true; - } - return false; + } + + // some where conditions eg. 
1=1 or 3>2 or 'a'='b' + private boolean explainSpecialCondWithBothSidesAreLiterals(SQLBinaryOpExpr bExpr, Where where) + throws SqlParseException { + if ((bExpr.getLeft() instanceof SQLNumericLiteralExpr || bExpr.getLeft() instanceof SQLCharExpr) + && (bExpr.getRight() instanceof SQLNumericLiteralExpr + || bExpr.getRight() instanceof SQLCharExpr)) { + SQLMethodInvokeExpr sqlMethodInvokeExpr = new SQLMethodInvokeExpr("script", null); + String operator = bExpr.getOperator().getName(); + if (operator.equals("=")) { + operator = "=="; + } + sqlMethodInvokeExpr.addParameter( + new SQLCharExpr( + Util.expr2Object(bExpr.getLeft(), "'") + + " " + + operator + + " " + + Util.expr2Object(bExpr.getRight(), "'"))); + + explainCond("AND", sqlMethodInvokeExpr, where); + return true; } - - - private boolean isCond(SQLBinaryOpExpr expr) { - SQLExpr leftSide = expr.getLeft(); - if (leftSide instanceof SQLMethodInvokeExpr) { - return isAllowedMethodOnConditionLeft((SQLMethodInvokeExpr) leftSide, expr.getOperator()); - } - return leftSide instanceof SQLIdentifierExpr - || leftSide instanceof SQLPropertyExpr - || leftSide instanceof SQLVariantRefExpr - || leftSide instanceof SQLCastExpr; + return false; + } + + // some where conditions eg. 
field1=field2 or field1>field2 + private boolean explainSpecialCondWithBothSidesAreProperty(SQLBinaryOpExpr bExpr, Where where) + throws SqlParseException { + // join is not support + if ((bExpr.getLeft() instanceof SQLPropertyExpr || bExpr.getLeft() instanceof SQLIdentifierExpr) + && (bExpr.getRight() instanceof SQLPropertyExpr + || bExpr.getRight() instanceof SQLIdentifierExpr) + && Sets.newHashSet("=", "<", ">", ">=", "<=").contains(bExpr.getOperator().getName()) + && !Util.isFromJoinOrUnionTable(bExpr)) { + SQLMethodInvokeExpr sqlMethodInvokeExpr = new SQLMethodInvokeExpr("script", null); + String operator = bExpr.getOperator().getName(); + if (operator.equals("=")) { + operator = "=="; + } + + String leftProperty = Util.expr2Object(bExpr.getLeft()).toString(); + String rightProperty = Util.expr2Object(bExpr.getRight()).toString(); + if (leftProperty.split("\\.").length > 1) { + + leftProperty = leftProperty.substring(leftProperty.split("\\.")[0].length() + 1); + } + + if (rightProperty.split("\\.").length > 1) { + rightProperty = rightProperty.substring(rightProperty.split("\\.")[0].length() + 1); + } + + sqlMethodInvokeExpr.addParameter( + new SQLCharExpr( + "doc['" + + leftProperty + + "'].value " + + operator + + " doc['" + + rightProperty + + "'].value")); + + explainCond("AND", sqlMethodInvokeExpr, where); + return true; } + return false; + } - private boolean isAllowedMethodOnConditionLeft(SQLMethodInvokeExpr method, SQLBinaryOperator operator) { - return (method.getMethodName().toLowerCase().equals("nested") - || method.getMethodName().toLowerCase().equals("children") - || SQLFunctions.isFunctionTranslatedToScript(method.getMethodName()) - ) && !operator.isLogical(); + private boolean isCond(SQLBinaryOpExpr expr) { + SQLExpr leftSide = expr.getLeft(); + if (leftSide instanceof SQLMethodInvokeExpr) { + return isAllowedMethodOnConditionLeft((SQLMethodInvokeExpr) leftSide, expr.getOperator()); } - - - private void routeCond(SQLBinaryOpExpr bExpr, SQLExpr 
sub, Where where) throws SqlParseException { - if (sub instanceof SQLBinaryOpExpr && !isCond((SQLBinaryOpExpr) sub)) { - SQLBinaryOpExpr binarySub = (SQLBinaryOpExpr) sub; - if (binarySub.getOperator().priority != bExpr.getOperator().priority) { - Where subWhere = new Where(bExpr.getOperator().name); - where.addWhere(subWhere); - parseWhere(binarySub, subWhere); - } else { - parseWhere(binarySub, where); - } - } else if (sub instanceof SQLNotExpr) { - Where subWhere = new Where(bExpr.getOperator().name); - where.addWhere(subWhere); - parseWhere(((SQLNotExpr) sub).getExpr(), subWhere); - negateWhere(subWhere); - } else { - explainCond(bExpr.getOperator().name, sub, where); - } + return leftSide instanceof SQLIdentifierExpr + || leftSide instanceof SQLPropertyExpr + || leftSide instanceof SQLVariantRefExpr + || leftSide instanceof SQLCastExpr; + } + + private boolean isAllowedMethodOnConditionLeft( + SQLMethodInvokeExpr method, SQLBinaryOperator operator) { + return (method.getMethodName().toLowerCase().equals("nested") + || method.getMethodName().toLowerCase().equals("children") + || SQLFunctions.isFunctionTranslatedToScript(method.getMethodName())) + && !operator.isLogical(); + } + + private void routeCond(SQLBinaryOpExpr bExpr, SQLExpr sub, Where where) throws SqlParseException { + if (sub instanceof SQLBinaryOpExpr && !isCond((SQLBinaryOpExpr) sub)) { + SQLBinaryOpExpr binarySub = (SQLBinaryOpExpr) sub; + if (binarySub.getOperator().priority != bExpr.getOperator().priority) { + Where subWhere = new Where(bExpr.getOperator().name); + where.addWhere(subWhere); + parseWhere(binarySub, subWhere); + } else { + parseWhere(binarySub, where); + } + } else if (sub instanceof SQLNotExpr) { + Where subWhere = new Where(bExpr.getOperator().name); + where.addWhere(subWhere); + parseWhere(((SQLNotExpr) sub).getExpr(), subWhere); + negateWhere(subWhere); + } else { + explainCond(bExpr.getOperator().name, sub, where); } - - private void explainCond(String opear, SQLExpr expr, 
Where where) throws SqlParseException { - if (expr instanceof SQLBinaryOpExpr) { - SQLBinaryOpExpr soExpr = (SQLBinaryOpExpr) expr; - - boolean methodAsOpear = false; - - boolean isNested = false; - boolean isChildren = false; - - NestedType nestedType = new NestedType(); - if (nestedType.tryFillFromExpr(soExpr.getLeft())) { - soExpr.setLeft(new SQLIdentifierExpr(nestedType.field)); - isNested = true; - } - - ChildrenType childrenType = new ChildrenType(); - if (childrenType.tryFillFromExpr(soExpr.getLeft())) { - soExpr.setLeft(new SQLIdentifierExpr(childrenType.field)); - isChildren = true; - } - - if (soExpr.getRight() instanceof SQLMethodInvokeExpr) { - SQLMethodInvokeExpr method = (SQLMethodInvokeExpr) soExpr.getRight(); - String methodName = method.getMethodName().toLowerCase(); - - if (Condition.OPERATOR.methodNameToOpear.containsKey(methodName)) { - Object[] methodParametersValue = getMethodValuesWithSubQueries(method); - - final Condition condition; - // fix OPEAR - Condition.OPERATOR oper = Condition.OPERATOR.methodNameToOpear.get(methodName); - if (soExpr.getOperator() == SQLBinaryOperator.LessThanOrGreater - || soExpr.getOperator() == SQLBinaryOperator.NotEqual) { - oper = oper.negative(); - } - if (isNested) { - condition = new Condition(Where.CONN.valueOf(opear), soExpr.getLeft().toString(), - soExpr.getLeft(), oper, methodParametersValue, soExpr.getRight(), nestedType); - } else if (isChildren) { - condition = new Condition(Where.CONN.valueOf(opear), soExpr.getLeft().toString(), - soExpr.getLeft(), oper, methodParametersValue, soExpr.getRight(), childrenType); - } else { - condition = new Condition(Where.CONN.valueOf(opear), soExpr.getLeft().toString(), - soExpr.getLeft(), oper, methodParametersValue, soExpr.getRight(), null); - } - - where.addWhere(condition); - methodAsOpear = true; - } - } - - if (!methodAsOpear) { - final Condition condition; - - if (isNested) { - condition = new Condition(Where.CONN.valueOf(opear), soExpr.getLeft().toString(), 
soExpr.getLeft(), - soExpr.getOperator().name, parseValue(soExpr.getRight()), soExpr.getRight(), nestedType); - } else if (isChildren) { - condition = new Condition(Where.CONN.valueOf(opear), soExpr.getLeft().toString(), soExpr.getLeft(), - soExpr.getOperator().name, parseValue(soExpr.getRight()), soExpr.getRight(), childrenType); - } else { - SQLMethodInvokeExpr sqlMethodInvokeExpr = parseSQLBinaryOpExprWhoIsConditionInWhere(soExpr); - if (sqlMethodInvokeExpr == null) { - condition = new Condition(Where.CONN.valueOf(opear), soExpr.getLeft().toString(), - soExpr.getLeft(), soExpr.getOperator().name, parseValue(soExpr.getRight()), - soExpr.getRight(), null); - } else { - ScriptFilter scriptFilter = new ScriptFilter(); - if (!scriptFilter.tryParseFromMethodExpr(sqlMethodInvokeExpr)) { - throw new SqlParseException("could not parse script filter"); - } - condition = new Condition(Where.CONN.valueOf(opear), null, soExpr.getLeft(), - "SCRIPT", scriptFilter, soExpr.getRight()); - - } - - } - where.addWhere(condition); + } + + private void explainCond(String opear, SQLExpr expr, Where where) throws SqlParseException { + if (expr instanceof SQLBinaryOpExpr) { + SQLBinaryOpExpr soExpr = (SQLBinaryOpExpr) expr; + + boolean methodAsOpear = false; + + boolean isNested = false; + boolean isChildren = false; + + NestedType nestedType = new NestedType(); + if (nestedType.tryFillFromExpr(soExpr.getLeft())) { + soExpr.setLeft(new SQLIdentifierExpr(nestedType.field)); + isNested = true; + } + + ChildrenType childrenType = new ChildrenType(); + if (childrenType.tryFillFromExpr(soExpr.getLeft())) { + soExpr.setLeft(new SQLIdentifierExpr(childrenType.field)); + isChildren = true; + } + + if (soExpr.getRight() instanceof SQLMethodInvokeExpr) { + SQLMethodInvokeExpr method = (SQLMethodInvokeExpr) soExpr.getRight(); + String methodName = method.getMethodName().toLowerCase(); + + if (Condition.OPERATOR.methodNameToOpear.containsKey(methodName)) { + Object[] methodParametersValue = 
getMethodValuesWithSubQueries(method); + + final Condition condition; + // fix OPEAR + Condition.OPERATOR oper = Condition.OPERATOR.methodNameToOpear.get(methodName); + if (soExpr.getOperator() == SQLBinaryOperator.LessThanOrGreater + || soExpr.getOperator() == SQLBinaryOperator.NotEqual) { + oper = oper.negative(); + } + if (isNested) { + condition = + new Condition( + Where.CONN.valueOf(opear), + soExpr.getLeft().toString(), + soExpr.getLeft(), + oper, + methodParametersValue, + soExpr.getRight(), + nestedType); + } else if (isChildren) { + condition = + new Condition( + Where.CONN.valueOf(opear), + soExpr.getLeft().toString(), + soExpr.getLeft(), + oper, + methodParametersValue, + soExpr.getRight(), + childrenType); + } else { + condition = + new Condition( + Where.CONN.valueOf(opear), + soExpr.getLeft().toString(), + soExpr.getLeft(), + oper, + methodParametersValue, + soExpr.getRight(), + null); + } + + where.addWhere(condition); + methodAsOpear = true; + } + } + + if (!methodAsOpear) { + final Condition condition; + + if (isNested) { + condition = + new Condition( + Where.CONN.valueOf(opear), + soExpr.getLeft().toString(), + soExpr.getLeft(), + soExpr.getOperator().name, + parseValue(soExpr.getRight()), + soExpr.getRight(), + nestedType); + } else if (isChildren) { + condition = + new Condition( + Where.CONN.valueOf(opear), + soExpr.getLeft().toString(), + soExpr.getLeft(), + soExpr.getOperator().name, + parseValue(soExpr.getRight()), + soExpr.getRight(), + childrenType); + } else { + SQLMethodInvokeExpr sqlMethodInvokeExpr = + parseSQLBinaryOpExprWhoIsConditionInWhere(soExpr); + if (sqlMethodInvokeExpr == null) { + condition = + new Condition( + Where.CONN.valueOf(opear), + soExpr.getLeft().toString(), + soExpr.getLeft(), + soExpr.getOperator().name, + parseValue(soExpr.getRight()), + soExpr.getRight(), + null); + } else { + ScriptFilter scriptFilter = new ScriptFilter(); + if (!scriptFilter.tryParseFromMethodExpr(sqlMethodInvokeExpr)) { + throw new 
SqlParseException("could not parse script filter"); } - } else if (expr instanceof SQLInListExpr) { - SQLInListExpr siExpr = (SQLInListExpr) expr; - String leftSide = siExpr.getExpr().toString(); - - boolean isNested = false; - boolean isChildren = false; + condition = + new Condition( + Where.CONN.valueOf(opear), + null, + soExpr.getLeft(), + "SCRIPT", + scriptFilter, + soExpr.getRight()); + } + } + where.addWhere(condition); + } + } else if (expr instanceof SQLInListExpr) { + SQLInListExpr siExpr = (SQLInListExpr) expr; + String leftSide = siExpr.getExpr().toString(); - NestedType nestedType = new NestedType(); - if (nestedType.tryFillFromExpr(siExpr.getExpr())) { - leftSide = nestedType.field; + boolean isNested = false; + boolean isChildren = false; - isNested = false; - } + NestedType nestedType = new NestedType(); + if (nestedType.tryFillFromExpr(siExpr.getExpr())) { + leftSide = nestedType.field; - ChildrenType childrenType = new ChildrenType(); - if (childrenType.tryFillFromExpr(siExpr.getExpr())) { - leftSide = childrenType.field; + isNested = false; + } - isChildren = true; - } + ChildrenType childrenType = new ChildrenType(); + if (childrenType.tryFillFromExpr(siExpr.getExpr())) { + leftSide = childrenType.field; - final Condition condition; - - if (isNested) { - condition = new Condition(Where.CONN.valueOf(opear), leftSide, null, siExpr.isNot() ? "NOT IN" : "IN", - parseValue(siExpr.getTargetList()), null, nestedType); - } else if (isChildren) { - condition = new Condition(Where.CONN.valueOf(opear), leftSide, null, siExpr.isNot() ? "NOT IN" : "IN", - parseValue(siExpr.getTargetList()), null, childrenType); - } else { - condition = new Condition(Where.CONN.valueOf(opear), leftSide, null, siExpr.isNot() ? 
"NOT IN" : "IN", - parseValue(siExpr.getTargetList()), null); - } + isChildren = true; + } - where.addWhere(condition); - } else if (expr instanceof SQLBetweenExpr) { - SQLBetweenExpr between = ((SQLBetweenExpr) expr); - String leftSide = between.getTestExpr().toString(); + final Condition condition; - boolean isNested = false; - boolean isChildren = false; + if (isNested) { + condition = + new Condition( + Where.CONN.valueOf(opear), + leftSide, + null, + siExpr.isNot() ? "NOT IN" : "IN", + parseValue(siExpr.getTargetList()), + null, + nestedType); + } else if (isChildren) { + condition = + new Condition( + Where.CONN.valueOf(opear), + leftSide, + null, + siExpr.isNot() ? "NOT IN" : "IN", + parseValue(siExpr.getTargetList()), + null, + childrenType); + } else { + condition = + new Condition( + Where.CONN.valueOf(opear), + leftSide, + null, + siExpr.isNot() ? "NOT IN" : "IN", + parseValue(siExpr.getTargetList()), + null); + } - NestedType nestedType = new NestedType(); - if (nestedType.tryFillFromExpr(between.getTestExpr())) { - leftSide = nestedType.field; + where.addWhere(condition); + } else if (expr instanceof SQLBetweenExpr) { + SQLBetweenExpr between = ((SQLBetweenExpr) expr); + String leftSide = between.getTestExpr().toString(); - isNested = true; - } + boolean isNested = false; + boolean isChildren = false; - ChildrenType childrenType = new ChildrenType(); - if (childrenType.tryFillFromExpr(between.getTestExpr())) { - leftSide = childrenType.field; + NestedType nestedType = new NestedType(); + if (nestedType.tryFillFromExpr(between.getTestExpr())) { + leftSide = nestedType.field; - isChildren = true; - } + isNested = true; + } - final Condition condition; - - if (isNested) { - condition = new Condition(Where.CONN.valueOf(opear), leftSide, null, - between.isNot() ? 
"NOT BETWEEN" : "BETWEEN", new Object[]{parseValue(between.beginExpr), - parseValue(between.endExpr)}, null, nestedType); - } else if (isChildren) { - condition = new Condition(Where.CONN.valueOf(opear), leftSide, null, - between.isNot() ? "NOT BETWEEN" : "BETWEEN", new Object[]{parseValue(between.beginExpr), - parseValue(between.endExpr)}, null, childrenType); - } else { - condition = new Condition(Where.CONN.valueOf(opear), leftSide, null, - between.isNot() ? "NOT BETWEEN" : "BETWEEN", new Object[]{parseValue(between.beginExpr), - parseValue(between.endExpr)}, null, null); - } + ChildrenType childrenType = new ChildrenType(); + if (childrenType.tryFillFromExpr(between.getTestExpr())) { + leftSide = childrenType.field; - where.addWhere(condition); - } else if (expr instanceof SQLMethodInvokeExpr) { - - SQLMethodInvokeExpr methodExpr = (SQLMethodInvokeExpr) expr; - List methodParameters = methodExpr.getParameters(); - - String methodName = methodExpr.getMethodName(); - if (SpatialParamsFactory.isAllowedMethod(methodName)) { - String fieldName = methodParameters.get(0).toString(); - - boolean isNested = false; - boolean isChildren = false; - - NestedType nestedType = new NestedType(); - if (nestedType.tryFillFromExpr(methodParameters.get(0))) { - fieldName = nestedType.field; - - isNested = true; - } - - ChildrenType childrenType = new ChildrenType(); - if (childrenType.tryFillFromExpr(methodParameters.get(0))) { - fieldName = childrenType.field; - - isChildren = true; - } - - Object spatialParamsObject = SpatialParamsFactory.generateSpatialParamsObject(methodName, - methodParameters); - - final Condition condition; - - if (isNested) { - condition = new Condition(Where.CONN.valueOf(opear), fieldName, null, methodName, - spatialParamsObject, null, nestedType); - } else if (isChildren) { - condition = new Condition(Where.CONN.valueOf(opear), fieldName, null, methodName, - spatialParamsObject, null, childrenType); - } else { - condition = new 
Condition(Where.CONN.valueOf(opear), fieldName, null, methodName, - spatialParamsObject, null, null); - } - - where.addWhere(condition); - } else if (methodName.toLowerCase().equals("nested")) { - NestedType nestedType = new NestedType(); - - if (!nestedType.tryFillFromExpr(expr)) { - throw new SqlParseException("could not fill nested from expr:" + expr); - } - - Condition condition = new Condition(Where.CONN.valueOf(opear), nestedType.path, null, - methodName.toUpperCase(), nestedType.where, null); - - where.addWhere(condition); - } else if (methodName.toLowerCase().equals("children")) { - ChildrenType childrenType = new ChildrenType(); - - if (!childrenType.tryFillFromExpr(expr)) { - throw new SqlParseException("could not fill children from expr:" + expr); - } - - Condition condition = new Condition(Where.CONN.valueOf(opear), childrenType.childType, null, - methodName.toUpperCase(), childrenType.where, null); - - where.addWhere(condition); - } else if (methodName.toLowerCase().equals("script")) { - ScriptFilter scriptFilter = new ScriptFilter(); - if (!scriptFilter.tryParseFromMethodExpr(methodExpr)) { - throw new SqlParseException("could not parse script filter"); - } - Condition condition = new Condition(Where.CONN.valueOf(opear), null, null, "SCRIPT", - scriptFilter, null); - where.addWhere(condition); - } else if (Maker.isQueryFunction(methodName)) { - Condition condition = getConditionForMethod(expr, Where.CONN.valueOf(opear)); - - where.addWhere(condition); - } else { - throw new SqlParseException("unsupported method: " + methodName); - } - } else if (expr instanceof SQLInSubQueryExpr) { - SQLInSubQueryExpr sqlIn = (SQLInSubQueryExpr) expr; + isChildren = true; + } - Select innerSelect = sqlParser.parseSelect((MySqlSelectQueryBlock) sqlIn.getSubQuery().getQuery()); + final Condition condition; - if (innerSelect.getFields() == null || innerSelect.getFields().size() != 1) { - throw new SqlParseException("should only have one return field in subQuery"); - } + 
if (isNested) { + condition = + new Condition( + Where.CONN.valueOf(opear), + leftSide, + null, + between.isNot() ? "NOT BETWEEN" : "BETWEEN", + new Object[] {parseValue(between.beginExpr), parseValue(between.endExpr)}, + null, + nestedType); + } else if (isChildren) { + condition = + new Condition( + Where.CONN.valueOf(opear), + leftSide, + null, + between.isNot() ? "NOT BETWEEN" : "BETWEEN", + new Object[] {parseValue(between.beginExpr), parseValue(between.endExpr)}, + null, + childrenType); + } else { + condition = + new Condition( + Where.CONN.valueOf(opear), + leftSide, + null, + between.isNot() ? "NOT BETWEEN" : "BETWEEN", + new Object[] {parseValue(between.beginExpr), parseValue(between.endExpr)}, + null, + null); + } - SubQueryExpression subQueryExpression = new SubQueryExpression(innerSelect); + where.addWhere(condition); + } else if (expr instanceof SQLMethodInvokeExpr) { - String leftSide = sqlIn.getExpr().toString(); + SQLMethodInvokeExpr methodExpr = (SQLMethodInvokeExpr) expr; + List methodParameters = methodExpr.getParameters(); - boolean isNested = false; - boolean isChildren = false; + String methodName = methodExpr.getMethodName(); + if (SpatialParamsFactory.isAllowedMethod(methodName)) { + String fieldName = methodParameters.get(0).toString(); - NestedType nestedType = new NestedType(); - if (nestedType.tryFillFromExpr(sqlIn.getExpr())) { - leftSide = nestedType.field; + boolean isNested = false; + boolean isChildren = false; - isNested = true; - } + NestedType nestedType = new NestedType(); + if (nestedType.tryFillFromExpr(methodParameters.get(0))) { + fieldName = nestedType.field; - ChildrenType childrenType = new ChildrenType(); - if (childrenType.tryFillFromExpr(sqlIn.getExpr())) { - leftSide = childrenType.field; + isNested = true; + } - isChildren = true; - } + ChildrenType childrenType = new ChildrenType(); + if (childrenType.tryFillFromExpr(methodParameters.get(0))) { + fieldName = childrenType.field; - final Condition condition; - - if 
(isNested) { - condition = new Condition(Where.CONN.valueOf(opear), leftSide, null, sqlIn.isNot() ? "NOT IN" : "IN", - subQueryExpression, null, nestedType); - } else if (isChildren) { - condition = new Condition(Where.CONN.valueOf(opear), leftSide, null, sqlIn.isNot() ? "NOT IN" : "IN", - subQueryExpression, null, childrenType); - } else { - condition = new Condition(Where.CONN.valueOf(opear), leftSide, null, sqlIn.isNot() ? "NOT IN" : "IN", - subQueryExpression, null, null); - } + isChildren = true; + } - where.addWhere(condition); + Object spatialParamsObject = + SpatialParamsFactory.generateSpatialParamsObject(methodName, methodParameters); + + final Condition condition; + + if (isNested) { + condition = + new Condition( + Where.CONN.valueOf(opear), + fieldName, + null, + methodName, + spatialParamsObject, + null, + nestedType); + } else if (isChildren) { + condition = + new Condition( + Where.CONN.valueOf(opear), + fieldName, + null, + methodName, + spatialParamsObject, + null, + childrenType); } else { - throw new SqlParseException("err find condition " + expr.getClass()); + condition = + new Condition( + Where.CONN.valueOf(opear), + fieldName, + null, + methodName, + spatialParamsObject, + null, + null); } - } - private MethodField parseSQLMethodInvokeExprWithFunctionInWhere(SQLMethodInvokeExpr soExpr) - throws SqlParseException { + where.addWhere(condition); + } else if (methodName.toLowerCase().equals("nested")) { + NestedType nestedType = new NestedType(); - MethodField methodField = fieldMaker.makeMethodField(soExpr.getMethodName(), - soExpr.getParameters(), - null, - null, - query != null ? 
query.getFrom().getAlias() : null, - false); - return methodField; - } + if (!nestedType.tryFillFromExpr(expr)) { + throw new SqlParseException("could not fill nested from expr:" + expr); + } - private MethodField parseSQLCastExprWithFunctionInWhere(SQLCastExpr soExpr) throws SqlParseException { - ArrayList parameters = new ArrayList<>(); - parameters.add(soExpr.getExpr()); - return fieldMaker.makeMethodField( - "CAST", - parameters, + Condition condition = + new Condition( + Where.CONN.valueOf(opear), + nestedType.path, null, - null, - query != null ? query.getFrom().getAlias() : null, - false - ); - } + methodName.toUpperCase(), + nestedType.where, + null); - private SQLMethodInvokeExpr parseSQLBinaryOpExprWhoIsConditionInWhere(SQLBinaryOpExpr soExpr) - throws SqlParseException { - - if (bothSideAreNotFunction(soExpr) && bothSidesAreNotCast(soExpr)) { - return null; - } + where.addWhere(condition); + } else if (methodName.toLowerCase().equals("children")) { + ChildrenType childrenType = new ChildrenType(); - if (soExpr.getLeft() instanceof SQLMethodInvokeExpr) { - if (!SQLFunctions.isFunctionTranslatedToScript(((SQLMethodInvokeExpr) soExpr.getLeft()).getMethodName())) { - return null; - } + if (!childrenType.tryFillFromExpr(expr)) { + throw new SqlParseException("could not fill children from expr:" + expr); } - if (soExpr.getRight() instanceof SQLMethodInvokeExpr) { - if (!SQLFunctions.isFunctionTranslatedToScript(((SQLMethodInvokeExpr) soExpr.getRight()).getMethodName())) { - return null; - } + Condition condition = + new Condition( + Where.CONN.valueOf(opear), + childrenType.childType, + null, + methodName.toUpperCase(), + childrenType.where, + null); + + where.addWhere(condition); + } else if (methodName.toLowerCase().equals("script")) { + ScriptFilter scriptFilter = new ScriptFilter(); + if (!scriptFilter.tryParseFromMethodExpr(methodExpr)) { + throw new SqlParseException("could not parse script filter"); } + Condition condition = + new 
Condition(Where.CONN.valueOf(opear), null, null, "SCRIPT", scriptFilter, null); + where.addWhere(condition); + } else if (Maker.isQueryFunction(methodName)) { + Condition condition = getConditionForMethod(expr, Where.CONN.valueOf(opear)); + where.addWhere(condition); + } else { + throw new SqlParseException("unsupported method: " + methodName); + } + } else if (expr instanceof SQLInSubQueryExpr) { + SQLInSubQueryExpr sqlIn = (SQLInSubQueryExpr) expr; - MethodField leftMethod = new MethodField(null, Lists.newArrayList( - new KVValue("", Util.expr2Object(soExpr.getLeft(), "'"))), null, null); - MethodField rightMethod = new MethodField(null, Lists.newArrayList( - new KVValue("", Util.expr2Object(soExpr.getRight(), "'"))), null, null); + Select innerSelect = + sqlParser.parseSelect((MySqlSelectQueryBlock) sqlIn.getSubQuery().getQuery()); - if (soExpr.getLeft() instanceof SQLIdentifierExpr || soExpr.getLeft() instanceof SQLPropertyExpr) { - leftMethod = new MethodField(null, Lists.newArrayList( - new KVValue("", "doc['" + Util.expr2Object(soExpr.getLeft(), "'") + "'].value")), - null, null); - } - - if (soExpr.getRight() instanceof SQLIdentifierExpr || soExpr.getRight() instanceof SQLPropertyExpr) { - rightMethod = new MethodField(null, Lists.newArrayList( - new KVValue("", "doc['" + Util.expr2Object(soExpr.getRight(), "'") + "'].value")), - null, null); - } + if (innerSelect.getFields() == null || innerSelect.getFields().size() != 1) { + throw new SqlParseException("should only have one return field in subQuery"); + } - if (soExpr.getLeft() instanceof SQLMethodInvokeExpr) { - leftMethod = parseSQLMethodInvokeExprWithFunctionInWhere((SQLMethodInvokeExpr) soExpr.getLeft()); - } - if (soExpr.getRight() instanceof SQLMethodInvokeExpr) { - rightMethod = parseSQLMethodInvokeExprWithFunctionInWhere((SQLMethodInvokeExpr) soExpr.getRight()); - } + SubQueryExpression subQueryExpression = new SubQueryExpression(innerSelect); - if (soExpr.getLeft() instanceof SQLCastExpr) { - 
leftMethod = parseSQLCastExprWithFunctionInWhere((SQLCastExpr) soExpr.getLeft()); - } - if (soExpr.getRight() instanceof SQLCastExpr) { - rightMethod = parseSQLCastExprWithFunctionInWhere((SQLCastExpr) soExpr.getRight()); - } + String leftSide = sqlIn.getExpr().toString(); - String v1 = leftMethod.getParams().get(0).value.toString(); - String v1Dec = leftMethod.getParams().size() == 2 ? leftMethod.getParams().get(1).value.toString() + ";" : ""; + boolean isNested = false; + boolean isChildren = false; + NestedType nestedType = new NestedType(); + if (nestedType.tryFillFromExpr(sqlIn.getExpr())) { + leftSide = nestedType.field; - String v2 = rightMethod.getParams().get(0).value.toString(); - String v2Dec = rightMethod.getParams().size() == 2 ? rightMethod.getParams().get(1).value.toString() + ";" : ""; + isNested = true; + } - String operator = soExpr.getOperator().getName(); + ChildrenType childrenType = new ChildrenType(); + if (childrenType.tryFillFromExpr(sqlIn.getExpr())) { + leftSide = childrenType.field; - if (operator.equals("=")) { - operator = "=="; - } + isChildren = true; + } - String finalStr = v1Dec + v2Dec + v1 + " " + operator + " " + v2; + final Condition condition; - SQLMethodInvokeExpr scriptMethod = new SQLMethodInvokeExpr("script", null); - scriptMethod.addParameter(new SQLCharExpr(finalStr)); - return scriptMethod; + if (isNested) { + condition = + new Condition( + Where.CONN.valueOf(opear), + leftSide, + null, + sqlIn.isNot() ? "NOT IN" : "IN", + subQueryExpression, + null, + nestedType); + } else if (isChildren) { + condition = + new Condition( + Where.CONN.valueOf(opear), + leftSide, + null, + sqlIn.isNot() ? "NOT IN" : "IN", + subQueryExpression, + null, + childrenType); + } else { + condition = + new Condition( + Where.CONN.valueOf(opear), + leftSide, + null, + sqlIn.isNot() ? 
"NOT IN" : "IN", + subQueryExpression, + null, + null); + } + where.addWhere(condition); + } else { + throw new SqlParseException("err find condition " + expr.getClass()); + } + } + + private MethodField parseSQLMethodInvokeExprWithFunctionInWhere(SQLMethodInvokeExpr soExpr) + throws SqlParseException { + + MethodField methodField = + fieldMaker.makeMethodField( + soExpr.getMethodName(), + soExpr.getParameters(), + null, + null, + query != null ? query.getFrom().getAlias() : null, + false); + return methodField; + } + + private MethodField parseSQLCastExprWithFunctionInWhere(SQLCastExpr soExpr) + throws SqlParseException { + ArrayList parameters = new ArrayList<>(); + parameters.add(soExpr.getExpr()); + return fieldMaker.makeMethodField( + "CAST", parameters, null, null, query != null ? query.getFrom().getAlias() : null, false); + } + + private SQLMethodInvokeExpr parseSQLBinaryOpExprWhoIsConditionInWhere(SQLBinaryOpExpr soExpr) + throws SqlParseException { + + if (bothSideAreNotFunction(soExpr) && bothSidesAreNotCast(soExpr)) { + return null; } - private Boolean bothSideAreNotFunction(SQLBinaryOpExpr soExpr) { - return !(soExpr.getLeft() instanceof SQLMethodInvokeExpr || soExpr.getRight() instanceof SQLMethodInvokeExpr); + if (soExpr.getLeft() instanceof SQLMethodInvokeExpr) { + if (!SQLFunctions.isFunctionTranslatedToScript( + ((SQLMethodInvokeExpr) soExpr.getLeft()).getMethodName())) { + return null; + } } - private Boolean bothSidesAreNotCast(SQLBinaryOpExpr soExpr) { - return !(soExpr.getLeft() instanceof SQLCastExpr || soExpr.getRight() instanceof SQLCastExpr); + if (soExpr.getRight() instanceof SQLMethodInvokeExpr) { + if (!SQLFunctions.isFunctionTranslatedToScript( + ((SQLMethodInvokeExpr) soExpr.getRight()).getMethodName())) { + return null; + } } - private Object[] getMethodValuesWithSubQueries(SQLMethodInvokeExpr method) throws SqlParseException { - List values = new ArrayList<>(); - for (SQLExpr innerExpr : method.getParameters()) { - if (innerExpr 
instanceof SQLQueryExpr) { - Select select = sqlParser.parseSelect((MySqlSelectQueryBlock) ((SQLQueryExpr) innerExpr).getSubQuery() - .getQuery()); - values.add(new SubQueryExpression(select)); - } else if (innerExpr instanceof SQLTextLiteralExpr) { - values.add(((SQLTextLiteralExpr) innerExpr).getText()); - } else { - values.add(innerExpr); - } + MethodField leftMethod = + new MethodField( + null, + Lists.newArrayList(new KVValue("", Util.expr2Object(soExpr.getLeft(), "'"))), + null, + null); + MethodField rightMethod = + new MethodField( + null, + Lists.newArrayList(new KVValue("", Util.expr2Object(soExpr.getRight(), "'"))), + null, + null); + + if (soExpr.getLeft() instanceof SQLIdentifierExpr + || soExpr.getLeft() instanceof SQLPropertyExpr) { + leftMethod = + new MethodField( + null, + Lists.newArrayList( + new KVValue("", "doc['" + Util.expr2Object(soExpr.getLeft(), "'") + "'].value")), + null, + null); + } - } - return values.toArray(); + if (soExpr.getRight() instanceof SQLIdentifierExpr + || soExpr.getRight() instanceof SQLPropertyExpr) { + rightMethod = + new MethodField( + null, + Lists.newArrayList( + new KVValue("", "doc['" + Util.expr2Object(soExpr.getRight(), "'") + "'].value")), + null, + null); } - private Object[] parseValue(List targetList) throws SqlParseException { - Object[] value = new Object[targetList.size()]; - for (int i = 0; i < targetList.size(); i++) { - value[i] = parseValue(targetList.get(i)); - } - return value; + if (soExpr.getLeft() instanceof SQLMethodInvokeExpr) { + leftMethod = + parseSQLMethodInvokeExprWithFunctionInWhere((SQLMethodInvokeExpr) soExpr.getLeft()); + } + if (soExpr.getRight() instanceof SQLMethodInvokeExpr) { + rightMethod = + parseSQLMethodInvokeExprWithFunctionInWhere((SQLMethodInvokeExpr) soExpr.getRight()); } - private Object parseValue(SQLExpr expr) throws SqlParseException { - if (expr instanceof SQLNumericLiteralExpr) { - Number number = ((SQLNumericLiteralExpr) expr).getNumber(); - if (number instanceof 
BigDecimal) { - return number.doubleValue(); - } - if (number instanceof BigInteger) { - return number.longValue(); - } - return ((SQLNumericLiteralExpr) expr).getNumber(); - } else if (expr instanceof SQLCharExpr) { - return ((SQLCharExpr) expr).getText(); - } else if (expr instanceof SQLMethodInvokeExpr) { - return expr; - } else if (expr instanceof SQLNullExpr) { - return null; - } else if (expr instanceof SQLIdentifierExpr) { - return expr; - } else if (expr instanceof SQLPropertyExpr) { - return expr; - } else if (expr instanceof SQLBooleanExpr) { - return ((SQLBooleanExpr) expr).getValue(); - } else { - throw new SqlParseException( - String.format("Failed to parse SqlExpression of type %s. expression value: %s", - expr.getClass(), expr) - ); - } + if (soExpr.getLeft() instanceof SQLCastExpr) { + leftMethod = parseSQLCastExprWithFunctionInWhere((SQLCastExpr) soExpr.getLeft()); + } + if (soExpr.getRight() instanceof SQLCastExpr) { + rightMethod = parseSQLCastExprWithFunctionInWhere((SQLCastExpr) soExpr.getRight()); } - public static Condition getConditionForMethod(SQLExpr expr, Where.CONN conn) throws SqlParseException { - SQLExpr param = ((SQLMethodInvokeExpr) expr).getParameters().get(0); - String fieldName = param.toString(); + String v1 = leftMethod.getParams().get(0).value.toString(); + String v1Dec = + leftMethod.getParams().size() == 2 + ? leftMethod.getParams().get(1).value.toString() + ";" + : ""; - NestedType nestedType = new NestedType(); - ChildrenType childrenType = new ChildrenType(); + String v2 = rightMethod.getParams().get(0).value.toString(); + String v2Dec = + rightMethod.getParams().size() == 2 + ? 
rightMethod.getParams().get(1).value.toString() + ";" + : ""; - if (nestedType.tryFillFromExpr(param)) { - return new Condition(conn, nestedType.field, null, "=", expr, expr, nestedType); - } else if (childrenType.tryFillFromExpr(param)) { - return new Condition(conn, childrenType.field, null, "=", expr, expr, childrenType); - } else { - return new Condition(conn, fieldName, null, "=", expr, expr, null); - } + String operator = soExpr.getOperator().getName(); + + if (operator.equals("=")) { + operator = "=="; + } + + String finalStr = v1Dec + v2Dec + v1 + " " + operator + " " + v2; + + SQLMethodInvokeExpr scriptMethod = new SQLMethodInvokeExpr("script", null); + scriptMethod.addParameter(new SQLCharExpr(finalStr)); + return scriptMethod; + } + + private Boolean bothSideAreNotFunction(SQLBinaryOpExpr soExpr) { + return !(soExpr.getLeft() instanceof SQLMethodInvokeExpr + || soExpr.getRight() instanceof SQLMethodInvokeExpr); + } + + private Boolean bothSidesAreNotCast(SQLBinaryOpExpr soExpr) { + return !(soExpr.getLeft() instanceof SQLCastExpr || soExpr.getRight() instanceof SQLCastExpr); + } + + private Object[] getMethodValuesWithSubQueries(SQLMethodInvokeExpr method) + throws SqlParseException { + List values = new ArrayList<>(); + for (SQLExpr innerExpr : method.getParameters()) { + if (innerExpr instanceof SQLQueryExpr) { + Select select = + sqlParser.parseSelect( + (MySqlSelectQueryBlock) ((SQLQueryExpr) innerExpr).getSubQuery().getQuery()); + values.add(new SubQueryExpression(select)); + } else if (innerExpr instanceof SQLTextLiteralExpr) { + values.add(((SQLTextLiteralExpr) innerExpr).getText()); + } else { + values.add(innerExpr); + } + } + return values.toArray(); + } + + private Object[] parseValue(List targetList) throws SqlParseException { + Object[] value = new Object[targetList.size()]; + for (int i = 0; i < targetList.size(); i++) { + value[i] = parseValue(targetList.get(i)); + } + return value; + } + + private Object parseValue(SQLExpr expr) throws 
SqlParseException { + if (expr instanceof SQLNumericLiteralExpr) { + Number number = ((SQLNumericLiteralExpr) expr).getNumber(); + if (number instanceof BigDecimal) { + return number.doubleValue(); + } + if (number instanceof BigInteger) { + return number.longValue(); + } + return ((SQLNumericLiteralExpr) expr).getNumber(); + } else if (expr instanceof SQLCharExpr) { + return ((SQLCharExpr) expr).getText(); + } else if (expr instanceof SQLMethodInvokeExpr) { + return expr; + } else if (expr instanceof SQLNullExpr) { + return null; + } else if (expr instanceof SQLIdentifierExpr) { + return expr; + } else if (expr instanceof SQLPropertyExpr) { + return expr; + } else if (expr instanceof SQLBooleanExpr) { + return ((SQLBooleanExpr) expr).getValue(); + } else { + throw new SqlParseException( + String.format( + "Failed to parse SqlExpression of type %s. expression value: %s", + expr.getClass(), expr)); + } + } + + public static Condition getConditionForMethod(SQLExpr expr, Where.CONN conn) + throws SqlParseException { + SQLExpr param = ((SQLMethodInvokeExpr) expr).getParameters().get(0); + String fieldName = param.toString(); + + NestedType nestedType = new NestedType(); + ChildrenType childrenType = new ChildrenType(); + + if (nestedType.tryFillFromExpr(param)) { + return new Condition(conn, nestedType.field, null, "=", expr, expr, nestedType); + } else if (childrenType.tryFillFromExpr(param)) { + return new Condition(conn, childrenType.field, null, "=", expr, expr, childrenType); + } else { + return new Condition(conn, fieldName, null, "=", expr, expr, null); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSQLQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSQLQueryAction.java index 9ee9a8a683..ccbf70297e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSQLQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSQLQueryAction.java @@ -3,7 +3,6 @@ * 
SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.plugin; import static org.opensearch.core.rest.RestStatus.OK; @@ -16,11 +15,11 @@ import org.apache.logging.log4j.Logger; import org.opensearch.client.node.NodeClient; import org.opensearch.common.inject.Injector; +import org.opensearch.core.rest.RestStatus; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestChannel; import org.opensearch.rest.RestRequest; -import org.opensearch.core.rest.RestStatus; import org.opensearch.sql.common.antlr.SyntaxCheckException; import org.opensearch.sql.common.response.ResponseListener; import org.opensearch.sql.common.utils.QueryContext; @@ -30,20 +29,20 @@ import org.opensearch.sql.legacy.metrics.Metrics; import org.opensearch.sql.opensearch.security.SecurityAccess; import org.opensearch.sql.protocol.response.QueryResult; +import org.opensearch.sql.protocol.response.format.CommandResponseFormatter; import org.opensearch.sql.protocol.response.format.CsvResponseFormatter; import org.opensearch.sql.protocol.response.format.Format; import org.opensearch.sql.protocol.response.format.JdbcResponseFormatter; import org.opensearch.sql.protocol.response.format.JsonResponseFormatter; -import org.opensearch.sql.protocol.response.format.CommandResponseFormatter; import org.opensearch.sql.protocol.response.format.RawResponseFormatter; import org.opensearch.sql.protocol.response.format.ResponseFormatter; import org.opensearch.sql.sql.SQLService; import org.opensearch.sql.sql.domain.SQLQueryRequest; /** - * New SQL REST action handler. This will not be registered to OpenSearch unless: - * 1) we want to test new SQL engine; - * 2) all old functionalities migrated to new query engine and legacy REST handler removed. + * New SQL REST action handler. 
This will not be registered to OpenSearch unless: 1) we want to test + * new SQL engine; 2) all old functionalities migrated to new query engine and legacy REST handler + * removed. */ public class RestSQLQueryAction extends BaseRestHandler { @@ -53,9 +52,7 @@ public class RestSQLQueryAction extends BaseRestHandler { private final Injector injector; - /** - * Constructor of RestSQLQueryAction. - */ + /** Constructor of RestSQLQueryAction. */ public RestSQLQueryAction(Injector injector) { super(); this.injector = injector; @@ -105,7 +102,7 @@ public RestChannelConsumer prepareRequest( fallbackHandler)); } // If close request, sqlService.closeCursor - else { + else { return channel -> sqlService.execute( request, @@ -123,8 +120,7 @@ private ResponseListener fallBackListener( return new ResponseListener() { @Override public void onResponse(T response) { - LOG.info("[{}] Request is handled by new SQL query engine", - QueryContext.getRequestId()); + LOG.info("[{}] Request is handled by new SQL query engine", QueryContext.getRequestId()); next.onResponse(response); } @@ -144,12 +140,13 @@ private ResponseListener createExplainResponseListener( return new ResponseListener<>() { @Override public void onResponse(ExplainResponse response) { - JsonResponseFormatter formatter = new JsonResponseFormatter<>(PRETTY) { - @Override - protected Object buildJsonObject(ExplainResponse response) { - return response; - } - }; + JsonResponseFormatter formatter = + new JsonResponseFormatter<>(PRETTY) { + @Override + protected Object buildJsonObject(ExplainResponse response) { + return response; + } + }; sendResponse(channel, OK, formatter.format(response), formatter.contentType()); } @@ -179,9 +176,12 @@ private ResponseListener createQueryResponseListener( return new ResponseListener() { @Override public void onResponse(QueryResponse response) { - sendResponse(channel, OK, - formatter.format(new QueryResult(response.getSchema(), response.getResults(), - response.getCursor())), 
formatter.contentType()); + sendResponse( + channel, + OK, + formatter.format( + new QueryResult(response.getSchema(), response.getResults(), response.getCursor())), + formatter.contentType()); } @Override @@ -191,9 +191,9 @@ public void onFailure(Exception e) { }; } - private void sendResponse(RestChannel channel, RestStatus status, String content, String contentType) { - channel.sendResponse(new BytesRestResponse( - status, contentType, content)); + private void sendResponse( + RestChannel channel, RestStatus status, String content, String contentType) { + channel.sendResponse(new BytesRestResponse(status, contentType, content)); } private static void logAndPublishMetrics(Exception e) { diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSqlAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSqlAction.java index 9a15cc9e21..fc8934dd73 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSqlAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSqlAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.plugin; import static org.opensearch.core.rest.RestStatus.BAD_REQUEST; @@ -28,12 +27,12 @@ import org.opensearch.client.node.NodeClient; import org.opensearch.common.inject.Injector; import org.opensearch.common.settings.Settings; +import org.opensearch.core.rest.RestStatus; import org.opensearch.index.IndexNotFoundException; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestChannel; import org.opensearch.rest.RestRequest; -import org.opensearch.core.rest.RestStatus; import org.opensearch.sql.common.antlr.SyntaxCheckException; import org.opensearch.sql.common.utils.QueryContext; import org.opensearch.sql.exception.ExpressionEvaluationException; @@ -67,233 +66,263 @@ public class RestSqlAction extends BaseRestHandler { - private static final Logger LOG = 
LogManager.getLogger(RestSqlAction.class); - - private final boolean allowExplicitIndex; - - private static final Predicate CONTAINS_SUBQUERY = Pattern.compile("\\(\\s*select ").asPredicate(); - - /** - * API endpoint path - */ - public static final String QUERY_API_ENDPOINT = "/_plugins/_sql"; - public static final String EXPLAIN_API_ENDPOINT = QUERY_API_ENDPOINT + "/_explain"; - public static final String CURSOR_CLOSE_ENDPOINT = QUERY_API_ENDPOINT + "/close"; - public static final String LEGACY_QUERY_API_ENDPOINT = "/_opendistro/_sql"; - public static final String LEGACY_EXPLAIN_API_ENDPOINT = LEGACY_QUERY_API_ENDPOINT + "/_explain"; - public static final String LEGACY_CURSOR_CLOSE_ENDPOINT = LEGACY_QUERY_API_ENDPOINT + "/close"; - - /** - * New SQL query request handler. - */ - private final RestSQLQueryAction newSqlQueryHandler; - - public RestSqlAction(Settings settings, Injector injector) { - super(); - this.allowExplicitIndex = MULTI_ALLOW_EXPLICIT_INDEX.get(settings); - this.newSqlQueryHandler = new RestSQLQueryAction(injector); - } - - @Override - public List routes() { - return ImmutableList.of(); - } - - @Override - public List replacedRoutes() { - return ImmutableList.of( - new ReplacedRoute( - RestRequest.Method.POST, QUERY_API_ENDPOINT, - RestRequest.Method.POST, LEGACY_QUERY_API_ENDPOINT), - new ReplacedRoute( - RestRequest.Method.POST, EXPLAIN_API_ENDPOINT, - RestRequest.Method.POST, LEGACY_EXPLAIN_API_ENDPOINT), - new ReplacedRoute( - RestRequest.Method.POST, CURSOR_CLOSE_ENDPOINT, - RestRequest.Method.POST, LEGACY_CURSOR_CLOSE_ENDPOINT)); - } - - @Override - public String getName() { - return "sql_action"; - } - - @Override - protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { - Metrics.getInstance().getNumericalMetric(MetricName.REQ_TOTAL).increment(); - Metrics.getInstance().getNumericalMetric(MetricName.REQ_COUNT_TOTAL).increment(); - - QueryContext.addRequestId(); - - try { - if (!isSQLFeatureEnabled()) { - 
throw new SQLFeatureDisabledException( - "Either plugins.sql.enabled or rest.action.multi.allow_explicit_index setting is false" - ); - } - - final SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(request); - if (isLegacyCursor(sqlRequest)) { - if (isExplainRequest(request)) { - throw new IllegalArgumentException("Invalid request. Cannot explain cursor"); - } else { - LOG.info("[{}] Cursor request {}: {}", QueryContext.getRequestId(), request.uri(), sqlRequest.cursor()); - return channel -> handleCursorRequest(request, sqlRequest.cursor(), client, channel); - } - } - - LOG.info("[{}] Incoming request {}", QueryContext.getRequestId(), request.uri()); - - Format format = SqlRequestParam.getFormat(request.params()); - - // Route request to new query engine if it's supported already - SQLQueryRequest newSqlRequest = new SQLQueryRequest(sqlRequest.getJsonContent(), - sqlRequest.getSql(), request.path(), request.params(), sqlRequest.cursor()); - return newSqlQueryHandler.prepareRequest(newSqlRequest, - (restChannel, exception) -> { - try{ - if (newSqlRequest.isExplainRequest()) { - LOG.info("Request is falling back to old SQL engine due to: " + exception.getMessage()); - } - LOG.info("[{}] Request {} is not supported and falling back to old SQL engine", - QueryContext.getRequestId(), newSqlRequest); - LOG.info("Request Query: {}", QueryDataAnonymizer.anonymizeData(sqlRequest.getSql())); - QueryAction queryAction = explainRequest(client, sqlRequest, format); - executeSqlRequest(request, queryAction, client, restChannel); - } catch (Exception e) { - logAndPublishMetrics(e); - reportError(restChannel, e, isClientError(e) ? BAD_REQUEST : SERVICE_UNAVAILABLE); - } - }, - (restChannel, exception) -> { - logAndPublishMetrics(exception); - reportError(restChannel, exception, isClientError(exception) ? - BAD_REQUEST : SERVICE_UNAVAILABLE); - }); - } catch (Exception e) { - logAndPublishMetrics(e); - return channel -> reportError(channel, e, isClientError(e) ? 
BAD_REQUEST : SERVICE_UNAVAILABLE); - } - } - - - /** - * @param sqlRequest client request - * @return true if this cursor was generated by the legacy engine, false otherwise. - */ - private static boolean isLegacyCursor(SqlRequest sqlRequest) { - String cursor = sqlRequest.cursor(); - return cursor != null - && CursorType.getById(cursor.substring(0, 1)) != CursorType.NULL; - } - - @Override - protected Set responseParams() { - Set responseParams = new HashSet<>(super.responseParams()); - responseParams.addAll(Arrays.asList("sql", "flat", "separator", "_score", "_type", "_id", "newLine", "format", "sanitize")); - return responseParams; - } - - private void handleCursorRequest(final RestRequest request, final String cursor, final Client client, - final RestChannel channel) throws Exception { - CursorAsyncRestExecutor cursorRestExecutor = CursorActionRequestRestExecutorFactory.createExecutor( - request, cursor, SqlRequestParam.getFormat(request.params())); - cursorRestExecutor.execute(client, request.params(), channel); - } - - private static void logAndPublishMetrics(final Exception e) { - if (isClientError(e)) { - LOG.error(QueryContext.getRequestId() + " Client side error during query execution", e); - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CUS).increment(); - } else { - LOG.error(QueryContext.getRequestId() + " Server side error during query execution", e); - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); - } - } - - private static QueryAction explainRequest(final NodeClient client, final SqlRequest sqlRequest, Format format) - throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { - - ColumnTypeProvider typeProvider = performAnalysis(sqlRequest.getSql()); - - final QueryAction queryAction = new SearchDao(client) - .explain(new QueryActionRequest(sqlRequest.getSql(), typeProvider, format)); - queryAction.setSqlRequest(sqlRequest); - 
queryAction.setFormat(format); - queryAction.setColumnTypeProvider(typeProvider); - return queryAction; - } - - private void executeSqlRequest(final RestRequest request, final QueryAction queryAction, final Client client, - final RestChannel channel) throws Exception { - Map params = request.params(); + private static final Logger LOG = LogManager.getLogger(RestSqlAction.class); + + private final boolean allowExplicitIndex; + + private static final Predicate CONTAINS_SUBQUERY = + Pattern.compile("\\(\\s*select ").asPredicate(); + + /** API endpoint path */ + public static final String QUERY_API_ENDPOINT = "/_plugins/_sql"; + + public static final String EXPLAIN_API_ENDPOINT = QUERY_API_ENDPOINT + "/_explain"; + public static final String CURSOR_CLOSE_ENDPOINT = QUERY_API_ENDPOINT + "/close"; + public static final String LEGACY_QUERY_API_ENDPOINT = "/_opendistro/_sql"; + public static final String LEGACY_EXPLAIN_API_ENDPOINT = LEGACY_QUERY_API_ENDPOINT + "/_explain"; + public static final String LEGACY_CURSOR_CLOSE_ENDPOINT = LEGACY_QUERY_API_ENDPOINT + "/close"; + + /** New SQL query request handler. 
*/ + private final RestSQLQueryAction newSqlQueryHandler; + + public RestSqlAction(Settings settings, Injector injector) { + super(); + this.allowExplicitIndex = MULTI_ALLOW_EXPLICIT_INDEX.get(settings); + this.newSqlQueryHandler = new RestSQLQueryAction(injector); + } + + @Override + public List routes() { + return ImmutableList.of(); + } + + @Override + public List replacedRoutes() { + return ImmutableList.of( + new ReplacedRoute( + RestRequest.Method.POST, QUERY_API_ENDPOINT, + RestRequest.Method.POST, LEGACY_QUERY_API_ENDPOINT), + new ReplacedRoute( + RestRequest.Method.POST, EXPLAIN_API_ENDPOINT, + RestRequest.Method.POST, LEGACY_EXPLAIN_API_ENDPOINT), + new ReplacedRoute( + RestRequest.Method.POST, CURSOR_CLOSE_ENDPOINT, + RestRequest.Method.POST, LEGACY_CURSOR_CLOSE_ENDPOINT)); + } + + @Override + public String getName() { + return "sql_action"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { + Metrics.getInstance().getNumericalMetric(MetricName.REQ_TOTAL).increment(); + Metrics.getInstance().getNumericalMetric(MetricName.REQ_COUNT_TOTAL).increment(); + + QueryContext.addRequestId(); + + try { + if (!isSQLFeatureEnabled()) { + throw new SQLFeatureDisabledException( + "Either plugins.sql.enabled or rest.action.multi.allow_explicit_index setting is" + + " false"); + } + + final SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(request); + if (isLegacyCursor(sqlRequest)) { if (isExplainRequest(request)) { - final String jsonExplanation = queryAction.explain().explain(); - String result; - if (SqlRequestParam.isPrettyFormat(params)) { - result = JsonPrettyFormatter.format(jsonExplanation); - } else { - result = jsonExplanation; - } - channel.sendResponse(new BytesRestResponse(OK, "application/json; charset=UTF-8", result)); + throw new IllegalArgumentException("Invalid request. 
Cannot explain cursor"); } else { - RestExecutor restExecutor = ActionRequestRestExecutorFactory.createExecutor( - SqlRequestParam.getFormat(params), - queryAction); - //doing this hack because OpenSearch throws exception for un-consumed props - Map additionalParams = new HashMap<>(); - for (String paramName : responseParams()) { - if (request.hasParam(paramName)) { - additionalParams.put(paramName, request.param(paramName)); - } - } - restExecutor.execute(client, additionalParams, queryAction, channel); + LOG.info( + "[{}] Cursor request {}: {}", + QueryContext.getRequestId(), + request.uri(), + sqlRequest.cursor()); + return channel -> handleCursorRequest(request, sqlRequest.cursor(), client, channel); } + } + + LOG.info("[{}] Incoming request {}", QueryContext.getRequestId(), request.uri()); + + Format format = SqlRequestParam.getFormat(request.params()); + + // Route request to new query engine if it's supported already + SQLQueryRequest newSqlRequest = + new SQLQueryRequest( + sqlRequest.getJsonContent(), + sqlRequest.getSql(), + request.path(), + request.params(), + sqlRequest.cursor()); + return newSqlQueryHandler.prepareRequest( + newSqlRequest, + (restChannel, exception) -> { + try { + if (newSqlRequest.isExplainRequest()) { + LOG.info( + "Request is falling back to old SQL engine due to: " + exception.getMessage()); + } + LOG.info( + "[{}] Request {} is not supported and falling back to old SQL engine", + QueryContext.getRequestId(), + newSqlRequest); + LOG.info("Request Query: {}", QueryDataAnonymizer.anonymizeData(sqlRequest.getSql())); + QueryAction queryAction = explainRequest(client, sqlRequest, format); + executeSqlRequest(request, queryAction, client, restChannel); + } catch (Exception e) { + logAndPublishMetrics(e); + reportError(restChannel, e, isClientError(e) ? BAD_REQUEST : SERVICE_UNAVAILABLE); + } + }, + (restChannel, exception) -> { + logAndPublishMetrics(exception); + reportError( + restChannel, + exception, + isClientError(exception) ? 
BAD_REQUEST : SERVICE_UNAVAILABLE); + }); + } catch (Exception e) { + logAndPublishMetrics(e); + return channel -> + reportError(channel, e, isClientError(e) ? BAD_REQUEST : SERVICE_UNAVAILABLE); } - - private static boolean isExplainRequest(final RestRequest request) { - return request.path().endsWith("/_explain"); - } - - private static boolean isClientError(Exception e) { - return e instanceof NullPointerException // NPE is hard to differentiate but more likely caused by bad query - || e instanceof SqlParseException - || e instanceof ParserException - || e instanceof SQLFeatureNotSupportedException - || e instanceof SQLFeatureDisabledException - || e instanceof IllegalArgumentException - || e instanceof IndexNotFoundException - || e instanceof VerificationException - || e instanceof SqlAnalysisException - || e instanceof SyntaxCheckException - || e instanceof SemanticCheckException - || e instanceof ExpressionEvaluationException; - } - - private void sendResponse(final RestChannel channel, final String message, final RestStatus status) { - channel.sendResponse(new BytesRestResponse(status, message)); - } - - private void reportError(final RestChannel channel, final Exception e, final RestStatus status) { - sendResponse(channel, ErrorMessageFactory.createErrorMessage(e, status.getStatus()).toString(), status); - } - - private boolean isSQLFeatureEnabled() { - boolean isSqlEnabled = LocalClusterState.state().getSettingValue( - org.opensearch.sql.common.setting.Settings.Key.SQL_ENABLED); - return allowExplicitIndex && isSqlEnabled; + } + + /** + * @param sqlRequest client request + * @return true if this cursor was generated by the legacy engine, false otherwise. 
+ */ + private static boolean isLegacyCursor(SqlRequest sqlRequest) { + String cursor = sqlRequest.cursor(); + return cursor != null && CursorType.getById(cursor.substring(0, 1)) != CursorType.NULL; + } + + @Override + protected Set responseParams() { + Set responseParams = new HashSet<>(super.responseParams()); + responseParams.addAll( + Arrays.asList( + "sql", "flat", "separator", "_score", "_type", "_id", "newLine", "format", "sanitize")); + return responseParams; + } + + private void handleCursorRequest( + final RestRequest request, + final String cursor, + final Client client, + final RestChannel channel) + throws Exception { + CursorAsyncRestExecutor cursorRestExecutor = + CursorActionRequestRestExecutorFactory.createExecutor( + request, cursor, SqlRequestParam.getFormat(request.params())); + cursorRestExecutor.execute(client, request.params(), channel); + } + + private static void logAndPublishMetrics(final Exception e) { + if (isClientError(e)) { + LOG.error(QueryContext.getRequestId() + " Client side error during query execution", e); + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CUS).increment(); + } else { + LOG.error(QueryContext.getRequestId() + " Server side error during query execution", e); + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_SYS).increment(); } - - private static ColumnTypeProvider performAnalysis(String sql) { - LocalClusterState clusterState = LocalClusterState.state(); - SqlAnalysisConfig config = new SqlAnalysisConfig(false, false, 200); - - OpenSearchLegacySqlAnalyzer analyzer = new OpenSearchLegacySqlAnalyzer(config); - Optional outputColumnType = analyzer.analyze(sql, clusterState); - if (outputColumnType.isPresent()) { - return new ColumnTypeProvider(outputColumnType.get()); - } else { - return new ColumnTypeProvider(); + } + + private static QueryAction explainRequest( + final NodeClient client, final SqlRequest sqlRequest, Format format) + throws SQLFeatureNotSupportedException, 
SqlParseException, SQLFeatureDisabledException { + + ColumnTypeProvider typeProvider = performAnalysis(sqlRequest.getSql()); + + final QueryAction queryAction = + new SearchDao(client) + .explain(new QueryActionRequest(sqlRequest.getSql(), typeProvider, format)); + queryAction.setSqlRequest(sqlRequest); + queryAction.setFormat(format); + queryAction.setColumnTypeProvider(typeProvider); + return queryAction; + } + + private void executeSqlRequest( + final RestRequest request, + final QueryAction queryAction, + final Client client, + final RestChannel channel) + throws Exception { + Map params = request.params(); + if (isExplainRequest(request)) { + final String jsonExplanation = queryAction.explain().explain(); + String result; + if (SqlRequestParam.isPrettyFormat(params)) { + result = JsonPrettyFormatter.format(jsonExplanation); + } else { + result = jsonExplanation; + } + channel.sendResponse(new BytesRestResponse(OK, "application/json; charset=UTF-8", result)); + } else { + RestExecutor restExecutor = + ActionRequestRestExecutorFactory.createExecutor( + SqlRequestParam.getFormat(params), queryAction); + // doing this hack because OpenSearch throws exception for un-consumed props + Map additionalParams = new HashMap<>(); + for (String paramName : responseParams()) { + if (request.hasParam(paramName)) { + additionalParams.put(paramName, request.param(paramName)); } + } + restExecutor.execute(client, additionalParams, queryAction, channel); + } + } + + private static boolean isExplainRequest(final RestRequest request) { + return request.path().endsWith("/_explain"); + } + + private static boolean isClientError(Exception e) { + return e + instanceof + NullPointerException // NPE is hard to differentiate but more likely caused by bad query + || e instanceof SqlParseException + || e instanceof ParserException + || e instanceof SQLFeatureNotSupportedException + || e instanceof SQLFeatureDisabledException + || e instanceof IllegalArgumentException + || e instanceof 
IndexNotFoundException + || e instanceof VerificationException + || e instanceof SqlAnalysisException + || e instanceof SyntaxCheckException + || e instanceof SemanticCheckException + || e instanceof ExpressionEvaluationException; + } + + private void sendResponse( + final RestChannel channel, final String message, final RestStatus status) { + channel.sendResponse(new BytesRestResponse(status, message)); + } + + private void reportError(final RestChannel channel, final Exception e, final RestStatus status) { + sendResponse( + channel, ErrorMessageFactory.createErrorMessage(e, status.getStatus()).toString(), status); + } + + private boolean isSQLFeatureEnabled() { + boolean isSqlEnabled = + LocalClusterState.state() + .getSettingValue(org.opensearch.sql.common.setting.Settings.Key.SQL_ENABLED); + return allowExplicitIndex && isSqlEnabled; + } + + private static ColumnTypeProvider performAnalysis(String sql) { + LocalClusterState clusterState = LocalClusterState.state(); + SqlAnalysisConfig config = new SqlAnalysisConfig(false, false, 200); + + OpenSearchLegacySqlAnalyzer analyzer = new OpenSearchLegacySqlAnalyzer(config); + Optional outputColumnType = analyzer.analyze(sql, clusterState); + if (outputColumnType.isPresent()) { + return new ColumnTypeProvider(outputColumnType.get()); + } else { + return new ColumnTypeProvider(); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSqlStatsAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSqlStatsAction.java index d300ea7177..bc0f3c73b8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSqlStatsAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSqlStatsAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.plugin; import static org.opensearch.core.rest.RestStatus.SERVICE_UNAVAILABLE; @@ -17,74 +16,79 @@ import org.apache.logging.log4j.Logger; import 
org.opensearch.client.node.NodeClient; import org.opensearch.common.settings.Settings; +import org.opensearch.core.rest.RestStatus; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestController; import org.opensearch.rest.RestRequest; -import org.opensearch.core.rest.RestStatus; import org.opensearch.sql.common.utils.QueryContext; import org.opensearch.sql.legacy.executor.format.ErrorMessageFactory; import org.opensearch.sql.legacy.metrics.Metrics; /** - * Currently this interface is for node level. - * Cluster level is coming up soon. https://github.com/opendistro-for-elasticsearch/sql/issues/41 + * Currently this interface is for node level. Cluster level is coming up soon. + * https://github.com/opendistro-for-elasticsearch/sql/issues/41 */ public class RestSqlStatsAction extends BaseRestHandler { - private static final Logger LOG = LogManager.getLogger(RestSqlStatsAction.class); - - /** - * API endpoint path - */ - public static final String STATS_API_ENDPOINT = "/_plugins/_sql/stats"; - public static final String LEGACY_STATS_API_ENDPOINT = "/_opendistro/_sql/stats"; - - public RestSqlStatsAction(Settings settings, RestController restController) { - super(); + private static final Logger LOG = LogManager.getLogger(RestSqlStatsAction.class); + + /** API endpoint path */ + public static final String STATS_API_ENDPOINT = "/_plugins/_sql/stats"; + + public static final String LEGACY_STATS_API_ENDPOINT = "/_opendistro/_sql/stats"; + + public RestSqlStatsAction(Settings settings, RestController restController) { + super(); + } + + @Override + public String getName() { + return "sql_stats_action"; + } + + @Override + public List routes() { + return ImmutableList.of(); + } + + @Override + public List replacedRoutes() { + return ImmutableList.of( + new ReplacedRoute( + RestRequest.Method.POST, STATS_API_ENDPOINT, + RestRequest.Method.POST, LEGACY_STATS_API_ENDPOINT), + new 
ReplacedRoute( + RestRequest.Method.GET, STATS_API_ENDPOINT, + RestRequest.Method.GET, LEGACY_STATS_API_ENDPOINT)); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { + + QueryContext.addRequestId(); + + try { + return channel -> + channel.sendResponse( + new BytesRestResponse(RestStatus.OK, Metrics.getInstance().collectToJSON())); + } catch (Exception e) { + LOG.error("Failed during Query SQL STATS Action.", e); + + return channel -> + channel.sendResponse( + new BytesRestResponse( + SERVICE_UNAVAILABLE, + ErrorMessageFactory.createErrorMessage(e, SERVICE_UNAVAILABLE.getStatus()) + .toString())); } - - @Override - public String getName() { - return "sql_stats_action"; - } - - @Override - public List routes() { - return ImmutableList.of(); - } - - @Override - public List replacedRoutes() { - return ImmutableList.of( - new ReplacedRoute( - RestRequest.Method.POST, STATS_API_ENDPOINT, - RestRequest.Method.POST, LEGACY_STATS_API_ENDPOINT), - new ReplacedRoute( - RestRequest.Method.GET, STATS_API_ENDPOINT, - RestRequest.Method.GET, LEGACY_STATS_API_ENDPOINT)); - } - - @Override - protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { - - QueryContext.addRequestId(); - - try { - return channel -> channel.sendResponse(new BytesRestResponse(RestStatus.OK, - Metrics.getInstance().collectToJSON())); - } catch (Exception e) { - LOG.error("Failed during Query SQL STATS Action.", e); - - return channel -> channel.sendResponse(new BytesRestResponse(SERVICE_UNAVAILABLE, - ErrorMessageFactory.createErrorMessage(e, SERVICE_UNAVAILABLE.getStatus()).toString())); - } - } - - @Override - protected Set responseParams() { - Set responseParams = new HashSet<>(super.responseParams()); - responseParams.addAll(Arrays.asList("sql", "flat", "separator", "_score", "_type", "_id", "newLine", "format", "sanitize")); - return responseParams; - } - + } + + @Override + protected Set responseParams() { + Set 
responseParams = new HashSet<>(super.responseParams()); + responseParams.addAll( + Arrays.asList( + "sql", "flat", "separator", "_score", "_type", "_id", "newLine", "format", "sanitize")); + return responseParams; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/plugin/SearchDao.java b/legacy/src/main/java/org/opensearch/sql/legacy/plugin/SearchDao.java index a18895723c..ea4e08281c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/plugin/SearchDao.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/plugin/SearchDao.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.plugin; import java.sql.SQLFeatureNotSupportedException; @@ -16,39 +15,36 @@ import org.opensearch.sql.legacy.query.OpenSearchActionFactory; import org.opensearch.sql.legacy.query.QueryAction; - public class SearchDao { - private static final Set END_TABLE_MAP = new HashSet<>(); - - static { - END_TABLE_MAP.add("limit"); - END_TABLE_MAP.add("order"); - END_TABLE_MAP.add("where"); - END_TABLE_MAP.add("group"); - - } - - private Client client = null; - - public SearchDao(Client client) { - this.client = client; - } - - public Client getClient() { - return client; - } - - /** - * Prepare action And transform sql - * into OpenSearch ActionRequest - * - * @param queryActionRequest SQL query action request to execute. 
- * @return OpenSearch request - * @throws SqlParseException - */ - public QueryAction explain(QueryActionRequest queryActionRequest) - throws SqlParseException, SQLFeatureNotSupportedException, SQLFeatureDisabledException { - return OpenSearchActionFactory.create(client, queryActionRequest); - } + private static final Set END_TABLE_MAP = new HashSet<>(); + + static { + END_TABLE_MAP.add("limit"); + END_TABLE_MAP.add("order"); + END_TABLE_MAP.add("where"); + END_TABLE_MAP.add("group"); + } + + private Client client = null; + + public SearchDao(Client client) { + this.client = client; + } + + public Client getClient() { + return client; + } + + /** + * Prepare action And transform sql into OpenSearch ActionRequest + * + * @param queryActionRequest SQL query action request to execute. + * @return OpenSearch request + * @throws SqlParseException + */ + public QueryAction explain(QueryActionRequest queryActionRequest) + throws SqlParseException, SQLFeatureNotSupportedException, SQLFeatureDisabledException { + return OpenSearchActionFactory.create(client, queryActionRequest); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/AggregationQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/AggregationQueryAction.java index 24194e8de5..57af269001 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/AggregationQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/AggregationQueryAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query; import com.alibaba.druid.sql.ast.SQLExpr; @@ -38,457 +37,462 @@ import org.opensearch.sql.legacy.query.maker.AggMaker; import org.opensearch.sql.legacy.query.maker.QueryMaker; -/** - * Transform SQL query to OpenSearch aggregations query - */ +/** Transform SQL query to OpenSearch aggregations query */ public class AggregationQueryAction extends QueryAction { - private final Select select; - private AggMaker 
aggMaker = new AggMaker(); - private SearchRequestBuilder request; - - public AggregationQueryAction(Client client, Select select) { - super(client, select); - this.select = select; - } - - @Override - public SqlOpenSearchRequestBuilder explain() throws SqlParseException { - this.request = new SearchRequestBuilder(client, SearchAction.INSTANCE); - - if (select.getRowCount() == null) { - select.setRowCount(Select.DEFAULT_LIMIT); - } - - setIndicesAndTypes(); - - setWhere(select.getWhere()); - AggregationBuilder lastAgg = null; - - for (List groupBy : select.getGroupBys()) { - if (!groupBy.isEmpty()) { - Field field = groupBy.get(0); - - //make groupby can reference to field alias - lastAgg = getGroupAgg(field, select); - - if (lastAgg instanceof TermsAggregationBuilder) { - - // TODO: Consider removing that condition - // in theory we should be able to apply this for all types of fields, but - // this change requires too much of related integration tests (e.g. there are comparisons against - // raw javascript dsl, so I'd like to scope the changes as of now to one particular fix for - // scripted functions - - // the condition `field.getName().equals("script")` is to include the CAST cases, since the cast - // method is instance of MethodField with script. 
=> corrects the shard size of CASTs - if (!(field instanceof MethodField) || field instanceof ScriptMethodField - || field.getName().equals("script")) { - //if limit size is too small, increasing shard size is required - if (select.getRowCount() < 200) { - ((TermsAggregationBuilder) lastAgg).shardSize(2000); - for (Hint hint : select.getHints()) { - if (hint.getType() == HintType.SHARD_SIZE) { - if (hint.getParams() != null && hint.getParams().length != 0 - && hint.getParams()[0] != null) { - ((TermsAggregationBuilder) lastAgg).shardSize((Integer) hint.getParams()[0]); - } - } - } - } - - if (select.getRowCount() > 0) { - ((TermsAggregationBuilder) lastAgg).size(select.getRowCount()); - } - } - } - - if (field.isNested()) { - AggregationBuilder nestedBuilder = createNestedAggregation(field); - - if (insertFilterIfExistsAfter(lastAgg, groupBy, nestedBuilder, 1)) { - groupBy.remove(1); - } else { - nestedBuilder.subAggregation(lastAgg); - } + private final Select select; + private AggMaker aggMaker = new AggMaker(); + private SearchRequestBuilder request; - request.addAggregation(wrapNestedIfNeeded(nestedBuilder, field.isReverseNested())); - } else if (field.isChildren()) { - AggregationBuilder childrenBuilder = createChildrenAggregation(field); + public AggregationQueryAction(Client client, Select select) { + super(client, select); + this.select = select; + } - if (insertFilterIfExistsAfter(lastAgg, groupBy, childrenBuilder, 1)) { - groupBy.remove(1); - } else { - childrenBuilder.subAggregation(lastAgg); - } + @Override + public SqlOpenSearchRequestBuilder explain() throws SqlParseException { + this.request = new SearchRequestBuilder(client, SearchAction.INSTANCE); - request.addAggregation(childrenBuilder); - } else { - request.addAggregation(lastAgg); - } + if (select.getRowCount() == null) { + select.setRowCount(Select.DEFAULT_LIMIT); + } - for (int i = 1; i < groupBy.size(); i++) { - field = groupBy.get(i); - AggregationBuilder subAgg = getGroupAgg(field, 
select); - //ES5.0 termsaggregation with size = 0 not supported anymore -// if (subAgg instanceof TermsAggregationBuilder && !(field instanceof MethodField)) { - -// //((TermsAggregationBuilder) subAgg).size(0); -// } - - if (field.isNested()) { - AggregationBuilder nestedBuilder = createNestedAggregation(field); - - if (insertFilterIfExistsAfter(subAgg, groupBy, nestedBuilder, i + 1)) { - groupBy.remove(i + 1); - i++; - } else { - nestedBuilder.subAggregation(subAgg); - } - - lastAgg.subAggregation(wrapNestedIfNeeded(nestedBuilder, field.isReverseNested())); - } else if (field.isChildren()) { - AggregationBuilder childrenBuilder = createChildrenAggregation(field); - - if (insertFilterIfExistsAfter(subAgg, groupBy, childrenBuilder, i + 1)) { - groupBy.remove(i + 1); - i++; - } else { - childrenBuilder.subAggregation(subAgg); - } - - lastAgg.subAggregation(childrenBuilder); - } else { - lastAgg.subAggregation(subAgg); - } - - lastAgg = subAgg; + setIndicesAndTypes(); + + setWhere(select.getWhere()); + AggregationBuilder lastAgg = null; + + for (List groupBy : select.getGroupBys()) { + if (!groupBy.isEmpty()) { + Field field = groupBy.get(0); + + // make groupby can reference to field alias + lastAgg = getGroupAgg(field, select); + + if (lastAgg instanceof TermsAggregationBuilder) { + + // TODO: Consider removing that condition + // in theory we should be able to apply this for all types of fields, but + // this change requires too much of related integration tests (e.g. there are comparisons + // against + // raw javascript dsl, so I'd like to scope the changes as of now to one particular fix + // for + // scripted functions + + // the condition `field.getName().equals("script")` is to include the CAST cases, since + // the cast + // method is instance of MethodField with script. 
=> corrects the shard size of CASTs + if (!(field instanceof MethodField) + || field instanceof ScriptMethodField + || field.getName().equals("script")) { + // if limit size is too small, increasing shard size is required + if (select.getRowCount() < 200) { + ((TermsAggregationBuilder) lastAgg).shardSize(2000); + for (Hint hint : select.getHints()) { + if (hint.getType() == HintType.SHARD_SIZE) { + if (hint.getParams() != null + && hint.getParams().length != 0 + && hint.getParams()[0] != null) { + ((TermsAggregationBuilder) lastAgg).shardSize((Integer) hint.getParams()[0]); + } } + } } - // explain the field from SELECT and HAVING clause - List combinedList = new ArrayList<>(); - combinedList.addAll(select.getFields()); - if (select.getHaving() != null) { - combinedList.addAll(select.getHaving().getHavingFields()); + if (select.getRowCount() > 0) { + ((TermsAggregationBuilder) lastAgg).size(select.getRowCount()); } - // add aggregation function to each groupBy - explanFields(request, combinedList, lastAgg); - - explainHaving(lastAgg); + } } - if (select.getGroupBys().size() < 1) { - //add aggregation when having no groupBy script - explanFields(request, select.getFields(), lastAgg); + if (field.isNested()) { + AggregationBuilder nestedBuilder = createNestedAggregation(field); - } + if (insertFilterIfExistsAfter(lastAgg, groupBy, nestedBuilder, 1)) { + groupBy.remove(1); + } else { + nestedBuilder.subAggregation(lastAgg); + } - Map groupMap = aggMaker.getGroupMap(); - // add field - if (select.getFields().size() > 0) { - setFields(select.getFields()); -// explanFields(request, select.getFields(), lastAgg); - } + request.addAggregation(wrapNestedIfNeeded(nestedBuilder, field.isReverseNested())); + } else if (field.isChildren()) { + AggregationBuilder childrenBuilder = createChildrenAggregation(field); - // add order - if (lastAgg != null && select.getOrderBys().size() > 0) { - for (Order order : select.getOrderBys()) { - - // check "standard" fields - KVValue temp = 
groupMap.get(order.getName()); - if (temp != null) { - TermsAggregationBuilder termsBuilder = (TermsAggregationBuilder) temp.value; - switch (temp.key) { - case "COUNT": - termsBuilder.order(BucketOrder.count(isASC(order))); - break; - case "KEY": - termsBuilder.order(BucketOrder.key(isASC(order))); - break; - case "FIELD": - termsBuilder.order(BucketOrder.aggregation(order.getName(), isASC(order))); - break; - default: - throw new SqlParseException(order.getName() + " can not to order"); - } - } else if (order.isScript()) { - // Do not add scripted fields into sort, they must be sorted inside of aggregation - } else { - // TODO: Is there a legit case when we want to add field into sort for aggregation queries? - request.addSort(order.getName(), SortOrder.valueOf(order.getType())); - } - } + if (insertFilterIfExistsAfter(lastAgg, groupBy, childrenBuilder, 1)) { + groupBy.remove(1); + } else { + childrenBuilder.subAggregation(lastAgg); + } + + request.addAggregation(childrenBuilder); + } else { + request.addAggregation(lastAgg); } - setLimitFromHint(this.select.getHints()); + for (int i = 1; i < groupBy.size(); i++) { + field = groupBy.get(i); + AggregationBuilder subAgg = getGroupAgg(field, select); + // ES5.0 termsaggregation with size = 0 not supported anymore + // if (subAgg instanceof TermsAggregationBuilder && !(field instanceof + // MethodField)) { - request.setSearchType(SearchType.DEFAULT); - updateRequestWithIndexAndRoutingOptions(select, request); - updateRequestWithHighlight(select, request); - updateRequestWithCollapse(select, request); - updateRequestWithPostFilter(select, request); - return new SqlOpenSearchRequestBuilder(request); - } + // //((TermsAggregationBuilder) subAgg).size(0); + // } - private AggregationBuilder getGroupAgg(Field groupByField, Select select) throws SqlParseException { - AggregationBuilder lastAgg = null; - Field shadowField = null; - - for (Field selectField : select.getFields()) { - if (selectField instanceof MethodField && 
selectField.getName().equals("script")) { - MethodField scriptField = (MethodField) selectField; - for (KVValue kv : scriptField.getParams()) { - if (kv.value.equals(groupByField.getName())) { - shadowField = scriptField; - break; - } - } - } - } + if (field.isNested()) { + AggregationBuilder nestedBuilder = createNestedAggregation(field); - if (shadowField == null) { - for (Field selectField: select.getFields()) { - if (selectField.getAlias() != null - && (groupByField.getName().equals(selectField.getAlias()) - || groupByField.getExpression().equals(selectField.getExpression()))) { - shadowField = selectField; - } + if (insertFilterIfExistsAfter(subAgg, groupBy, nestedBuilder, i + 1)) { + groupBy.remove(i + 1); + i++; + } else { + nestedBuilder.subAggregation(subAgg); } - } + lastAgg.subAggregation(wrapNestedIfNeeded(nestedBuilder, field.isReverseNested())); + } else if (field.isChildren()) { + AggregationBuilder childrenBuilder = createChildrenAggregation(field); - if (null != shadowField) { - groupByField.setAlias(shadowField.getAlias()); - groupByField = shadowField; - } + if (insertFilterIfExistsAfter(subAgg, groupBy, childrenBuilder, i + 1)) { + groupBy.remove(i + 1); + i++; + } else { + childrenBuilder.subAggregation(subAgg); + } - lastAgg = aggMaker.makeGroupAgg(groupByField); + lastAgg.subAggregation(childrenBuilder); + } else { + lastAgg.subAggregation(subAgg); + } - // find if we have order for that aggregation. 
As of now only special case for script fields - if (groupByField.isScriptField()) { - addOrderByScriptFieldIfPresent(select, (TermsAggregationBuilder) lastAgg, groupByField.getExpression()); + lastAgg = subAgg; } + } + + // explain the field from SELECT and HAVING clause + List combinedList = new ArrayList<>(); + combinedList.addAll(select.getFields()); + if (select.getHaving() != null) { + combinedList.addAll(select.getHaving().getHavingFields()); + } + // add aggregation function to each groupBy + explanFields(request, combinedList, lastAgg); + + explainHaving(lastAgg); + } - return lastAgg; + if (select.getGroupBys().size() < 1) { + // add aggregation when having no groupBy script + explanFields(request, select.getFields(), lastAgg); } - private void addOrderByScriptFieldIfPresent(Select select, TermsAggregationBuilder groupByAggregation, - SQLExpr groupByExpression) { - // TODO: Explore other ways to correlate different fields/functions in the query (params?) - // This feels like a hacky way, but it's the best that could be done now. 
- select - .getOrderBys() - .stream() - .filter(order -> groupByExpression.equals(order.getSortField().getExpression())) - .findFirst() - .ifPresent(orderForGroupBy -> groupByAggregation.order(BucketOrder.key(isASC(orderForGroupBy)))); + Map groupMap = aggMaker.getGroupMap(); + // add field + if (select.getFields().size() > 0) { + setFields(select.getFields()); + // explanFields(request, select.getFields(), lastAgg); } - private AggregationBuilder wrapNestedIfNeeded(AggregationBuilder nestedBuilder, boolean reverseNested) { - if (!reverseNested) { - return nestedBuilder; + // add order + if (lastAgg != null && select.getOrderBys().size() > 0) { + for (Order order : select.getOrderBys()) { + + // check "standard" fields + KVValue temp = groupMap.get(order.getName()); + if (temp != null) { + TermsAggregationBuilder termsBuilder = (TermsAggregationBuilder) temp.value; + switch (temp.key) { + case "COUNT": + termsBuilder.order(BucketOrder.count(isASC(order))); + break; + case "KEY": + termsBuilder.order(BucketOrder.key(isASC(order))); + break; + case "FIELD": + termsBuilder.order(BucketOrder.aggregation(order.getName(), isASC(order))); + break; + default: + throw new SqlParseException(order.getName() + " can not to order"); + } + } else if (order.isScript()) { + // Do not add scripted fields into sort, they must be sorted inside of aggregation + } else { + // TODO: Is there a legit case when we want to add field into sort for aggregation + // queries? 
+ request.addSort(order.getName(), SortOrder.valueOf(order.getType())); } - if (reverseNested && !(nestedBuilder instanceof NestedAggregationBuilder)) { - return nestedBuilder; + } + } + + setLimitFromHint(this.select.getHints()); + + request.setSearchType(SearchType.DEFAULT); + updateRequestWithIndexAndRoutingOptions(select, request); + updateRequestWithHighlight(select, request); + updateRequestWithCollapse(select, request); + updateRequestWithPostFilter(select, request); + return new SqlOpenSearchRequestBuilder(request); + } + + private AggregationBuilder getGroupAgg(Field groupByField, Select select) + throws SqlParseException { + AggregationBuilder lastAgg = null; + Field shadowField = null; + + for (Field selectField : select.getFields()) { + if (selectField instanceof MethodField && selectField.getName().equals("script")) { + MethodField scriptField = (MethodField) selectField; + for (KVValue kv : scriptField.getParams()) { + if (kv.value.equals(groupByField.getName())) { + shadowField = scriptField; + break; + } } - //we need to jump back to root - return AggregationBuilders.reverseNested(nestedBuilder.getName() + "_REVERSED").subAggregation(nestedBuilder); + } } - private AggregationBuilder createNestedAggregation(Field field) { - AggregationBuilder nestedBuilder; + if (shadowField == null) { + for (Field selectField : select.getFields()) { + if (selectField.getAlias() != null + && (groupByField.getName().equals(selectField.getAlias()) + || groupByField.getExpression().equals(selectField.getExpression()))) { + shadowField = selectField; + } + } + } - String nestedPath = field.getNestedPath(); + if (null != shadowField) { + groupByField.setAlias(shadowField.getAlias()); + groupByField = shadowField; + } - if (field.isReverseNested()) { - if (nestedPath == null || !nestedPath.startsWith("~")) { - ReverseNestedAggregationBuilder reverseNestedAggregationBuilder = - AggregationBuilders.reverseNested(getNestedAggName(field)); - if (nestedPath != null) { - 
reverseNestedAggregationBuilder.path(nestedPath); - } - return reverseNestedAggregationBuilder; - } - nestedPath = nestedPath.substring(1); - } + lastAgg = aggMaker.makeGroupAgg(groupByField); - nestedBuilder = AggregationBuilders.nested(getNestedAggName(field), nestedPath); + // find if we have order for that aggregation. As of now only special case for script fields + if (groupByField.isScriptField()) { + addOrderByScriptFieldIfPresent( + select, (TermsAggregationBuilder) lastAgg, groupByField.getExpression()); + } - return nestedBuilder; + return lastAgg; + } + + private void addOrderByScriptFieldIfPresent( + Select select, TermsAggregationBuilder groupByAggregation, SQLExpr groupByExpression) { + // TODO: Explore other ways to correlate different fields/functions in the query (params?) + // This feels like a hacky way, but it's the best that could be done now. + select.getOrderBys().stream() + .filter(order -> groupByExpression.equals(order.getSortField().getExpression())) + .findFirst() + .ifPresent( + orderForGroupBy -> groupByAggregation.order(BucketOrder.key(isASC(orderForGroupBy)))); + } + + private AggregationBuilder wrapNestedIfNeeded( + AggregationBuilder nestedBuilder, boolean reverseNested) { + if (!reverseNested) { + return nestedBuilder; + } + if (reverseNested && !(nestedBuilder instanceof NestedAggregationBuilder)) { + return nestedBuilder; + } + // we need to jump back to root + return AggregationBuilders.reverseNested(nestedBuilder.getName() + "_REVERSED") + .subAggregation(nestedBuilder); + } + + private AggregationBuilder createNestedAggregation(Field field) { + AggregationBuilder nestedBuilder; + + String nestedPath = field.getNestedPath(); + + if (field.isReverseNested()) { + if (nestedPath == null || !nestedPath.startsWith("~")) { + ReverseNestedAggregationBuilder reverseNestedAggregationBuilder = + AggregationBuilders.reverseNested(getNestedAggName(field)); + if (nestedPath != null) { + reverseNestedAggregationBuilder.path(nestedPath); + } 
+ return reverseNestedAggregationBuilder; + } + nestedPath = nestedPath.substring(1); } - private AggregationBuilder createChildrenAggregation(Field field) { - AggregationBuilder childrenBuilder; + nestedBuilder = AggregationBuilders.nested(getNestedAggName(field), nestedPath); - String childType = field.getChildType(); + return nestedBuilder; + } - childrenBuilder = JoinAggregationBuilders.children(getChildrenAggName(field), childType); + private AggregationBuilder createChildrenAggregation(Field field) { + AggregationBuilder childrenBuilder; - return childrenBuilder; - } + String childType = field.getChildType(); - private String getNestedAggName(Field field) { - String prefix; + childrenBuilder = JoinAggregationBuilders.children(getChildrenAggName(field), childType); - if (field instanceof MethodField) { - String nestedPath = field.getNestedPath(); - if (nestedPath != null) { - prefix = nestedPath; - } else { - prefix = field.getAlias(); - } - } else { - prefix = field.getName(); - } - return prefix + "@NESTED"; - } + return childrenBuilder; + } - private String getChildrenAggName(Field field) { - String prefix; + private String getNestedAggName(Field field) { + String prefix; - if (field instanceof MethodField) { - String childType = field.getChildType(); + if (field instanceof MethodField) { + String nestedPath = field.getNestedPath(); + if (nestedPath != null) { + prefix = nestedPath; + } else { + prefix = field.getAlias(); + } + } else { + prefix = field.getName(); + } + return prefix + "@NESTED"; + } + + private String getChildrenAggName(Field field) { + String prefix; + + if (field instanceof MethodField) { + String childType = field.getChildType(); + + if (childType != null) { + prefix = childType; + } else { + prefix = field.getAlias(); + } + } else { + prefix = field.getName(); + } - if (childType != null) { - prefix = childType; - } else { - prefix = field.getAlias(); - } - } else { - prefix = field.getName(); - } + return prefix + "@CHILDREN"; + } - 
return prefix + "@CHILDREN"; + private boolean insertFilterIfExistsAfter( + AggregationBuilder agg, List groupBy, AggregationBuilder builder, int nextPosition) + throws SqlParseException { + if (groupBy.size() <= nextPosition) { + return false; } - - private boolean insertFilterIfExistsAfter(AggregationBuilder agg, List groupBy, AggregationBuilder builder, - int nextPosition) throws SqlParseException { - if (groupBy.size() <= nextPosition) { - return false; - } - Field filterFieldCandidate = groupBy.get(nextPosition); - if (!(filterFieldCandidate instanceof MethodField)) { - return false; - } - MethodField methodField = (MethodField) filterFieldCandidate; - if (!methodField.getName().toLowerCase().equals("filter")) { - return false; - } - builder.subAggregation(aggMaker.makeGroupAgg(filterFieldCandidate).subAggregation(agg)); - return true; + Field filterFieldCandidate = groupBy.get(nextPosition); + if (!(filterFieldCandidate instanceof MethodField)) { + return false; } - - private AggregationBuilder updateAggIfNested(AggregationBuilder lastAgg, Field field) { - if (field.isNested()) { - lastAgg = AggregationBuilders.nested(field.getName() + "Nested", field.getNestedPath()) - .subAggregation(lastAgg); - } - return lastAgg; + MethodField methodField = (MethodField) filterFieldCandidate; + if (!methodField.getName().toLowerCase().equals("filter")) { + return false; } - - private boolean isASC(Order order) { - return "ASC".equals(order.getType()); + builder.subAggregation(aggMaker.makeGroupAgg(filterFieldCandidate).subAggregation(agg)); + return true; + } + + private AggregationBuilder updateAggIfNested(AggregationBuilder lastAgg, Field field) { + if (field.isNested()) { + lastAgg = + AggregationBuilders.nested(field.getName() + "Nested", field.getNestedPath()) + .subAggregation(lastAgg); } + return lastAgg; + } - private void setFields(List fields) { - if (select.getFields().size() > 0) { - ArrayList includeFields = new ArrayList<>(); + private boolean isASC(Order 
order) { + return "ASC".equals(order.getType()); + } - for (Field field : fields) { - if (field != null) { - includeFields.add(field.getName()); - } - } + private void setFields(List fields) { + if (select.getFields().size() > 0) { + ArrayList includeFields = new ArrayList<>(); - request.setFetchSource(includeFields.toArray(new String[0]), null); + for (Field field : fields) { + if (field != null) { + includeFields.add(field.getName()); } - } + } - private void explanFields(SearchRequestBuilder request, List fields, AggregationBuilder groupByAgg) - throws SqlParseException { - for (Field field : fields) { - if (field instanceof MethodField) { - - if (field.getName().equals("script")) { - request.addStoredField(field.getAlias()); - DefaultQueryAction defaultQueryAction = new DefaultQueryAction(client, select); - defaultQueryAction.initialize(request); - List tempFields = Lists.newArrayList(field); - defaultQueryAction.setFields(tempFields); - continue; - } - - AggregationBuilder makeAgg = aggMaker - .withWhere(select.getWhere()) - .makeFieldAgg((MethodField) field, groupByAgg); - if (groupByAgg != null) { - groupByAgg.subAggregation(makeAgg); - } else { - request.addAggregation(makeAgg); - } - } else if (field != null) { - request.addStoredField(field.getName()); - } else { - throw new SqlParseException("it did not support this field method " + field); - } - } + request.setFetchSource(includeFields.toArray(new String[0]), null); } - - private void explainHaving(AggregationBuilder lastAgg) throws SqlParseException { - Having having = select.getHaving(); - if (having != null) { - having.explain(lastAgg, select.getFields()); + } + + private void explanFields( + SearchRequestBuilder request, List fields, AggregationBuilder groupByAgg) + throws SqlParseException { + for (Field field : fields) { + if (field instanceof MethodField) { + + if (field.getName().equals("script")) { + request.addStoredField(field.getAlias()); + DefaultQueryAction defaultQueryAction = new 
DefaultQueryAction(client, select); + defaultQueryAction.initialize(request); + List tempFields = Lists.newArrayList(field); + defaultQueryAction.setFields(tempFields); + continue; } - } - /** - * Create filters based on - * the Where clause. - * - * @param where the 'WHERE' part of the SQL query. - * @throws SqlParseException - */ - private void setWhere(Where where) throws SqlParseException { - BoolQueryBuilder boolQuery = null; - if (where != null) { - boolQuery = QueryMaker.explain(where, this.select.isQuery); - } - // Used to prevent NullPointerException in old tests as they do not set sqlRequest in QueryAction - if (sqlRequest != null) { - boolQuery = sqlRequest.checkAndAddFilter(boolQuery); + AggregationBuilder makeAgg = + aggMaker.withWhere(select.getWhere()).makeFieldAgg((MethodField) field, groupByAgg); + if (groupByAgg != null) { + groupByAgg.subAggregation(makeAgg); + } else { + request.addAggregation(makeAgg); } - request.setQuery(boolQuery); + } else if (field != null) { + request.addStoredField(field.getName()); + } else { + throw new SqlParseException("it did not support this field method " + field); + } } + } - - /** - * Set indices and types to the search request. - */ - private void setIndicesAndTypes() { - request.setIndices(query.getIndexArr()); + private void explainHaving(AggregationBuilder lastAgg) throws SqlParseException { + Having having = select.getHaving(); + if (having != null) { + having.explain(lastAgg, select.getFields()); } - - private void setLimitFromHint(List hints) { - int from = 0; - int size = 0; - for (Hint hint : hints) { - if (hint.getType() == HintType.DOCS_WITH_AGGREGATION) { - Integer[] params = (Integer[]) hint.getParams(); - if (params.length > 1) { - // if 2 or more are given, use the first as the from and the second as the size - // so it is the same as LIMIT from,size - // except written as /*! 
DOCS_WITH_AGGREGATION(from,size) */ - from = params[0]; - size = params[1]; - } else if (params.length == 1) { - // if only 1 parameter is given, use it as the size with a from of 0 - size = params[0]; - } - break; - } + } + + /** + * Create filters based on the Where clause. + * + * @param where the 'WHERE' part of the SQL query. + * @throws SqlParseException + */ + private void setWhere(Where where) throws SqlParseException { + BoolQueryBuilder boolQuery = null; + if (where != null) { + boolQuery = QueryMaker.explain(where, this.select.isQuery); + } + // Used to prevent NullPointerException in old tests as they do not set sqlRequest in + // QueryAction + if (sqlRequest != null) { + boolQuery = sqlRequest.checkAndAddFilter(boolQuery); + } + request.setQuery(boolQuery); + } + + /** Set indices and types to the search request. */ + private void setIndicesAndTypes() { + request.setIndices(query.getIndexArr()); + } + + private void setLimitFromHint(List hints) { + int from = 0; + int size = 0; + for (Hint hint : hints) { + if (hint.getType() == HintType.DOCS_WITH_AGGREGATION) { + Integer[] params = (Integer[]) hint.getParams(); + if (params.length > 1) { + // if 2 or more are given, use the first as the from and the second as the size + // so it is the same as LIMIT from,size + // except written as /*! 
DOCS_WITH_AGGREGATION(from,size) */ + from = params[0]; + size = params[1]; + } else if (params.length == 1) { + // if only 1 parameter is given, use it as the size with a from of 0 + size = params[0]; } - request.setFrom(from); - request.setSize(size); + break; + } } + request.setFrom(from); + request.setSize(size); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/DefaultQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/DefaultQueryAction.java index 0ed5043ac8..18c9708df8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/DefaultQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/DefaultQueryAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query; import com.alibaba.druid.sql.ast.SQLExpr; @@ -50,264 +49,268 @@ import org.opensearch.sql.legacy.rewriter.nestedfield.NestedFieldProjection; import org.opensearch.sql.legacy.utils.SQLFunctions; -/** - * Transform SQL query to standard OpenSearch search query - */ +/** Transform SQL query to standard OpenSearch search query */ public class DefaultQueryAction extends QueryAction { - private final Select select; - private SearchRequestBuilder request; - - private final List fieldNames = new LinkedList<>(); - - public DefaultQueryAction(Client client, Select select) { - super(client, select); - this.select = select; - } - - public void initialize(SearchRequestBuilder request) { - this.request = request; + private final Select select; + private SearchRequestBuilder request; + + private final List fieldNames = new LinkedList<>(); + + public DefaultQueryAction(Client client, Select select) { + super(client, select); + this.select = select; + } + + public void initialize(SearchRequestBuilder request) { + this.request = request; + } + + @Override + public SqlOpenSearchRequestBuilder explain() throws SqlParseException { + Objects.requireNonNull(this.sqlRequest, "SqlRequest is required for 
OpenSearch request build"); + buildRequest(); + checkAndSetScroll(); + return new SqlOpenSearchRequestBuilder(request); + } + + private void buildRequest() throws SqlParseException { + this.request = new SearchRequestBuilder(client, SearchAction.INSTANCE); + setIndicesAndTypes(); + setFields(select.getFields()); + setWhere(select.getWhere()); + setSorts(select.getOrderBys()); + updateRequestWithIndexAndRoutingOptions(select, request); + updateRequestWithHighlight(select, request); + updateRequestWithCollapse(select, request); + updateRequestWithPostFilter(select, request); + updateRequestWithInnerHits(select, request); + } + + @VisibleForTesting + public void checkAndSetScroll() { + LocalClusterState clusterState = LocalClusterState.state(); + + Integer fetchSize = sqlRequest.fetchSize(); + TimeValue timeValue = clusterState.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE); + Integer rowCount = select.getRowCount(); + + if (checkIfScrollNeeded(fetchSize, rowCount)) { + Metrics.getInstance() + .getNumericalMetric(MetricName.DEFAULT_CURSOR_REQUEST_COUNT_TOTAL) + .increment(); + Metrics.getInstance().getNumericalMetric(MetricName.DEFAULT_CURSOR_REQUEST_TOTAL).increment(); + request.setSize(fetchSize).setScroll(timeValue); + } else { + request.setSearchType(SearchType.DFS_QUERY_THEN_FETCH); + setLimit(select.getOffset(), rowCount != null ? 
rowCount : Select.DEFAULT_LIMIT); } - - @Override - public SqlOpenSearchRequestBuilder explain() throws SqlParseException { - Objects.requireNonNull(this.sqlRequest, "SqlRequest is required for OpenSearch request build"); - buildRequest(); - checkAndSetScroll(); - return new SqlOpenSearchRequestBuilder(request); - } - - private void buildRequest() throws SqlParseException { - this.request = new SearchRequestBuilder(client, SearchAction.INSTANCE); - setIndicesAndTypes(); - setFields(select.getFields()); - setWhere(select.getWhere()); - setSorts(select.getOrderBys()); - updateRequestWithIndexAndRoutingOptions(select, request); - updateRequestWithHighlight(select, request); - updateRequestWithCollapse(select, request); - updateRequestWithPostFilter(select, request); - updateRequestWithInnerHits(select, request); - } - - @VisibleForTesting - public void checkAndSetScroll() { - LocalClusterState clusterState = LocalClusterState.state(); - - Integer fetchSize = sqlRequest.fetchSize(); - TimeValue timeValue = clusterState.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE); - Integer rowCount = select.getRowCount(); - - if (checkIfScrollNeeded(fetchSize, rowCount)) { - Metrics.getInstance().getNumericalMetric(MetricName.DEFAULT_CURSOR_REQUEST_COUNT_TOTAL).increment(); - Metrics.getInstance().getNumericalMetric(MetricName.DEFAULT_CURSOR_REQUEST_TOTAL).increment(); - request.setSize(fetchSize).setScroll(timeValue); - } else { - request.setSearchType(SearchType.DFS_QUERY_THEN_FETCH); - setLimit(select.getOffset(), rowCount != null ? rowCount : Select.DEFAULT_LIMIT); + } + + private boolean checkIfScrollNeeded(Integer fetchSize, Integer rowCount) { + return (format != null && format.equals(Format.JDBC)) + && fetchSize > 0 + && (rowCount == null || (rowCount > fetchSize)); + } + + @Override + public Optional> getFieldNames() { + return Optional.of(fieldNames); + } + + public Select getSelect() { + return select; + } + + /** Set indices and types to the search request. 
*/ + private void setIndicesAndTypes() { + request.setIndices(query.getIndexArr()); + } + + /** + * Set source filtering on a search request. + * + * @param fields list of fields to source filter. + */ + public void setFields(List fields) throws SqlParseException { + + if (!select.getFields().isEmpty() && !select.isSelectAll()) { + ArrayList includeFields = new ArrayList<>(); + ArrayList excludeFields = new ArrayList<>(); + + for (Field field : fields) { + if (field instanceof MethodField) { + MethodField method = (MethodField) field; + if (method.getName().toLowerCase().equals("script")) { + handleScriptField(method); + if (method.getExpression() instanceof SQLCastExpr) { + includeFields.add(method.getParams().get(0).toString()); + } + } else if (method.getName().equalsIgnoreCase("include")) { + for (KVValue kvValue : method.getParams()) { + includeFields.add(kvValue.value.toString()); + } + } else if (method.getName().equalsIgnoreCase("exclude")) { + for (KVValue kvValue : method.getParams()) { + excludeFields.add(kvValue.value.toString()); + } + } + } else if (field != null) { + if (isNotNested(field)) { + includeFields.add(field.getName()); + } } - } - - - private boolean checkIfScrollNeeded(Integer fetchSize, Integer rowCount) { - return (format != null && format.equals(Format.JDBC)) - && fetchSize > 0 - && (rowCount == null || (rowCount > fetchSize)); - } + } - @Override - public Optional> getFieldNames() { - return Optional.of(fieldNames); + fieldNames.addAll(includeFields); + request.setFetchSource( + includeFields.toArray(new String[0]), excludeFields.toArray(new String[0])); } + } + private void handleScriptField(final MethodField method) throws SqlParseException { - public Select getSelect() { - return select; - } - - /** - * Set indices and types to the search request. - */ - private void setIndicesAndTypes() { - request.setIndices(query.getIndexArr()); - } - - /** - * Set source filtering on a search request. 
- * - * @param fields list of fields to source filter. - */ - public void setFields(List fields) throws SqlParseException { - - if (!select.getFields().isEmpty() && !select.isSelectAll()) { - ArrayList includeFields = new ArrayList<>(); - ArrayList excludeFields = new ArrayList<>(); - - for (Field field : fields) { - if (field instanceof MethodField) { - MethodField method = (MethodField) field; - if (method.getName().toLowerCase().equals("script")) { - handleScriptField(method); - if (method.getExpression() instanceof SQLCastExpr) { - includeFields.add(method.getParams().get(0).toString()); - } - } else if (method.getName().equalsIgnoreCase("include")) { - for (KVValue kvValue : method.getParams()) { - includeFields.add(kvValue.value.toString()); - } - } else if (method.getName().equalsIgnoreCase("exclude")) { - for (KVValue kvValue : method.getParams()) { - excludeFields.add(kvValue.value.toString()); - } - } - } else if (field != null) { - if (isNotNested(field)) { - includeFields.add(field.getName()); - } - } - } + final List params = method.getParams(); + final int numOfParams = params.size(); - fieldNames.addAll(includeFields); - request.setFetchSource(includeFields.toArray(new String[0]), excludeFields.toArray(new String[0])); - } + if (2 != numOfParams && 3 != numOfParams) { + throw new SqlParseException( + "scripted_field only allows 'script(name,script)' " + "or 'script(name,lang,script)'"); } - private void handleScriptField(final MethodField method) throws SqlParseException { - - final List params = method.getParams(); - final int numOfParams = params.size(); - - if (2 != numOfParams && 3 != numOfParams) { - throw new SqlParseException("scripted_field only allows 'script(name,script)' " - + "or 'script(name,lang,script)'"); - } - - final String fieldName = params.get(0).value.toString(); - fieldNames.add(fieldName); - - final String secondParam = params.get(1).value.toString(); - final Script script = (2 == numOfParams) ? 
new Script(secondParam) : - new Script(ScriptType.INLINE, secondParam, params.get(2).value.toString(), Collections.emptyMap()); - request.addScriptField(fieldName, script); + final String fieldName = params.get(0).value.toString(); + fieldNames.add(fieldName); + + final String secondParam = params.get(1).value.toString(); + final Script script = + (2 == numOfParams) + ? new Script(secondParam) + : new Script( + ScriptType.INLINE, + secondParam, + params.get(2).value.toString(), + Collections.emptyMap()); + request.addScriptField(fieldName, script); + } + + /** + * Create filters or queries based on the Where clause. + * + * @param where the 'WHERE' part of the SQL query. + * @throws SqlParseException if the where clause does not represent valid sql + */ + private void setWhere(Where where) throws SqlParseException { + BoolQueryBuilder boolQuery = null; + if (where != null) { + boolQuery = QueryMaker.explain(where, this.select.isQuery); } - - /** - * Create filters or queries based on the Where clause. - * - * @param where the 'WHERE' part of the SQL query. - * @throws SqlParseException if the where clause does not represent valid sql - */ - private void setWhere(Where where) throws SqlParseException { - BoolQueryBuilder boolQuery = null; - if (where != null) { - boolQuery = QueryMaker.explain(where, this.select.isQuery); - } - // Used to prevent NullPointerException in old tests as they do not set sqlRequest in QueryAction - if (sqlRequest != null) { - boolQuery = sqlRequest.checkAndAddFilter(boolQuery); - } - request.setQuery(boolQuery); + // Used to prevent NullPointerException in old tests as they do not set sqlRequest in + // QueryAction + if (sqlRequest != null) { + boolQuery = sqlRequest.checkAndAddFilter(boolQuery); } - - /** - * Add sorts to the OpenSearch query based on the 'ORDER BY' clause. 
- * - * @param orderBys list of Order object - */ - private void setSorts(List orderBys) { - Map sortBuilderMap = new HashMap<>(); - - for (Order order : orderBys) { - String orderByName = order.getName(); - SortOrder sortOrder = SortOrder.valueOf(order.getType()); - - if (order.getNestedPath() != null) { - request.addSort( - SortBuilders.fieldSort(orderByName) - .order(sortOrder) - .setNestedSort(new NestedSortBuilder(order.getNestedPath()))); - } else if (order.isScript()) { - // TODO: Investigate how to find the type of expression (string or number) - // As of now this shouldn't be a problem, because the support is for date_format function - request.addSort( - SortBuilders - .scriptSort(new Script(orderByName), getScriptSortType(order)) - .order(sortOrder)); - } else if (orderByName.equals(ScoreSortBuilder.NAME)) { - request.addSort(orderByName, sortOrder); - } else { - FieldSortBuilder fieldSortBuilder = sortBuilderMap.computeIfAbsent(orderByName, key -> { - FieldSortBuilder fs = SortBuilders.fieldSort(key); - request.addSort(fs); - return fs; + request.setQuery(boolQuery); + } + + /** + * Add sorts to the OpenSearch query based on the 'ORDER BY' clause. 
+ * + * @param orderBys list of Order object + */ + private void setSorts(List orderBys) { + Map sortBuilderMap = new HashMap<>(); + + for (Order order : orderBys) { + String orderByName = order.getName(); + SortOrder sortOrder = SortOrder.valueOf(order.getType()); + + if (order.getNestedPath() != null) { + request.addSort( + SortBuilders.fieldSort(orderByName) + .order(sortOrder) + .setNestedSort(new NestedSortBuilder(order.getNestedPath()))); + } else if (order.isScript()) { + // TODO: Investigate how to find the type of expression (string or number) + // As of now this shouldn't be a problem, because the support is for date_format function + request.addSort( + SortBuilders.scriptSort(new Script(orderByName), getScriptSortType(order)) + .order(sortOrder)); + } else if (orderByName.equals(ScoreSortBuilder.NAME)) { + request.addSort(orderByName, sortOrder); + } else { + FieldSortBuilder fieldSortBuilder = + sortBuilderMap.computeIfAbsent( + orderByName, + key -> { + FieldSortBuilder fs = SortBuilders.fieldSort(key); + request.addSort(fs); + return fs; }); - setSortParams(fieldSortBuilder, order); - } - } + setSortParams(fieldSortBuilder, order); + } } + } + private void setSortParams(FieldSortBuilder fieldSortBuilder, Order order) { + fieldSortBuilder.order(SortOrder.valueOf(order.getType())); - private void setSortParams(FieldSortBuilder fieldSortBuilder, Order order) { - fieldSortBuilder.order(SortOrder.valueOf(order.getType())); - - SQLExpr expr = order.getSortField().getExpression(); - if (expr instanceof SQLBinaryOpExpr) { - // we set SQLBinaryOpExpr in Field.setExpression() to support ORDER by IS NULL/IS NOT NULL - fieldSortBuilder.missing(getNullOrderString((SQLBinaryOpExpr) expr)); - } + SQLExpr expr = order.getSortField().getExpression(); + if (expr instanceof SQLBinaryOpExpr) { + // we set SQLBinaryOpExpr in Field.setExpression() to support ORDER by IS NULL/IS NOT NULL + fieldSortBuilder.missing(getNullOrderString((SQLBinaryOpExpr) expr)); } - - private 
String getNullOrderString(SQLBinaryOpExpr expr) { - SQLBinaryOperator operator = expr.getOperator(); - return operator == SQLBinaryOperator.IsNot ? "_first" : "_last"; + } + + private String getNullOrderString(SQLBinaryOpExpr expr) { + SQLBinaryOperator operator = expr.getOperator(); + return operator == SQLBinaryOperator.IsNot ? "_first" : "_last"; + } + + private ScriptSortType getScriptSortType(Order order) { + ScriptSortType scriptSortType; + Schema.Type scriptFunctionReturnType = SQLFunctions.getOrderByFieldType(order.getSortField()); + + // as of now script function return type returns only text and double + switch (scriptFunctionReturnType) { + case TEXT: + scriptSortType = ScriptSortType.STRING; + break; + + case DOUBLE: + case FLOAT: + case INTEGER: + case LONG: + scriptSortType = ScriptSortType.NUMBER; + break; + default: + throw new IllegalStateException("Unknown type: " + scriptFunctionReturnType); } - - private ScriptSortType getScriptSortType(Order order) { - ScriptSortType scriptSortType; - Schema.Type scriptFunctionReturnType = SQLFunctions.getOrderByFieldType(order.getSortField()); - - - // as of now script function return type returns only text and double - switch (scriptFunctionReturnType) { - case TEXT: - scriptSortType = ScriptSortType.STRING; - break; - - case DOUBLE: - case FLOAT: - case INTEGER: - case LONG: - scriptSortType = ScriptSortType.NUMBER; - break; - default: - throw new IllegalStateException("Unknown type: " + scriptFunctionReturnType); - } - return scriptSortType; + return scriptSortType; + } + + /** + * Add from and size to the OpenSearch query based on the 'LIMIT' clause + * + * @param from starts from document at position from + * @param size number of documents to return. 
+ */ + private void setLimit(int from, int size) { + request.setFrom(from); + + if (size > -1) { + request.setSize(size); } + } - /** - * Add from and size to the OpenSearch query based on the 'LIMIT' clause - * - * @param from starts from document at position from - * @param size number of documents to return. - */ - private void setLimit(int from, int size) { - request.setFrom(from); - - if (size > -1) { - request.setSize(size); - } - } + public SearchRequestBuilder getRequestBuilder() { + return request; + } - public SearchRequestBuilder getRequestBuilder() { - return request; - } + private boolean isNotNested(Field field) { + return !field.isNested() || field.isReverseNested(); + } - private boolean isNotNested(Field field) { - return !field.isNested() || field.isReverseNested(); - } - - private void updateRequestWithInnerHits(Select select, SearchRequestBuilder request) { - new NestedFieldProjection(request).project(select.getFields(), select.getNestedJoinType()); - } + private void updateRequestWithInnerHits(Select select, SearchRequestBuilder request) { + new NestedFieldProjection(request).project(select.getFields(), select.getNestedJoinType()); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/DeleteQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/DeleteQueryAction.java index 892c5aeb2d..331921345f 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/DeleteQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/DeleteQueryAction.java @@ -3,10 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query; - import org.opensearch.client.Client; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; @@ -20,50 +18,44 @@ public class DeleteQueryAction extends QueryAction { - private final Delete delete; - private DeleteByQueryRequestBuilder request; - - public DeleteQueryAction(Client client, Delete delete) { - 
super(client, delete); - this.delete = delete; - } - - @Override - public SqlElasticDeleteByQueryRequestBuilder explain() throws SqlParseException { - this.request = new DeleteByQueryRequestBuilder(client, DeleteByQueryAction.INSTANCE); - - setIndicesAndTypes(); - setWhere(delete.getWhere()); - SqlElasticDeleteByQueryRequestBuilder deleteByQueryRequestBuilder = - new SqlElasticDeleteByQueryRequestBuilder(request); - return deleteByQueryRequestBuilder; - } - - - /** - * Set indices and types to the delete by query request. - */ - private void setIndicesAndTypes() { - - DeleteByQueryRequest innerRequest = request.request(); - innerRequest.indices(query.getIndexArr()); + private final Delete delete; + private DeleteByQueryRequestBuilder request; + + public DeleteQueryAction(Client client, Delete delete) { + super(client, delete); + this.delete = delete; + } + + @Override + public SqlElasticDeleteByQueryRequestBuilder explain() throws SqlParseException { + this.request = new DeleteByQueryRequestBuilder(client, DeleteByQueryAction.INSTANCE); + + setIndicesAndTypes(); + setWhere(delete.getWhere()); + SqlElasticDeleteByQueryRequestBuilder deleteByQueryRequestBuilder = + new SqlElasticDeleteByQueryRequestBuilder(request); + return deleteByQueryRequestBuilder; + } + + /** Set indices and types to the delete by query request. */ + private void setIndicesAndTypes() { + + DeleteByQueryRequest innerRequest = request.request(); + innerRequest.indices(query.getIndexArr()); + } + + /** + * Create filters based on the Where clause. + * + * @param where the 'WHERE' part of the SQL query. + * @throws SqlParseException + */ + private void setWhere(Where where) throws SqlParseException { + if (where != null) { + QueryBuilder whereQuery = QueryMaker.explain(where); + request.filter(whereQuery); + } else { + request.filter(QueryBuilders.matchAllQuery()); } - - - /** - * Create filters based on - * the Where clause. - * - * @param where the 'WHERE' part of the SQL query. 
- * @throws SqlParseException - */ - private void setWhere(Where where) throws SqlParseException { - if (where != null) { - QueryBuilder whereQuery = QueryMaker.explain(where); - request.filter(whereQuery); - } else { - request.filter(QueryBuilders.matchAllQuery()); - } - } - + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/DescribeQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/DescribeQueryAction.java index 077d9c28b8..ffc9695d81 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/DescribeQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/DescribeQueryAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query; import org.opensearch.action.admin.indices.get.GetIndexRequestBuilder; @@ -14,22 +13,23 @@ public class DescribeQueryAction extends QueryAction { - private final IndexStatement statement; + private final IndexStatement statement; - public DescribeQueryAction(Client client, IndexStatement statement) { - super(client, null); - this.statement = statement; - } + public DescribeQueryAction(Client client, IndexStatement statement) { + super(client, null); + this.statement = statement; + } - @Override - public QueryStatement getQueryStatement() { - return statement; - } + @Override + public QueryStatement getQueryStatement() { + return statement; + } - @Override - public SqlOpenSearchRequestBuilder explain() { - final GetIndexRequestBuilder indexRequestBuilder = Util.prepareIndexRequestBuilder(client, statement); + @Override + public SqlOpenSearchRequestBuilder explain() { + final GetIndexRequestBuilder indexRequestBuilder = + Util.prepareIndexRequestBuilder(client, statement); - return new SqlOpenSearchRequestBuilder(indexRequestBuilder); - } + return new SqlOpenSearchRequestBuilder(indexRequestBuilder); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/OpenSearchActionFactory.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/query/OpenSearchActionFactory.java index de7256d2cf..b9a7c9f218 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/OpenSearchActionFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/OpenSearchActionFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query; import static org.opensearch.sql.legacy.domain.IndexStatement.StatementType; @@ -65,188 +64,193 @@ public class OpenSearchActionFactory { - public static QueryAction create(Client client, String sql) - throws SqlParseException, SQLFeatureNotSupportedException, SQLFeatureDisabledException { - return create(client, new QueryActionRequest(sql, new ColumnTypeProvider(), Format.JSON)); - } - - /** - * Create the compatible Query object - * based on the SQL query. - * - * @param request The SQL query. - * @return Query object. - */ - public static QueryAction create(Client client, QueryActionRequest request) - throws SqlParseException, SQLFeatureNotSupportedException, SQLFeatureDisabledException { - String sql = request.getSql(); - // Remove line breaker anywhere and semicolon at the end - sql = sql.replaceAll("\\R", " ").trim(); - if (sql.endsWith(";")) { - sql = sql.substring(0, sql.length() - 1); - } - - switch (getFirstWord(sql)) { - case "SELECT": - SQLQueryExpr sqlExpr = (SQLQueryExpr) toSqlExpr(sql); - - RewriteRuleExecutor ruleExecutor = RewriteRuleExecutor.builder() - .withRule(new SQLExprParentSetterRule()) - .withRule(new OrdinalRewriterRule(sql)) - .withRule(new UnquoteIdentifierRule()) - .withRule(new TableAliasPrefixRemoveRule()) - .withRule(new SubQueryRewriteRule()) - .build(); - ruleExecutor.executeOn(sqlExpr); - sqlExpr.accept(new NestedFieldRewriter()); - - if (isMulti(sqlExpr)) { - sqlExpr.accept(new TermFieldRewriter(TermRewriterFilter.MULTI_QUERY)); - MultiQuerySelect multiSelect = - new SqlParser().parseMultiSelect((SQLUnionQuery) 
sqlExpr.getSubQuery().getQuery()); - return new MultiQueryAction(client, multiSelect); - } else if (isJoin(sqlExpr, sql)) { - new JoinRewriteRule(LocalClusterState.state()).rewrite(sqlExpr); - sqlExpr.accept(new TermFieldRewriter(TermRewriterFilter.JOIN)); - JoinSelect joinSelect = new SqlParser().parseJoinSelect(sqlExpr); - return OpenSearchJoinQueryActionFactory.createJoinAction(client, joinSelect); - } else { - sqlExpr.accept(new TermFieldRewriter()); - // migrate aggregation to query planner framework. - if (shouldMigrateToQueryPlan(sqlExpr, request.getFormat())) { - return new QueryPlanQueryAction(new QueryPlanRequestBuilder( - new BindingTupleQueryPlanner(client, sqlExpr, request.getTypeProvider()))); - } - Select select = new SqlParser().parseSelect(sqlExpr); - return handleSelect(client, select); - } - case "DELETE": - if (isSQLDeleteEnabled()) { - SQLStatementParser parser = createSqlStatementParser(sql); - SQLDeleteStatement deleteStatement = parser.parseDeleteStatement(); - Delete delete = new SqlParser().parseDelete(deleteStatement); - return new DeleteQueryAction(client, delete); - } else { - throw new SQLFeatureDisabledException( - StringUtils.format("DELETE clause is disabled by default and will be " - + "deprecated. 
Using the %s setting to enable it", - Settings.Key.SQL_DELETE_ENABLED.getKeyValue())); - } - case "SHOW": - IndexStatement showStatement = new IndexStatement(StatementType.SHOW, sql); - return new ShowQueryAction(client, showStatement); - case "DESCRIBE": - IndexStatement describeStatement = new IndexStatement(StatementType.DESCRIBE, sql); - return new DescribeQueryAction(client, describeStatement); - default: - throw new SQLFeatureNotSupportedException( - String.format("Query must start with SELECT, DELETE, SHOW or DESCRIBE: %s", sql)); - } - } + public static QueryAction create(Client client, String sql) + throws SqlParseException, SQLFeatureNotSupportedException, SQLFeatureDisabledException { + return create(client, new QueryActionRequest(sql, new ColumnTypeProvider(), Format.JSON)); + } - private static boolean isSQLDeleteEnabled() { - return LocalClusterState.state().getSettingValue(Settings.Key.SQL_DELETE_ENABLED); + /** + * Create the compatible Query object based on the SQL query. + * + * @param request The SQL query. + * @return Query object. + */ + public static QueryAction create(Client client, QueryActionRequest request) + throws SqlParseException, SQLFeatureNotSupportedException, SQLFeatureDisabledException { + String sql = request.getSql(); + // Remove line breaker anywhere and semicolon at the end + sql = sql.replaceAll("\\R", " ").trim(); + if (sql.endsWith(";")) { + sql = sql.substring(0, sql.length() - 1); } - private static String getFirstWord(String sql) { - int endOfFirstWord = sql.indexOf(' '); - return sql.substring(0, endOfFirstWord > 0 ? 
endOfFirstWord : sql.length()).toUpperCase(); - } + switch (getFirstWord(sql)) { + case "SELECT": + SQLQueryExpr sqlExpr = (SQLQueryExpr) toSqlExpr(sql); - private static boolean isMulti(SQLQueryExpr sqlExpr) { - return sqlExpr.getSubQuery().getQuery() instanceof SQLUnionQuery; - } + RewriteRuleExecutor ruleExecutor = + RewriteRuleExecutor.builder() + .withRule(new SQLExprParentSetterRule()) + .withRule(new OrdinalRewriterRule(sql)) + .withRule(new UnquoteIdentifierRule()) + .withRule(new TableAliasPrefixRemoveRule()) + .withRule(new SubQueryRewriteRule()) + .build(); + ruleExecutor.executeOn(sqlExpr); + sqlExpr.accept(new NestedFieldRewriter()); - private static void executeAndFillSubQuery(Client client, - SubQueryExpression subQueryExpression, - QueryAction queryAction) throws SqlParseException { - List values = new ArrayList<>(); - Object queryResult; - try { - queryResult = QueryActionElasticExecutor.executeAnyAction(client, queryAction); - } catch (Exception e) { - throw new SqlParseException("could not execute SubQuery: " + e.getMessage()); + if (isMulti(sqlExpr)) { + sqlExpr.accept(new TermFieldRewriter(TermRewriterFilter.MULTI_QUERY)); + MultiQuerySelect multiSelect = + new SqlParser().parseMultiSelect((SQLUnionQuery) sqlExpr.getSubQuery().getQuery()); + return new MultiQueryAction(client, multiSelect); + } else if (isJoin(sqlExpr, sql)) { + new JoinRewriteRule(LocalClusterState.state()).rewrite(sqlExpr); + sqlExpr.accept(new TermFieldRewriter(TermRewriterFilter.JOIN)); + JoinSelect joinSelect = new SqlParser().parseJoinSelect(sqlExpr); + return OpenSearchJoinQueryActionFactory.createJoinAction(client, joinSelect); + } else { + sqlExpr.accept(new TermFieldRewriter()); + // migrate aggregation to query planner framework. 
+ if (shouldMigrateToQueryPlan(sqlExpr, request.getFormat())) { + return new QueryPlanQueryAction( + new QueryPlanRequestBuilder( + new BindingTupleQueryPlanner(client, sqlExpr, request.getTypeProvider()))); + } + Select select = new SqlParser().parseSelect(sqlExpr); + return handleSelect(client, select); } - - String returnField = subQueryExpression.getReturnField(); - if (queryResult instanceof SearchHits) { - SearchHits hits = (SearchHits) queryResult; - for (SearchHit hit : hits) { - values.add(ElasticResultHandler.getFieldValue(hit, returnField)); - } + case "DELETE": + if (isSQLDeleteEnabled()) { + SQLStatementParser parser = createSqlStatementParser(sql); + SQLDeleteStatement deleteStatement = parser.parseDeleteStatement(); + Delete delete = new SqlParser().parseDelete(deleteStatement); + return new DeleteQueryAction(client, delete); } else { - throw new SqlParseException("on sub queries only support queries that return Hits and not aggregations"); + throw new SQLFeatureDisabledException( + StringUtils.format( + "DELETE clause is disabled by default and will be " + + "deprecated. 
Using the %s setting to enable it", + Settings.Key.SQL_DELETE_ENABLED.getKeyValue())); } - subQueryExpression.setValues(values.toArray()); + case "SHOW": + IndexStatement showStatement = new IndexStatement(StatementType.SHOW, sql); + return new ShowQueryAction(client, showStatement); + case "DESCRIBE": + IndexStatement describeStatement = new IndexStatement(StatementType.DESCRIBE, sql); + return new DescribeQueryAction(client, describeStatement); + default: + throw new SQLFeatureNotSupportedException( + String.format("Query must start with SELECT, DELETE, SHOW or DESCRIBE: %s", sql)); } + } - private static QueryAction handleSelect(Client client, Select select) { - if (select.isAggregate) { - return new AggregationQueryAction(client, select); - } else { - return new DefaultQueryAction(client, select); - } + private static boolean isSQLDeleteEnabled() { + return LocalClusterState.state().getSettingValue(Settings.Key.SQL_DELETE_ENABLED); + } + + private static String getFirstWord(String sql) { + int endOfFirstWord = sql.indexOf(' '); + return sql.substring(0, endOfFirstWord > 0 ? 
endOfFirstWord : sql.length()).toUpperCase(); + } + + private static boolean isMulti(SQLQueryExpr sqlExpr) { + return sqlExpr.getSubQuery().getQuery() instanceof SQLUnionQuery; + } + + private static void executeAndFillSubQuery( + Client client, SubQueryExpression subQueryExpression, QueryAction queryAction) + throws SqlParseException { + List values = new ArrayList<>(); + Object queryResult; + try { + queryResult = QueryActionElasticExecutor.executeAnyAction(client, queryAction); + } catch (Exception e) { + throw new SqlParseException("could not execute SubQuery: " + e.getMessage()); } - private static SQLStatementParser createSqlStatementParser(String sql) { - ElasticLexer lexer = new ElasticLexer(sql); - lexer.nextToken(); - return new MySqlStatementParser(lexer); + String returnField = subQueryExpression.getReturnField(); + if (queryResult instanceof SearchHits) { + SearchHits hits = (SearchHits) queryResult; + for (SearchHit hit : hits) { + values.add(ElasticResultHandler.getFieldValue(hit, returnField)); + } + } else { + throw new SqlParseException( + "on sub queries only support queries that return Hits and not aggregations"); } + subQueryExpression.setValues(values.toArray()); + } - private static boolean isJoin(SQLQueryExpr sqlExpr, String sql) { - MySqlSelectQueryBlock query = (MySqlSelectQueryBlock) sqlExpr.getSubQuery().getQuery(); - return query.getFrom() instanceof SQLJoinTableSource - && ((SQLJoinTableSource) query.getFrom()).getJoinType() != SQLJoinTableSource.JoinType.COMMA; + private static QueryAction handleSelect(Client client, Select select) { + if (select.isAggregate) { + return new AggregationQueryAction(client, select); + } else { + return new DefaultQueryAction(client, select); } + } - @VisibleForTesting - public static boolean shouldMigrateToQueryPlan(SQLQueryExpr expr, Format format) { - // The JSON format will return the OpenSearch aggregation result, which is not supported by the QueryPlanner. 
- if (format == Format.JSON) { - return false; - } - QueryPlannerScopeDecider decider = new QueryPlannerScopeDecider(); - return decider.isInScope(expr); + private static SQLStatementParser createSqlStatementParser(String sql) { + ElasticLexer lexer = new ElasticLexer(sql); + lexer.nextToken(); + return new MySqlStatementParser(lexer); + } + + private static boolean isJoin(SQLQueryExpr sqlExpr, String sql) { + MySqlSelectQueryBlock query = (MySqlSelectQueryBlock) sqlExpr.getSubQuery().getQuery(); + return query.getFrom() instanceof SQLJoinTableSource + && ((SQLJoinTableSource) query.getFrom()).getJoinType() + != SQLJoinTableSource.JoinType.COMMA; + } + + @VisibleForTesting + public static boolean shouldMigrateToQueryPlan(SQLQueryExpr expr, Format format) { + // The JSON format will return the OpenSearch aggregation result, which is not supported by the + // QueryPlanner. + if (format == Format.JSON) { + return false; } + QueryPlannerScopeDecider decider = new QueryPlannerScopeDecider(); + return decider.isInScope(expr); + } - private static class QueryPlannerScopeDecider extends MySqlASTVisitorAdapter { - private boolean hasAggregationFunc = false; - private boolean hasNestedFunction = false; - private boolean hasGroupBy = false; - private boolean hasAllColumnExpr = false; + private static class QueryPlannerScopeDecider extends MySqlASTVisitorAdapter { + private boolean hasAggregationFunc = false; + private boolean hasNestedFunction = false; + private boolean hasGroupBy = false; + private boolean hasAllColumnExpr = false; - public boolean isInScope(SQLQueryExpr expr) { - expr.accept(this); - return !hasAllColumnExpr && !hasNestedFunction && (hasGroupBy || hasAggregationFunc); - } + public boolean isInScope(SQLQueryExpr expr) { + expr.accept(this); + return !hasAllColumnExpr && !hasNestedFunction && (hasGroupBy || hasAggregationFunc); + } - @Override - public boolean visit(SQLSelectItem expr) { - if (expr.getExpr() instanceof SQLAllColumnExpr) { - hasAllColumnExpr = 
true; - } - return super.visit(expr); - } + @Override + public boolean visit(SQLSelectItem expr) { + if (expr.getExpr() instanceof SQLAllColumnExpr) { + hasAllColumnExpr = true; + } + return super.visit(expr); + } - @Override - public boolean visit(SQLSelectGroupByClause expr) { - hasGroupBy = true; - return super.visit(expr); - } + @Override + public boolean visit(SQLSelectGroupByClause expr) { + hasGroupBy = true; + return super.visit(expr); + } - @Override - public boolean visit(SQLAggregateExpr expr) { - hasAggregationFunc = true; - return super.visit(expr); - } + @Override + public boolean visit(SQLAggregateExpr expr) { + hasAggregationFunc = true; + return super.visit(expr); + } - @Override - public boolean visit(SQLMethodInvokeExpr expr) { - if (expr.getMethodName().equalsIgnoreCase("nested")) { - hasNestedFunction = true; - } - return super.visit(expr); - } + @Override + public boolean visit(SQLMethodInvokeExpr expr) { + if (expr.getMethodName().equalsIgnoreCase("nested")) { + hasNestedFunction = true; + } + return super.visit(expr); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/QueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/QueryAction.java index 7646639be4..c9b39d2f97 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/QueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/QueryAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query; import com.fasterxml.jackson.core.JsonFactory; @@ -32,199 +31,208 @@ import org.opensearch.sql.legacy.request.SqlRequest; /** - * Abstract class. used to transform Select object (Represents SQL query) to - * SearchRequestBuilder (Represents OpenSearch query) + * Abstract class. 
used to transform Select object (Represents SQL query) to SearchRequestBuilder + * (Represents OpenSearch query) */ public abstract class QueryAction { - protected Query query; - protected Client client; - protected SqlRequest sqlRequest = SqlRequest.NULL; - protected ColumnTypeProvider scriptColumnType; - protected Format format; - - public QueryAction(Client client, Query query) { - this.client = client; - this.query = query; - } - - public Client getClient() { - return client; - } - - public QueryStatement getQueryStatement() { - return query; - } - - public void setSqlRequest(SqlRequest sqlRequest) { - this.sqlRequest = sqlRequest; - } - - public void setColumnTypeProvider(ColumnTypeProvider scriptColumnType) { - this.scriptColumnType = scriptColumnType; - } - - public SqlRequest getSqlRequest() { - return sqlRequest; - } - - public void setFormat(Format format) { - this.format = format; - } - - public Format getFormat() { - return this.format; - } - - public ColumnTypeProvider getScriptColumnType() { - return scriptColumnType; - } - - /** - * @return List of field names produced by the query - */ - public Optional> getFieldNames() { - return Optional.empty(); - } - - protected void updateRequestWithCollapse(Select select, SearchRequestBuilder request) throws SqlParseException { - JsonFactory jsonFactory = new JsonFactory(); - for (Hint hint : select.getHints()) { - if (hint.getType() == HintType.COLLAPSE && hint.getParams() != null && 0 < hint.getParams().length) { - try (JsonXContentParser parser = new JsonXContentParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, jsonFactory.createParser(hint.getParams()[0].toString()))) { - request.setCollapse(CollapseBuilder.fromXContent(parser)); - } catch (IOException e) { - throw new SqlParseException("could not parse collapse hint: " + e.getMessage()); - } - } - } - } - - protected void updateRequestWithPostFilter(Select select, SearchRequestBuilder request) { - for (Hint hint : 
select.getHints()) { - if (hint.getType() == HintType.POST_FILTER && hint.getParams() != null && 0 < hint.getParams().length) { - request.setPostFilter(QueryBuilders.wrapperQuery(hint.getParams()[0].toString())); - } - } - } - - protected void updateRequestWithIndexAndRoutingOptions(Select select, SearchRequestBuilder request) { - for (Hint hint : select.getHints()) { - if (hint.getType() == HintType.IGNORE_UNAVAILABLE) { - //saving the defaults from TransportClient search - request.setIndicesOptions(IndicesOptions.fromOptions(true, false, true, false, - IndicesOptions.strictExpandOpenAndForbidClosed())); - } - if (hint.getType() == HintType.ROUTINGS) { - Object[] routings = hint.getParams(); - String[] routingsAsStringArray = new String[routings.length]; - for (int i = 0; i < routings.length; i++) { - routingsAsStringArray[i] = routings[i].toString(); - } - request.setRouting(routingsAsStringArray); - } - } - } - - protected void updateRequestWithHighlight(Select select, SearchRequestBuilder request) { - boolean foundAnyHighlights = false; - HighlightBuilder highlightBuilder = new HighlightBuilder(); - for (Hint hint : select.getHints()) { - if (hint.getType() == HintType.HIGHLIGHT) { - HighlightBuilder.Field highlightField = parseHighlightField(hint.getParams()); - if (highlightField != null) { - foundAnyHighlights = true; - highlightBuilder.field(highlightField); - } - } - } - if (foundAnyHighlights) { - request.highlighter(highlightBuilder); - } - } - - protected HighlightBuilder.Field parseHighlightField(Object[] params) { - if (params == null || params.length == 0 || params.length > 2) { - //todo: exception. 
+ protected Query query; + protected Client client; + protected SqlRequest sqlRequest = SqlRequest.NULL; + protected ColumnTypeProvider scriptColumnType; + protected Format format; + + public QueryAction(Client client, Query query) { + this.client = client; + this.query = query; + } + + public Client getClient() { + return client; + } + + public QueryStatement getQueryStatement() { + return query; + } + + public void setSqlRequest(SqlRequest sqlRequest) { + this.sqlRequest = sqlRequest; + } + + public void setColumnTypeProvider(ColumnTypeProvider scriptColumnType) { + this.scriptColumnType = scriptColumnType; + } + + public SqlRequest getSqlRequest() { + return sqlRequest; + } + + public void setFormat(Format format) { + this.format = format; + } + + public Format getFormat() { + return this.format; + } + + public ColumnTypeProvider getScriptColumnType() { + return scriptColumnType; + } + + /** + * @return List of field names produced by the query + */ + public Optional> getFieldNames() { + return Optional.empty(); + } + + protected void updateRequestWithCollapse(Select select, SearchRequestBuilder request) + throws SqlParseException { + JsonFactory jsonFactory = new JsonFactory(); + for (Hint hint : select.getHints()) { + if (hint.getType() == HintType.COLLAPSE + && hint.getParams() != null + && 0 < hint.getParams().length) { + try (JsonXContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + jsonFactory.createParser(hint.getParams()[0].toString()))) { + request.setCollapse(CollapseBuilder.fromXContent(parser)); + } catch (IOException e) { + throw new SqlParseException("could not parse collapse hint: " + e.getMessage()); } - HighlightBuilder.Field field = new HighlightBuilder.Field(params[0].toString()); - if (params.length == 1) { - return field; + } + } + } + + protected void updateRequestWithPostFilter(Select select, SearchRequestBuilder request) { + for (Hint hint : select.getHints()) { + if 
(hint.getType() == HintType.POST_FILTER + && hint.getParams() != null + && 0 < hint.getParams().length) { + request.setPostFilter(QueryBuilders.wrapperQuery(hint.getParams()[0].toString())); + } + } + } + + protected void updateRequestWithIndexAndRoutingOptions( + Select select, SearchRequestBuilder request) { + for (Hint hint : select.getHints()) { + if (hint.getType() == HintType.IGNORE_UNAVAILABLE) { + // saving the defaults from TransportClient search + request.setIndicesOptions( + IndicesOptions.fromOptions( + true, false, true, false, IndicesOptions.strictExpandOpenAndForbidClosed())); + } + if (hint.getType() == HintType.ROUTINGS) { + Object[] routings = hint.getParams(); + String[] routingsAsStringArray = new String[routings.length]; + for (int i = 0; i < routings.length; i++) { + routingsAsStringArray[i] = routings[i].toString(); } - Map highlightParams = (Map) params[1]; - - for (Map.Entry param : highlightParams.entrySet()) { - switch (param.getKey()) { - case "type": - field.highlighterType((String) param.getValue()); - break; - case "boundary_chars": - field.boundaryChars(fromArrayListToCharArray((ArrayList) param.getValue())); - break; - case "boundary_max_scan": - field.boundaryMaxScan((Integer) param.getValue()); - break; - case "force_source": - field.forceSource((Boolean) param.getValue()); - break; - case "fragmenter": - field.fragmenter((String) param.getValue()); - break; - case "fragment_offset": - field.fragmentOffset((Integer) param.getValue()); - break; - case "fragment_size": - field.fragmentSize((Integer) param.getValue()); - break; - case "highlight_filter": - field.highlightFilter((Boolean) param.getValue()); - break; - case "matched_fields": - field.matchedFields((String[]) ((ArrayList) param.getValue()).toArray(new String[0])); - break; - case "no_match_size": - field.noMatchSize((Integer) param.getValue()); - break; - case "num_of_fragments": - field.numOfFragments((Integer) param.getValue()); - break; - case "order": - 
field.order((String) param.getValue()); - break; - case "phrase_limit": - field.phraseLimit((Integer) param.getValue()); - break; - case "post_tags": - field.postTags((String[]) ((ArrayList) param.getValue()).toArray(new String[0])); - break; - case "pre_tags": - field.preTags((String[]) ((ArrayList) param.getValue()).toArray(new String[0])); - break; - case "require_field_match": - field.requireFieldMatch((Boolean) param.getValue()); - break; - - } + request.setRouting(routingsAsStringArray); + } + } + } + + protected void updateRequestWithHighlight(Select select, SearchRequestBuilder request) { + boolean foundAnyHighlights = false; + HighlightBuilder highlightBuilder = new HighlightBuilder(); + for (Hint hint : select.getHints()) { + if (hint.getType() == HintType.HIGHLIGHT) { + HighlightBuilder.Field highlightField = parseHighlightField(hint.getParams()); + if (highlightField != null) { + foundAnyHighlights = true; + highlightBuilder.field(highlightField); } - return field; - } - - private char[] fromArrayListToCharArray(ArrayList arrayList) { - char[] chars = new char[arrayList.size()]; - int i = 0; - for (Object item : arrayList) { - chars[i] = item.toString().charAt(0); - i++; - } - return chars; - } - - /** - * Prepare the request, and return OpenSearch request. - * - * @return ActionRequestBuilder (OpenSearch request) - * @throws SqlParseException - */ - public abstract SqlElasticRequestBuilder explain() throws SqlParseException; + } + } + if (foundAnyHighlights) { + request.highlighter(highlightBuilder); + } + } + + protected HighlightBuilder.Field parseHighlightField(Object[] params) { + if (params == null || params.length == 0 || params.length > 2) { + // todo: exception. 
+ } + HighlightBuilder.Field field = new HighlightBuilder.Field(params[0].toString()); + if (params.length == 1) { + return field; + } + Map highlightParams = (Map) params[1]; + + for (Map.Entry param : highlightParams.entrySet()) { + switch (param.getKey()) { + case "type": + field.highlighterType((String) param.getValue()); + break; + case "boundary_chars": + field.boundaryChars(fromArrayListToCharArray((ArrayList) param.getValue())); + break; + case "boundary_max_scan": + field.boundaryMaxScan((Integer) param.getValue()); + break; + case "force_source": + field.forceSource((Boolean) param.getValue()); + break; + case "fragmenter": + field.fragmenter((String) param.getValue()); + break; + case "fragment_offset": + field.fragmentOffset((Integer) param.getValue()); + break; + case "fragment_size": + field.fragmentSize((Integer) param.getValue()); + break; + case "highlight_filter": + field.highlightFilter((Boolean) param.getValue()); + break; + case "matched_fields": + field.matchedFields((String[]) ((ArrayList) param.getValue()).toArray(new String[0])); + break; + case "no_match_size": + field.noMatchSize((Integer) param.getValue()); + break; + case "num_of_fragments": + field.numOfFragments((Integer) param.getValue()); + break; + case "order": + field.order((String) param.getValue()); + break; + case "phrase_limit": + field.phraseLimit((Integer) param.getValue()); + break; + case "post_tags": + field.postTags((String[]) ((ArrayList) param.getValue()).toArray(new String[0])); + break; + case "pre_tags": + field.preTags((String[]) ((ArrayList) param.getValue()).toArray(new String[0])); + break; + case "require_field_match": + field.requireFieldMatch((Boolean) param.getValue()); + break; + } + } + return field; + } + + private char[] fromArrayListToCharArray(ArrayList arrayList) { + char[] chars = new char[arrayList.size()]; + int i = 0; + for (Object item : arrayList) { + chars[i] = item.toString().charAt(0); + i++; + } + return chars; + } + + /** + * Prepare the 
request, and return OpenSearch request. + * + * @return ActionRequestBuilder (OpenSearch request) + * @throws SqlParseException + */ + public abstract SqlElasticRequestBuilder explain() throws SqlParseException; } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/ShowQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/ShowQueryAction.java index 7a414087e4..d9baa901fa 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/ShowQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/ShowQueryAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query; import static org.opensearch.sql.legacy.utils.Util.prepareIndexRequestBuilder; @@ -15,22 +14,23 @@ public class ShowQueryAction extends QueryAction { - private final IndexStatement statement; + private final IndexStatement statement; - public ShowQueryAction(Client client, IndexStatement statement) { - super(client, null); - this.statement = statement; - } + public ShowQueryAction(Client client, IndexStatement statement) { + super(client, null); + this.statement = statement; + } - @Override - public QueryStatement getQueryStatement() { - return statement; - } + @Override + public QueryStatement getQueryStatement() { + return statement; + } - @Override - public SqlOpenSearchRequestBuilder explain() { - final GetIndexRequestBuilder indexRequestBuilder = prepareIndexRequestBuilder(client, statement); + @Override + public SqlOpenSearchRequestBuilder explain() { + final GetIndexRequestBuilder indexRequestBuilder = + prepareIndexRequestBuilder(client, statement); - return new SqlOpenSearchRequestBuilder(indexRequestBuilder); - } + return new SqlOpenSearchRequestBuilder(indexRequestBuilder); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlElasticDeleteByQueryRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlElasticDeleteByQueryRequestBuilder.java index 
6963996b22..2203cbb39e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlElasticDeleteByQueryRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlElasticDeleteByQueryRequestBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query; import org.opensearch.action.ActionRequest; @@ -12,41 +11,39 @@ import org.opensearch.core.action.ActionResponse; import org.opensearch.index.reindex.DeleteByQueryRequestBuilder; -/** - * Created by Eliran on 19/8/2015. - */ +/** Created by Eliran on 19/8/2015. */ public class SqlElasticDeleteByQueryRequestBuilder implements SqlElasticRequestBuilder { - DeleteByQueryRequestBuilder deleteByQueryRequestBuilder; - - public SqlElasticDeleteByQueryRequestBuilder(DeleteByQueryRequestBuilder deleteByQueryRequestBuilder) { - this.deleteByQueryRequestBuilder = deleteByQueryRequestBuilder; - } - - @Override - public ActionRequest request() { - return deleteByQueryRequestBuilder.request(); + DeleteByQueryRequestBuilder deleteByQueryRequestBuilder; + + public SqlElasticDeleteByQueryRequestBuilder( + DeleteByQueryRequestBuilder deleteByQueryRequestBuilder) { + this.deleteByQueryRequestBuilder = deleteByQueryRequestBuilder; + } + + @Override + public ActionRequest request() { + return deleteByQueryRequestBuilder.request(); + } + + @Override + public String explain() { + try { + SearchRequestBuilder source = deleteByQueryRequestBuilder.source(); + return source.toString(); + } catch (Exception e) { + e.printStackTrace(); } + return null; + } - @Override - public String explain() { - try { - SearchRequestBuilder source = deleteByQueryRequestBuilder.source(); - return source.toString(); - } catch (Exception e) { - e.printStackTrace(); - } - return null; - } - - @Override - public ActionResponse get() { + @Override + public ActionResponse get() { - return this.deleteByQueryRequestBuilder.get(); - } - - @Override - public ActionRequestBuilder getBuilder() 
{ - return deleteByQueryRequestBuilder; - } + return this.deleteByQueryRequestBuilder.get(); + } + @Override + public ActionRequestBuilder getBuilder() { + return deleteByQueryRequestBuilder; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlElasticRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlElasticRequestBuilder.java index e1f3db3fa7..7babbe5abe 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlElasticRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlElasticRequestBuilder.java @@ -3,22 +3,19 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query; import org.opensearch.action.ActionRequest; import org.opensearch.action.ActionRequestBuilder; import org.opensearch.core.action.ActionResponse; -/** - * Created by Eliran on 19/8/2015. - */ +/** Created by Eliran on 19/8/2015. */ public interface SqlElasticRequestBuilder { - ActionRequest request(); + ActionRequest request(); - String explain(); + String explain(); - ActionResponse get(); + ActionResponse get(); - ActionRequestBuilder getBuilder(); + ActionRequestBuilder getBuilder(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlOpenSearchRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlOpenSearchRequestBuilder.java index 6bba1048c4..2beb16837b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlOpenSearchRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/SqlOpenSearchRequestBuilder.java @@ -3,45 +3,42 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query; import org.opensearch.action.ActionRequest; import org.opensearch.action.ActionRequestBuilder; import org.opensearch.core.action.ActionResponse; -/** - * Created by Eliran on 19/8/2015. - */ +/** Created by Eliran on 19/8/2015. 
*/ public class SqlOpenSearchRequestBuilder implements SqlElasticRequestBuilder { - ActionRequestBuilder requestBuilder; - - public SqlOpenSearchRequestBuilder(ActionRequestBuilder requestBuilder) { - this.requestBuilder = requestBuilder; - } - - @Override - public ActionRequest request() { - return requestBuilder.request(); - } - - @Override - public String explain() { - return requestBuilder.toString(); - } - - @Override - public ActionResponse get() { - return requestBuilder.get(); - } - - @Override - public ActionRequestBuilder getBuilder() { - return requestBuilder; - } - - @Override - public String toString() { - return this.requestBuilder.toString(); - } + ActionRequestBuilder requestBuilder; + + public SqlOpenSearchRequestBuilder(ActionRequestBuilder requestBuilder) { + this.requestBuilder = requestBuilder; + } + + @Override + public ActionRequest request() { + return requestBuilder.request(); + } + + @Override + public String explain() { + return requestBuilder.toString(); + } + + @Override + public ActionResponse get() { + return requestBuilder.get(); + } + + @Override + public ActionRequestBuilder getBuilder() { + return requestBuilder; + } + + @Override + public String toString() { + return this.requestBuilder.toString(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/BackOffRetryStrategy.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/BackOffRetryStrategy.java index 06ec21247a..d767268cb1 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/BackOffRetryStrategy.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/BackOffRetryStrategy.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.join; import java.util.ArrayList; @@ -22,198 +21,198 @@ public class BackOffRetryStrategy { - private static final Logger LOG = LogManager.getLogger(); - - /** - * Interval (ms) between each retry - */ - private static final long[] intervals 
= milliseconds(new double[]{4, 8 + 4, 16 + 4}); + private static final Logger LOG = LogManager.getLogger(); - /** - * Delta to randomize interval (ms) - */ - private static final long delta = 4 * 1000; + /** Interval (ms) between each retry */ + private static final long[] intervals = milliseconds(new double[] {4, 8 + 4, 16 + 4}); - private static final int threshold = 85; + /** Delta to randomize interval (ms) */ + private static final long delta = 4 * 1000; - private static IdentityHashMap> memUse = new IdentityHashMap<>(); + private static final int threshold = 85; - private static AtomicLong mem = new AtomicLong(0L); + private static IdentityHashMap> memUse = new IdentityHashMap<>(); - private static long lastTimeoutCleanTime = System.currentTimeMillis(); + private static AtomicLong mem = new AtomicLong(0L); - private static final long RELTIMEOUT = 1000 * 60 * 30; + private static long lastTimeoutCleanTime = System.currentTimeMillis(); - private static final int MAXRETRIES = 999; + private static final long RELTIMEOUT = 1000 * 60 * 30; - private static final Object obj = new Object(); + private static final int MAXRETRIES = 999; - public static final Supplier GET_CB_STATE = () -> isMemoryHealthy() ? 0 : 1; + private static final Object obj = new Object(); - private BackOffRetryStrategy() { + public static final Supplier GET_CB_STATE = () -> isMemoryHealthy() ? 
0 : 1; - } + private BackOffRetryStrategy() {} - private static boolean isMemoryHealthy() { - final long freeMemory = Runtime.getRuntime().freeMemory(); - final long totalMemory = Runtime.getRuntime().totalMemory(); - final int memoryUsage = (int) Math.round((double) (totalMemory - freeMemory + mem.get()) - / (double) totalMemory * 100); + private static boolean isMemoryHealthy() { + final long freeMemory = Runtime.getRuntime().freeMemory(); + final long totalMemory = Runtime.getRuntime().totalMemory(); + final int memoryUsage = + (int) + Math.round( + (double) (totalMemory - freeMemory + mem.get()) / (double) totalMemory * 100); - LOG.debug("[MCB1] Memory total, free, allocate: {}, {}, {}", totalMemory, freeMemory, mem.get()); - LOG.debug("[MCB1] Memory usage and limit: {}%, {}%", memoryUsage, threshold); + LOG.debug( + "[MCB1] Memory total, free, allocate: {}, {}, {}", totalMemory, freeMemory, mem.get()); + LOG.debug("[MCB1] Memory usage and limit: {}%, {}%", memoryUsage, threshold); - return memoryUsage < threshold; - } + return memoryUsage < threshold; + } - public static boolean isHealthy() { - for (int i = 0; i < intervals.length; i++) { - if (isMemoryHealthy()) { - return true; - } - - LOG.warn("[MCB1] Memory monitor is unhealthy now, back off retrying: {} attempt, thread id = {}", - i, Thread.currentThread().getId()); - if (ThreadLocalRandom.current().nextBoolean()) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CB).increment(); - LOG.warn("[MCB1] Directly abort on idx {}.", i); - return false; - } - backOffSleep(intervals[i]); - } - - boolean isHealthy = isMemoryHealthy(); - if (!isHealthy) { - Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CB).increment(); - } + public static boolean isHealthy() { + for (int i = 0; i < intervals.length; i++) { + if (isMemoryHealthy()) { + return true; + } - return isHealthy; + LOG.warn( + "[MCB1] Memory monitor is unhealthy now, back off retrying: {} attempt, thread id = 
{}", + i, + Thread.currentThread().getId()); + if (ThreadLocalRandom.current().nextBoolean()) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CB).increment(); + LOG.warn("[MCB1] Directly abort on idx {}.", i); + return false; + } + backOffSleep(intervals[i]); } - private static boolean isMemoryHealthy(long allocateMemory, int idx, Object key) { - long logMem = mem.get(); - - releaseTimeoutMemory(); - if (idx == 0 && allocateMemory > 0) { - logMem = mem.addAndGet(allocateMemory); - synchronized (BackOffRetryStrategy.class) { - if (memUse.containsKey(key)) { - memUse.put(key, Tuple.tuple(memUse.get(key).v1(), memUse.get(key).v2() + allocateMemory)); - } else { - memUse.put(key, Tuple.tuple(System.currentTimeMillis(), allocateMemory)); - } - } - } - - final long freeMemory = Runtime.getRuntime().freeMemory(); - final long totalMemory = Runtime.getRuntime().totalMemory(); - final int memoryUsage = (int) Math.round((double) (totalMemory - freeMemory + logMem) - / (double) totalMemory * 100); + boolean isHealthy = isMemoryHealthy(); + if (!isHealthy) { + Metrics.getInstance().getNumericalMetric(MetricName.FAILED_REQ_COUNT_CB).increment(); + } - LOG.debug("[MCB] Idx is {}", idx); - LOG.debug("[MCB] Memory total, free, allocate: {}, {}, {}, {}", totalMemory, freeMemory, - allocateMemory, logMem); - LOG.debug("[MCB] Memory usage and limit: {}%, {}%", memoryUsage, threshold); + return isHealthy; + } - return memoryUsage < threshold; + private static boolean isMemoryHealthy(long allocateMemory, int idx, Object key) { + long logMem = mem.get(); + releaseTimeoutMemory(); + if (idx == 0 && allocateMemory > 0) { + logMem = mem.addAndGet(allocateMemory); + synchronized (BackOffRetryStrategy.class) { + if (memUse.containsKey(key)) { + memUse.put(key, Tuple.tuple(memUse.get(key).v1(), memUse.get(key).v2() + allocateMemory)); + } else { + memUse.put(key, Tuple.tuple(System.currentTimeMillis(), allocateMemory)); + } + } } - public static boolean isHealthy(long 
allocateMemory, Object key) { - if (key == null) { - key = obj; - } + final long freeMemory = Runtime.getRuntime().freeMemory(); + final long totalMemory = Runtime.getRuntime().totalMemory(); + final int memoryUsage = + (int) Math.round((double) (totalMemory - freeMemory + logMem) / (double) totalMemory * 100); + + LOG.debug("[MCB] Idx is {}", idx); + LOG.debug( + "[MCB] Memory total, free, allocate: {}, {}, {}, {}", + totalMemory, + freeMemory, + allocateMemory, + logMem); + LOG.debug("[MCB] Memory usage and limit: {}%, {}%", memoryUsage, threshold); + + return memoryUsage < threshold; + } + + public static boolean isHealthy(long allocateMemory, Object key) { + if (key == null) { + key = obj; + } - for (int i = 0; i < intervals.length; i++) { - if (isMemoryHealthy(allocateMemory, i, key)) { - return true; - } - - LOG.warn("[MCB] Memory monitor is unhealthy now, back off retrying: {} attempt, " - + "executor = {}, thread id = {}", i, key, Thread.currentThread().getId()); - if (ThreadLocalRandom.current().nextBoolean()) { - LOG.warn("[MCB] Directly abort on idx {}, executor is {}.", i, key); - return false; - } - backOffSleep(intervals[i]); - } - return isMemoryHealthy(allocateMemory, MAXRETRIES, key); + for (int i = 0; i < intervals.length; i++) { + if (isMemoryHealthy(allocateMemory, i, key)) { + return true; + } + + LOG.warn( + "[MCB] Memory monitor is unhealthy now, back off retrying: {} attempt, " + + "executor = {}, thread id = {}", + i, + key, + Thread.currentThread().getId()); + if (ThreadLocalRandom.current().nextBoolean()) { + LOG.warn("[MCB] Directly abort on idx {}, executor is {}.", i, key); + return false; + } + backOffSleep(intervals[i]); } + return isMemoryHealthy(allocateMemory, MAXRETRIES, key); + } - public static void backOffSleep(long interval) { - try { - long millis = randomize(interval); + public static void backOffSleep(long interval) { + try { + long millis = randomize(interval); - LOG.info("[MCB] Back off sleeping: {} ms", millis); - 
Thread.sleep(millis); - } catch (InterruptedException e) { - LOG.error("[MCB] Sleep interrupted", e); - } + LOG.info("[MCB] Back off sleeping: {} ms", millis); + Thread.sleep(millis); + } catch (InterruptedException e) { + LOG.error("[MCB] Sleep interrupted", e); } - - /** - * Generate random interval in [interval-delta, interval+delta) - */ - private static long randomize(long interval) { - // Random number within range generator for JDK 7+ - return ThreadLocalRandom.current().nextLong( - lowerBound(interval), upperBound(interval) - ); + } + + /** Generate random interval in [interval-delta, interval+delta) */ + private static long randomize(long interval) { + // Random number within range generator for JDK 7+ + return ThreadLocalRandom.current().nextLong(lowerBound(interval), upperBound(interval)); + } + + private static long lowerBound(long interval) { + return Math.max(0, interval - delta); + } + + private static long upperBound(long interval) { + return interval + delta; + } + + private static long[] milliseconds(double[] seconds) { + return Arrays.stream(seconds).mapToLong((second) -> (long) (1000 * second)).toArray(); + } + + public static void releaseMem(Object key) { + LOG.debug("[MCB] mem is {} before release", mem); + long v = 0L; + synchronized (BackOffRetryStrategy.class) { + if (memUse.containsKey(key)) { + v = memUse.get(key).v2(); + memUse.remove(key); + } } - - private static long lowerBound(long interval) { - return Math.max(0, interval - delta); + if (v > 0) { + atomicMinusLowBoundZero(mem, v); } + LOG.debug("[MCB] mem is {} after release", mem); + } - private static long upperBound(long interval) { - return interval + delta; + private static void releaseTimeoutMemory() { + long cur = System.currentTimeMillis(); + if (cur - lastTimeoutCleanTime < RELTIMEOUT) { + return; } - private static long[] milliseconds(double[] seconds) { - return Arrays.stream(seconds). - mapToLong((second) -> (long) (1000 * second)). 
- toArray(); + List bulks = new ArrayList<>(); + Predicate> isTimeout = t -> cur - t.v1() > RELTIMEOUT; + synchronized (BackOffRetryStrategy.class) { + memUse.values().stream().filter(isTimeout).forEach(v -> bulks.add(v.v2())); + memUse.values().removeIf(isTimeout); } - public static void releaseMem(Object key) { - LOG.debug("[MCB] mem is {} before release", mem); - long v = 0L; - synchronized (BackOffRetryStrategy.class) { - if (memUse.containsKey(key)) { - v = memUse.get(key).v2(); - memUse.remove(key); - } - } - if (v > 0) { - atomicMinusLowBoundZero(mem, v); - } - LOG.debug("[MCB] mem is {} after release", mem); + for (long v : bulks) { + atomicMinusLowBoundZero(mem, v); } - private static void releaseTimeoutMemory() { - long cur = System.currentTimeMillis(); - if (cur - lastTimeoutCleanTime < RELTIMEOUT) { - return; - } + lastTimeoutCleanTime = cur; + } - List bulks = new ArrayList<>(); - Predicate> isTimeout = t -> cur - t.v1() > RELTIMEOUT; - synchronized (BackOffRetryStrategy.class) { - memUse.values().stream().filter(isTimeout).forEach(v -> bulks.add(v.v2())); - memUse.values().removeIf(isTimeout); - } - - for (long v : bulks) { - atomicMinusLowBoundZero(mem, v); - } - - lastTimeoutCleanTime = cur; - } - - private static void atomicMinusLowBoundZero(AtomicLong x, Long y) { - long memRes = x.addAndGet(-y); - if (memRes < 0) { - x.compareAndSet(memRes, 0L); - } + private static void atomicMinusLowBoundZero(AtomicLong x, Long y) { + long memRes = x.addAndGet(-y); + if (memRes < 0) { + x.compareAndSet(memRes, 0L); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/HashJoinElasticRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/HashJoinElasticRequestBuilder.java index 3ab8c11ee0..c8b44e1bbb 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/HashJoinElasticRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/HashJoinElasticRequestBuilder.java @@ -3,42 
+3,39 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.join; import java.util.List; import java.util.Map; import org.opensearch.sql.legacy.domain.Field; -/** - * Created by Eliran on 22/8/2015. - */ +/** Created by Eliran on 22/8/2015. */ public class HashJoinElasticRequestBuilder extends JoinRequestBuilder { - private List>> t1ToT2FieldsComparison; - private boolean useTermFiltersOptimization; + private List>> t1ToT2FieldsComparison; + private boolean useTermFiltersOptimization; - public HashJoinElasticRequestBuilder() { - } + public HashJoinElasticRequestBuilder() {} - @Override - public String explain() { - return "HashJoin " + super.explain(); - } + @Override + public String explain() { + return "HashJoin " + super.explain(); + } - public List>> getT1ToT2FieldsComparison() { - return t1ToT2FieldsComparison; - } + public List>> getT1ToT2FieldsComparison() { + return t1ToT2FieldsComparison; + } - public void setT1ToT2FieldsComparison(List>> t1ToT2FieldsComparison) { - this.t1ToT2FieldsComparison = t1ToT2FieldsComparison; - } + public void setT1ToT2FieldsComparison( + List>> t1ToT2FieldsComparison) { + this.t1ToT2FieldsComparison = t1ToT2FieldsComparison; + } - public boolean isUseTermFiltersOptimization() { - return useTermFiltersOptimization; - } + public boolean isUseTermFiltersOptimization() { + return useTermFiltersOptimization; + } - public void setUseTermFiltersOptimization(boolean useTermFiltersOptimization) { - this.useTermFiltersOptimization = useTermFiltersOptimization; - } + public void setUseTermFiltersOptimization(boolean useTermFiltersOptimization) { + this.useTermFiltersOptimization = useTermFiltersOptimization; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/JoinRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/JoinRequestBuilder.java index 316d17a275..82ebd1b225 100644 --- 
a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/JoinRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/JoinRequestBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.join; import com.alibaba.druid.sql.ast.statement.SQLJoinTableSource; @@ -18,95 +17,99 @@ import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.sql.legacy.query.SqlElasticRequestBuilder; -/** - * Created by Eliran on 15/9/2015. - */ +/** Created by Eliran on 15/9/2015. */ public class JoinRequestBuilder implements SqlElasticRequestBuilder { - private MultiSearchRequest multi; - private TableInJoinRequestBuilder firstTable; - private TableInJoinRequestBuilder secondTable; - private SQLJoinTableSource.JoinType joinType; - private int totalLimit; - - public JoinRequestBuilder() { - firstTable = new TableInJoinRequestBuilder(); - secondTable = new TableInJoinRequestBuilder(); - } - - - @Override - public ActionRequest request() { - if (multi == null) { - buildMulti(); - } - return multi; - - } - - private void buildMulti() { - multi = new MultiSearchRequest(); - multi.add(firstTable.getRequestBuilder()); - multi.add(secondTable.getRequestBuilder()); - } - - @Override - public String explain() { - try { - XContentBuilder firstBuilder = XContentFactory.jsonBuilder().prettyPrint(); - firstTable.getRequestBuilder().request().source().toXContent(firstBuilder, ToXContent.EMPTY_PARAMS); - - XContentBuilder secondBuilder = XContentFactory.jsonBuilder().prettyPrint(); - secondTable.getRequestBuilder().request().source().toXContent(secondBuilder, ToXContent.EMPTY_PARAMS); - return String.format(" first query:\n%s\n second query:\n%s", - BytesReference.bytes(firstBuilder).utf8ToString(), - BytesReference.bytes(secondBuilder).utf8ToString()); - } catch (IOException e) { - e.printStackTrace(); - } - return null; - } - - @Override - public ActionResponse get() { - return null; - } - - @Override - 
public ActionRequestBuilder getBuilder() { - return this.firstTable.getRequestBuilder(); + private MultiSearchRequest multi; + private TableInJoinRequestBuilder firstTable; + private TableInJoinRequestBuilder secondTable; + private SQLJoinTableSource.JoinType joinType; + private int totalLimit; + + public JoinRequestBuilder() { + firstTable = new TableInJoinRequestBuilder(); + secondTable = new TableInJoinRequestBuilder(); + } + + @Override + public ActionRequest request() { + if (multi == null) { + buildMulti(); } - - public MultiSearchRequest getMulti() { - return multi; + return multi; + } + + private void buildMulti() { + multi = new MultiSearchRequest(); + multi.add(firstTable.getRequestBuilder()); + multi.add(secondTable.getRequestBuilder()); + } + + @Override + public String explain() { + try { + XContentBuilder firstBuilder = XContentFactory.jsonBuilder().prettyPrint(); + firstTable + .getRequestBuilder() + .request() + .source() + .toXContent(firstBuilder, ToXContent.EMPTY_PARAMS); + + XContentBuilder secondBuilder = XContentFactory.jsonBuilder().prettyPrint(); + secondTable + .getRequestBuilder() + .request() + .source() + .toXContent(secondBuilder, ToXContent.EMPTY_PARAMS); + return String.format( + " first query:\n%s\n second query:\n%s", + BytesReference.bytes(firstBuilder).utf8ToString(), + BytesReference.bytes(secondBuilder).utf8ToString()); + } catch (IOException e) { + e.printStackTrace(); } - - public void setMulti(MultiSearchRequest multi) { - this.multi = multi; - } - - public SQLJoinTableSource.JoinType getJoinType() { - return joinType; - } - - public void setJoinType(SQLJoinTableSource.JoinType joinType) { - this.joinType = joinType; - } - - public TableInJoinRequestBuilder getFirstTable() { - return firstTable; - } - - public TableInJoinRequestBuilder getSecondTable() { - return secondTable; - } - - public int getTotalLimit() { - return totalLimit; - } - - public void setTotalLimit(int totalLimit) { - this.totalLimit = totalLimit; - } - + 
return null; + } + + @Override + public ActionResponse get() { + return null; + } + + @Override + public ActionRequestBuilder getBuilder() { + return this.firstTable.getRequestBuilder(); + } + + public MultiSearchRequest getMulti() { + return multi; + } + + public void setMulti(MultiSearchRequest multi) { + this.multi = multi; + } + + public SQLJoinTableSource.JoinType getJoinType() { + return joinType; + } + + public void setJoinType(SQLJoinTableSource.JoinType joinType) { + this.joinType = joinType; + } + + public TableInJoinRequestBuilder getFirstTable() { + return firstTable; + } + + public TableInJoinRequestBuilder getSecondTable() { + return secondTable; + } + + public int getTotalLimit() { + return totalLimit; + } + + public void setTotalLimit(int totalLimit) { + this.totalLimit = totalLimit; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/NestedLoopsElasticRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/NestedLoopsElasticRequestBuilder.java index 899e0f5e1d..9dd34c71b9 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/NestedLoopsElasticRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/NestedLoopsElasticRequestBuilder.java @@ -3,15 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.join; - import java.io.IOException; import org.json.JSONObject; import org.json.JSONStringer; -import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.sql.legacy.domain.Condition; @@ -19,86 +17,96 @@ import org.opensearch.sql.legacy.exception.SqlParseException; import org.opensearch.sql.legacy.query.maker.QueryMaker; -/** - * Created by Eliran on 15/9/2015. 
- */ +/** Created by Eliran on 15/9/2015. */ public class NestedLoopsElasticRequestBuilder extends JoinRequestBuilder { - private Where connectedWhere; - private int multiSearchMaxSize; + private Where connectedWhere; + private int multiSearchMaxSize; - public NestedLoopsElasticRequestBuilder() { + public NestedLoopsElasticRequestBuilder() { - multiSearchMaxSize = 100; - } + multiSearchMaxSize = 100; + } - @Override - public String explain() { - String conditions = ""; - - try { - Where where = (Where) this.connectedWhere.clone(); - setValueTypeConditionToStringRecursive(where); - if (where != null) { - conditions = QueryMaker.explain(where, false).toString(); - } - } catch (CloneNotSupportedException | SqlParseException e) { - conditions = "Could not parse conditions due to " + e.getMessage(); - } - - String desc = "Nested Loops run first query, and for each result run " - + "second query with additional conditions as following."; - String[] queries = explainNL(); - JSONStringer jsonStringer = new JSONStringer(); - jsonStringer.object().key("description").value(desc) - .key("conditions").value(new JSONObject(conditions)) - .key("first query").value(new JSONObject(queries[0])) - .key("second query").value(new JSONObject(queries[1])).endObject(); - return jsonStringer.toString(); - } + @Override + public String explain() { + String conditions = ""; - public int getMultiSearchMaxSize() { - return multiSearchMaxSize; + try { + Where where = (Where) this.connectedWhere.clone(); + setValueTypeConditionToStringRecursive(where); + if (where != null) { + conditions = QueryMaker.explain(where, false).toString(); + } + } catch (CloneNotSupportedException | SqlParseException e) { + conditions = "Could not parse conditions due to " + e.getMessage(); } - public void setMultiSearchMaxSize(int multiSearchMaxSize) { - this.multiSearchMaxSize = multiSearchMaxSize; + String desc = + "Nested Loops run first query, and for each result run " + + "second query with additional conditions 
as following."; + String[] queries = explainNL(); + JSONStringer jsonStringer = new JSONStringer(); + jsonStringer + .object() + .key("description") + .value(desc) + .key("conditions") + .value(new JSONObject(conditions)) + .key("first query") + .value(new JSONObject(queries[0])) + .key("second query") + .value(new JSONObject(queries[1])) + .endObject(); + return jsonStringer.toString(); + } + + public int getMultiSearchMaxSize() { + return multiSearchMaxSize; + } + + public void setMultiSearchMaxSize(int multiSearchMaxSize) { + this.multiSearchMaxSize = multiSearchMaxSize; + } + + public Where getConnectedWhere() { + return connectedWhere; + } + + public void setConnectedWhere(Where connectedWhere) { + this.connectedWhere = connectedWhere; + } + + private void setValueTypeConditionToStringRecursive(Where where) { + if (where == null) { + return; } - - public Where getConnectedWhere() { - return connectedWhere; + if (where instanceof Condition) { + Condition c = (Condition) where; + c.setValue(c.getValue().toString()); + return; + } else { + for (Where innerWhere : where.getWheres()) { + setValueTypeConditionToStringRecursive(innerWhere); + } } - - public void setConnectedWhere(Where connectedWhere) { - this.connectedWhere = connectedWhere; - } - - private void setValueTypeConditionToStringRecursive(Where where) { - if (where == null) { - return; - } - if (where instanceof Condition) { - Condition c = (Condition) where; - c.setValue(c.getValue().toString()); - return; - } else { - for (Where innerWhere : where.getWheres()) { - setValueTypeConditionToStringRecursive(innerWhere); - } - } - } - - private String[] explainNL() { - return new String[]{explainQuery(this.getFirstTable()), explainQuery(this.getSecondTable())}; - } - - private String explainQuery(TableInJoinRequestBuilder requestBuilder) { - try { - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().prettyPrint(); - 
requestBuilder.getRequestBuilder().request().source().toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS); - return BytesReference.bytes(xContentBuilder).utf8ToString(); - } catch (IOException e) { - return e.getMessage(); - } + } + + private String[] explainNL() { + return new String[] {explainQuery(this.getFirstTable()), explainQuery(this.getSecondTable())}; + } + + private String explainQuery(TableInJoinRequestBuilder requestBuilder) { + try { + XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().prettyPrint(); + requestBuilder + .getRequestBuilder() + .request() + .source() + .toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS); + return BytesReference.bytes(xContentBuilder).utf8ToString(); + } catch (IOException e) { + return e.getMessage(); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchHashJoinQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchHashJoinQueryAction.java index 0a87c16067..078ed6bcce 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchHashJoinQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchHashJoinQueryAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.join; import java.util.AbstractMap; @@ -20,129 +19,126 @@ import org.opensearch.sql.legacy.exception.SqlParseException; import org.opensearch.sql.legacy.query.planner.HashJoinQueryPlanRequestBuilder; -/** - * Created by Eliran on 22/8/2015. - */ +/** Created by Eliran on 22/8/2015. 
*/ public class OpenSearchHashJoinQueryAction extends OpenSearchJoinQueryAction { - public OpenSearchHashJoinQueryAction(Client client, JoinSelect joinSelect) { - super(client, joinSelect); - } + public OpenSearchHashJoinQueryAction(Client client, JoinSelect joinSelect) { + super(client, joinSelect); + } - @Override - protected void fillSpecificRequestBuilder(JoinRequestBuilder requestBuilder) throws SqlParseException { - String t1Alias = joinSelect.getFirstTable().getAlias(); - String t2Alias = joinSelect.getSecondTable().getAlias(); + @Override + protected void fillSpecificRequestBuilder(JoinRequestBuilder requestBuilder) + throws SqlParseException { + String t1Alias = joinSelect.getFirstTable().getAlias(); + String t2Alias = joinSelect.getSecondTable().getAlias(); - List>> comparisonFields = getComparisonFields(t1Alias, t2Alias, - joinSelect.getConnectedWhere()); + List>> comparisonFields = + getComparisonFields(t1Alias, t2Alias, joinSelect.getConnectedWhere()); - ((HashJoinElasticRequestBuilder) requestBuilder).setT1ToT2FieldsComparison(comparisonFields); - } + ((HashJoinElasticRequestBuilder) requestBuilder).setT1ToT2FieldsComparison(comparisonFields); + } - @Override - protected JoinRequestBuilder createSpecificBuilder() { - if (isLegacy()) { - return new HashJoinElasticRequestBuilder(); - } - return new HashJoinQueryPlanRequestBuilder(client, sqlRequest); + @Override + protected JoinRequestBuilder createSpecificBuilder() { + if (isLegacy()) { + return new HashJoinElasticRequestBuilder(); } - - @Override - protected void updateRequestWithHints(JoinRequestBuilder requestBuilder) { - super.updateRequestWithHints(requestBuilder); - for (Hint hint : joinSelect.getHints()) { - if (hint.getType() == HintType.HASH_WITH_TERMS_FILTER) { - ((HashJoinElasticRequestBuilder) requestBuilder).setUseTermFiltersOptimization(true); - } - } + return new HashJoinQueryPlanRequestBuilder(client, sqlRequest); + } + + @Override + protected void 
updateRequestWithHints(JoinRequestBuilder requestBuilder) { + super.updateRequestWithHints(requestBuilder); + for (Hint hint : joinSelect.getHints()) { + if (hint.getType() == HintType.HASH_WITH_TERMS_FILTER) { + ((HashJoinElasticRequestBuilder) requestBuilder).setUseTermFiltersOptimization(true); + } } - - /** - * Keep the option to run legacy hash join algorithm mainly for the comparison - */ - private boolean isLegacy() { - for (Hint hint : joinSelect.getHints()) { - if (hint.getType() == HintType.JOIN_ALGORITHM_USE_LEGACY) { - return true; - } - } - return false; + } + + /** Keep the option to run legacy hash join algorithm mainly for the comparison */ + private boolean isLegacy() { + for (Hint hint : joinSelect.getHints()) { + if (hint.getType() == HintType.JOIN_ALGORITHM_USE_LEGACY) { + return true; + } } - - private List> getComparisonFields(String t1Alias, String t2Alias, - List connectedConditions) - throws SqlParseException { - List> comparisonFields = new ArrayList<>(); - for (Condition condition : connectedConditions) { - - if (condition.getOPERATOR() != Condition.OPERATOR.EQ) { - throw new SqlParseException( - String.format("HashJoin should only be with EQ conditions, got:%s on condition:%s", - condition.getOPERATOR().name(), condition.toString())); - } - - String firstField = condition.getName(); - String secondField = condition.getValue().toString(); - Field t1Field, t2Field; - if (firstField.startsWith(t1Alias)) { - t1Field = new Field(removeAlias(firstField, t1Alias), null); - t2Field = new Field(removeAlias(secondField, t2Alias), null); - } else { - t1Field = new Field(removeAlias(secondField, t1Alias), null); - t2Field = new Field(removeAlias(firstField, t2Alias), null); - } - comparisonFields.add(new AbstractMap.SimpleEntry<>(t1Field, t2Field)); - } - return comparisonFields; + return false; + } + + private List> getComparisonFields( + String t1Alias, String t2Alias, List connectedConditions) + throws SqlParseException { + List> comparisonFields 
= new ArrayList<>(); + for (Condition condition : connectedConditions) { + + if (condition.getOPERATOR() != Condition.OPERATOR.EQ) { + throw new SqlParseException( + String.format( + "HashJoin should only be with EQ conditions, got:%s on condition:%s", + condition.getOPERATOR().name(), condition.toString())); + } + + String firstField = condition.getName(); + String secondField = condition.getValue().toString(); + Field t1Field, t2Field; + if (firstField.startsWith(t1Alias)) { + t1Field = new Field(removeAlias(firstField, t1Alias), null); + t2Field = new Field(removeAlias(secondField, t2Alias), null); + } else { + t1Field = new Field(removeAlias(secondField, t1Alias), null); + t2Field = new Field(removeAlias(firstField, t2Alias), null); + } + comparisonFields.add(new AbstractMap.SimpleEntry<>(t1Field, t2Field)); } - - private List>> getComparisonFields(String t1Alias, String t2Alias, - Where connectedWhere) throws SqlParseException { - List>> comparisonFields = new ArrayList<>(); - //where is AND with lots of conditions. - if (connectedWhere == null) { - return comparisonFields; - } - boolean allAnds = true; - for (Where innerWhere : connectedWhere.getWheres()) { - if (innerWhere.getConn() == Where.CONN.OR) { - allAnds = false; - break; - } - } - if (allAnds) { - List> innerComparisonFields = - getComparisonFieldsFromWhere(t1Alias, t2Alias, connectedWhere); - comparisonFields.add(innerComparisonFields); - } else { - for (Where innerWhere : connectedWhere.getWheres()) { - comparisonFields.add(getComparisonFieldsFromWhere(t1Alias, t2Alias, innerWhere)); - } - } - - return comparisonFields; + return comparisonFields; + } + + private List>> getComparisonFields( + String t1Alias, String t2Alias, Where connectedWhere) throws SqlParseException { + List>> comparisonFields = new ArrayList<>(); + // where is AND with lots of conditions. 
+ if (connectedWhere == null) { + return comparisonFields; } - - private List> getComparisonFieldsFromWhere(String t1Alias, String t2Alias, Where where) - throws SqlParseException { - List conditions = new ArrayList<>(); - if (where instanceof Condition) { - conditions.add((Condition) where); - } else { - for (Where innerWhere : where.getWheres()) { - if (!(innerWhere instanceof Condition)) { - throw new SqlParseException( - "if connectedCondition is AND then all inner wheres should be Conditions"); - } - conditions.add((Condition) innerWhere); - } - } - return getComparisonFields(t1Alias, t2Alias, conditions); + boolean allAnds = true; + for (Where innerWhere : connectedWhere.getWheres()) { + if (innerWhere.getConn() == Where.CONN.OR) { + allAnds = false; + break; + } + } + if (allAnds) { + List> innerComparisonFields = + getComparisonFieldsFromWhere(t1Alias, t2Alias, connectedWhere); + comparisonFields.add(innerComparisonFields); + } else { + for (Where innerWhere : connectedWhere.getWheres()) { + comparisonFields.add(getComparisonFieldsFromWhere(t1Alias, t2Alias, innerWhere)); + } } - private String removeAlias(String field, String alias) { - return field.replace(alias + ".", ""); + return comparisonFields; + } + + private List> getComparisonFieldsFromWhere( + String t1Alias, String t2Alias, Where where) throws SqlParseException { + List conditions = new ArrayList<>(); + if (where instanceof Condition) { + conditions.add((Condition) where); + } else { + for (Where innerWhere : where.getWheres()) { + if (!(innerWhere instanceof Condition)) { + throw new SqlParseException( + "if connectedCondition is AND then all inner wheres should be Conditions"); + } + conditions.add((Condition) innerWhere); + } } + return getComparisonFields(t1Alias, t2Alias, conditions); + } + private String removeAlias(String field, String alias) { + return field.replace(alias + ".", ""); + } } diff --git 
a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchJoinQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchJoinQueryAction.java index 35e718d985..7068ddf9a2 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchJoinQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchJoinQueryAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.join; import java.util.List; @@ -20,111 +19,107 @@ import org.opensearch.sql.legacy.query.planner.HashJoinQueryPlanRequestBuilder; import org.opensearch.sql.legacy.query.planner.core.Config; -/** - * Created by Eliran on 15/9/2015. - */ +/** Created by Eliran on 15/9/2015. */ public abstract class OpenSearchJoinQueryAction extends QueryAction { - protected JoinSelect joinSelect; - - public OpenSearchJoinQueryAction(Client client, JoinSelect joinSelect) { - super(client, joinSelect); - this.joinSelect = joinSelect; - } - - @Override - public SqlElasticRequestBuilder explain() throws SqlParseException { - JoinRequestBuilder requestBuilder = createSpecificBuilder(); - fillBasicJoinRequestBuilder(requestBuilder); - fillSpecificRequestBuilder(requestBuilder); - return requestBuilder; - } - - protected abstract void fillSpecificRequestBuilder(JoinRequestBuilder requestBuilder) throws SqlParseException; - - protected abstract JoinRequestBuilder createSpecificBuilder(); - - - private void fillBasicJoinRequestBuilder(JoinRequestBuilder requestBuilder) throws SqlParseException { - - fillTableInJoinRequestBuilder(requestBuilder.getFirstTable(), joinSelect.getFirstTable()); - fillTableInJoinRequestBuilder(requestBuilder.getSecondTable(), joinSelect.getSecondTable()); - - requestBuilder.setJoinType(joinSelect.getJoinType()); - - requestBuilder.setTotalLimit(joinSelect.getTotalLimit()); - - updateRequestWithHints(requestBuilder); - - - } - - protected void 
updateRequestWithHints(JoinRequestBuilder requestBuilder) { - for (Hint hint : joinSelect.getHints()) { - Object[] params = hint.getParams(); - switch (hint.getType()) { - case JOIN_LIMIT: - requestBuilder.getFirstTable().setHintLimit((Integer) params[0]); - requestBuilder.getSecondTable().setHintLimit((Integer) params[1]); - break; - case JOIN_ALGORITHM_BLOCK_SIZE: - if (requestBuilder instanceof HashJoinQueryPlanRequestBuilder) { - queryPlannerConfig(requestBuilder).configureBlockSize(hint.getParams()); - } - break; - case JOIN_SCROLL_PAGE_SIZE: - if (requestBuilder instanceof HashJoinQueryPlanRequestBuilder) { - queryPlannerConfig(requestBuilder).configureScrollPageSize(hint.getParams()); - } - break; - case JOIN_CIRCUIT_BREAK_LIMIT: - if (requestBuilder instanceof HashJoinQueryPlanRequestBuilder) { - queryPlannerConfig(requestBuilder).configureCircuitBreakLimit(hint.getParams()); - } - break; - case JOIN_BACK_OFF_RETRY_INTERVALS: - if (requestBuilder instanceof HashJoinQueryPlanRequestBuilder) { - queryPlannerConfig(requestBuilder).configureBackOffRetryIntervals(hint.getParams()); - } - break; - case JOIN_TIME_OUT: - if (requestBuilder instanceof HashJoinQueryPlanRequestBuilder) { - queryPlannerConfig(requestBuilder).configureTimeOut(hint.getParams()); - } - break; - } - } + protected JoinSelect joinSelect; + + public OpenSearchJoinQueryAction(Client client, JoinSelect joinSelect) { + super(client, joinSelect); + this.joinSelect = joinSelect; + } + + @Override + public SqlElasticRequestBuilder explain() throws SqlParseException { + JoinRequestBuilder requestBuilder = createSpecificBuilder(); + fillBasicJoinRequestBuilder(requestBuilder); + fillSpecificRequestBuilder(requestBuilder); + return requestBuilder; + } + + protected abstract void fillSpecificRequestBuilder(JoinRequestBuilder requestBuilder) + throws SqlParseException; + + protected abstract JoinRequestBuilder createSpecificBuilder(); + + private void fillBasicJoinRequestBuilder(JoinRequestBuilder 
requestBuilder) + throws SqlParseException { + + fillTableInJoinRequestBuilder(requestBuilder.getFirstTable(), joinSelect.getFirstTable()); + fillTableInJoinRequestBuilder(requestBuilder.getSecondTable(), joinSelect.getSecondTable()); + + requestBuilder.setJoinType(joinSelect.getJoinType()); + + requestBuilder.setTotalLimit(joinSelect.getTotalLimit()); + + updateRequestWithHints(requestBuilder); + } + + protected void updateRequestWithHints(JoinRequestBuilder requestBuilder) { + for (Hint hint : joinSelect.getHints()) { + Object[] params = hint.getParams(); + switch (hint.getType()) { + case JOIN_LIMIT: + requestBuilder.getFirstTable().setHintLimit((Integer) params[0]); + requestBuilder.getSecondTable().setHintLimit((Integer) params[1]); + break; + case JOIN_ALGORITHM_BLOCK_SIZE: + if (requestBuilder instanceof HashJoinQueryPlanRequestBuilder) { + queryPlannerConfig(requestBuilder).configureBlockSize(hint.getParams()); + } + break; + case JOIN_SCROLL_PAGE_SIZE: + if (requestBuilder instanceof HashJoinQueryPlanRequestBuilder) { + queryPlannerConfig(requestBuilder).configureScrollPageSize(hint.getParams()); + } + break; + case JOIN_CIRCUIT_BREAK_LIMIT: + if (requestBuilder instanceof HashJoinQueryPlanRequestBuilder) { + queryPlannerConfig(requestBuilder).configureCircuitBreakLimit(hint.getParams()); + } + break; + case JOIN_BACK_OFF_RETRY_INTERVALS: + if (requestBuilder instanceof HashJoinQueryPlanRequestBuilder) { + queryPlannerConfig(requestBuilder).configureBackOffRetryIntervals(hint.getParams()); + } + break; + case JOIN_TIME_OUT: + if (requestBuilder instanceof HashJoinQueryPlanRequestBuilder) { + queryPlannerConfig(requestBuilder).configureTimeOut(hint.getParams()); + } + break; + } } - - private Config queryPlannerConfig(JoinRequestBuilder requestBuilder) { - return ((HashJoinQueryPlanRequestBuilder) requestBuilder).getConfig(); - } - - private void fillTableInJoinRequestBuilder(TableInJoinRequestBuilder requestBuilder, - TableOnJoinSelect tableOnJoinSelect) 
throws SqlParseException { - List connectedFields = tableOnJoinSelect.getConnectedFields(); - addFieldsToSelectIfMissing(tableOnJoinSelect, connectedFields); - requestBuilder.setOriginalSelect(tableOnJoinSelect); - DefaultQueryAction queryAction = new DefaultQueryAction(client, tableOnJoinSelect); - queryAction.explain(); - requestBuilder.setRequestBuilder(queryAction.getRequestBuilder()); - requestBuilder.setReturnedFields(tableOnJoinSelect.getSelectedFields()); - requestBuilder.setAlias(tableOnJoinSelect.getAlias()); + } + + private Config queryPlannerConfig(JoinRequestBuilder requestBuilder) { + return ((HashJoinQueryPlanRequestBuilder) requestBuilder).getConfig(); + } + + private void fillTableInJoinRequestBuilder( + TableInJoinRequestBuilder requestBuilder, TableOnJoinSelect tableOnJoinSelect) + throws SqlParseException { + List connectedFields = tableOnJoinSelect.getConnectedFields(); + addFieldsToSelectIfMissing(tableOnJoinSelect, connectedFields); + requestBuilder.setOriginalSelect(tableOnJoinSelect); + DefaultQueryAction queryAction = new DefaultQueryAction(client, tableOnJoinSelect); + queryAction.explain(); + requestBuilder.setRequestBuilder(queryAction.getRequestBuilder()); + requestBuilder.setReturnedFields(tableOnJoinSelect.getSelectedFields()); + requestBuilder.setAlias(tableOnJoinSelect.getAlias()); + } + + private void addFieldsToSelectIfMissing(Select select, List fields) { + // this means all fields + if (select.getFields() == null || select.getFields().size() == 0) { + return; } - private void addFieldsToSelectIfMissing(Select select, List fields) { - //this means all fields - if (select.getFields() == null || select.getFields().size() == 0) { - return; - } - - List selectedFields = select.getFields(); - for (Field field : fields) { - if (!selectedFields.contains(field)) { - selectedFields.add(field); - } - } - + List selectedFields = select.getFields(); + for (Field field : fields) { + if (!selectedFields.contains(field)) { + 
selectedFields.add(field); + } } - + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchJoinQueryActionFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchJoinQueryActionFactory.java index c96cb6120c..c638f43519 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchJoinQueryActionFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchJoinQueryActionFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.join; import java.util.List; @@ -14,36 +13,32 @@ import org.opensearch.sql.legacy.domain.hints.HintType; import org.opensearch.sql.legacy.query.QueryAction; -/** - * Created by Eliran on 15/9/2015. - */ +/** Created by Eliran on 15/9/2015. */ public class OpenSearchJoinQueryActionFactory { - public static QueryAction createJoinAction(Client client, JoinSelect joinSelect) { - List connectedConditions = joinSelect.getConnectedConditions(); - boolean allEqual = true; - for (Condition condition : connectedConditions) { - if (condition.getOPERATOR() != Condition.OPERATOR.EQ) { - allEqual = false; - break; - } - - } - if (!allEqual) { - return new OpenSearchNestedLoopsQueryAction(client, joinSelect); - } - - boolean useNestedLoopsHintExist = false; - for (Hint hint : joinSelect.getHints()) { - if (hint.getType() == HintType.USE_NESTED_LOOPS) { - useNestedLoopsHintExist = true; - break; - } - } - if (useNestedLoopsHintExist) { - return new OpenSearchNestedLoopsQueryAction(client, joinSelect); - } - - return new OpenSearchHashJoinQueryAction(client, joinSelect); + public static QueryAction createJoinAction(Client client, JoinSelect joinSelect) { + List connectedConditions = joinSelect.getConnectedConditions(); + boolean allEqual = true; + for (Condition condition : connectedConditions) { + if (condition.getOPERATOR() != Condition.OPERATOR.EQ) { + allEqual = false; + break; + } + } + if (!allEqual) 
{ + return new OpenSearchNestedLoopsQueryAction(client, joinSelect); + } + boolean useNestedLoopsHintExist = false; + for (Hint hint : joinSelect.getHints()) { + if (hint.getType() == HintType.USE_NESTED_LOOPS) { + useNestedLoopsHintExist = true; + break; + } } + if (useNestedLoopsHintExist) { + return new OpenSearchNestedLoopsQueryAction(client, joinSelect); + } + + return new OpenSearchHashJoinQueryAction(client, joinSelect); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchNestedLoopsQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchNestedLoopsQueryAction.java index 8954106f8a..e9e9169605 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchNestedLoopsQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/OpenSearchNestedLoopsQueryAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.join; import org.opensearch.client.Client; @@ -13,45 +12,44 @@ import org.opensearch.sql.legacy.domain.hints.HintType; import org.opensearch.sql.legacy.exception.SqlParseException; -/** - * Created by Eliran on 15/9/2015. - */ +/** Created by Eliran on 15/9/2015. 
*/ public class OpenSearchNestedLoopsQueryAction extends OpenSearchJoinQueryAction { - public OpenSearchNestedLoopsQueryAction(Client client, JoinSelect joinSelect) { - super(client, joinSelect); - } - - @Override - protected void fillSpecificRequestBuilder(JoinRequestBuilder requestBuilder) throws SqlParseException { - NestedLoopsElasticRequestBuilder nestedBuilder = (NestedLoopsElasticRequestBuilder) requestBuilder; - Where where = joinSelect.getConnectedWhere(); - nestedBuilder.setConnectedWhere(where); - + public OpenSearchNestedLoopsQueryAction(Client client, JoinSelect joinSelect) { + super(client, joinSelect); + } + + @Override + protected void fillSpecificRequestBuilder(JoinRequestBuilder requestBuilder) + throws SqlParseException { + NestedLoopsElasticRequestBuilder nestedBuilder = + (NestedLoopsElasticRequestBuilder) requestBuilder; + Where where = joinSelect.getConnectedWhere(); + nestedBuilder.setConnectedWhere(where); + } + + @Override + protected JoinRequestBuilder createSpecificBuilder() { + return new NestedLoopsElasticRequestBuilder(); + } + + @Override + protected void updateRequestWithHints(JoinRequestBuilder requestBuilder) { + super.updateRequestWithHints(requestBuilder); + for (Hint hint : this.joinSelect.getHints()) { + if (hint.getType() == HintType.NL_MULTISEARCH_SIZE) { + Integer multiSearchMaxSize = (Integer) hint.getParams()[0]; + ((NestedLoopsElasticRequestBuilder) requestBuilder) + .setMultiSearchMaxSize(multiSearchMaxSize); + } } + } - @Override - protected JoinRequestBuilder createSpecificBuilder() { - return new NestedLoopsElasticRequestBuilder(); + private String removeAlias(String field) { + String alias = joinSelect.getFirstTable().getAlias(); + if (!field.startsWith(alias + ".")) { + alias = joinSelect.getSecondTable().getAlias(); } - - @Override - protected void updateRequestWithHints(JoinRequestBuilder requestBuilder) { - super.updateRequestWithHints(requestBuilder); - for (Hint hint : this.joinSelect.getHints()) { - if 
(hint.getType() == HintType.NL_MULTISEARCH_SIZE) { - Integer multiSearchMaxSize = (Integer) hint.getParams()[0]; - ((NestedLoopsElasticRequestBuilder) requestBuilder).setMultiSearchMaxSize(multiSearchMaxSize); - } - } - } - - private String removeAlias(String field) { - String alias = joinSelect.getFirstTable().getAlias(); - if (!field.startsWith(alias + ".")) { - alias = joinSelect.getSecondTable().getAlias(); - } - return field.replace(alias + ".", ""); - } - + return field.replace(alias + ".", ""); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/TableInJoinRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/TableInJoinRequestBuilder.java index b1a07486b7..0b37497541 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/join/TableInJoinRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/join/TableInJoinRequestBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.join; import java.util.List; @@ -11,56 +10,53 @@ import org.opensearch.sql.legacy.domain.Field; import org.opensearch.sql.legacy.domain.Select; -/** - * Created by Eliran on 28/8/2015. - */ +/** Created by Eliran on 28/8/2015. 
*/ public class TableInJoinRequestBuilder { - private SearchRequestBuilder requestBuilder; - private String alias; - private List returnedFields; - private Select originalSelect; - private Integer hintLimit; + private SearchRequestBuilder requestBuilder; + private String alias; + private List returnedFields; + private Select originalSelect; + private Integer hintLimit; - public TableInJoinRequestBuilder() { - } + public TableInJoinRequestBuilder() {} - public SearchRequestBuilder getRequestBuilder() { - return requestBuilder; - } + public SearchRequestBuilder getRequestBuilder() { + return requestBuilder; + } - public void setRequestBuilder(SearchRequestBuilder requestBuilder) { - this.requestBuilder = requestBuilder; - } + public void setRequestBuilder(SearchRequestBuilder requestBuilder) { + this.requestBuilder = requestBuilder; + } - public String getAlias() { - return alias; - } + public String getAlias() { + return alias; + } - public void setAlias(String alias) { - this.alias = alias; - } + public void setAlias(String alias) { + this.alias = alias; + } - public List getReturnedFields() { - return returnedFields; - } + public List getReturnedFields() { + return returnedFields; + } - public void setReturnedFields(List returnedFields) { - this.returnedFields = returnedFields; - } + public void setReturnedFields(List returnedFields) { + this.returnedFields = returnedFields; + } - public Select getOriginalSelect() { - return originalSelect; - } + public Select getOriginalSelect() { + return originalSelect; + } - public void setOriginalSelect(Select originalSelect) { - this.originalSelect = originalSelect; - } + public void setOriginalSelect(Select originalSelect) { + this.originalSelect = originalSelect; + } - public Integer getHintLimit() { - return hintLimit; - } + public Integer getHintLimit() { + return hintLimit; + } - public void setHintLimit(Integer hintLimit) { - this.hintLimit = hintLimit; - } + public void setHintLimit(Integer hintLimit) { + 
this.hintLimit = hintLimit; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/AggMaker.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/AggMaker.java index 0c9caab03d..6501fc480d 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/AggMaker.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/AggMaker.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.maker; import com.alibaba.druid.sql.ast.expr.SQLAggregateOption; @@ -18,14 +17,16 @@ import java.util.Map; import java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; -import org.opensearch.core.common.ParsingException; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.common.xcontent.json.JsonXContentParser; +import org.opensearch.core.common.ParsingException; import org.opensearch.core.common.Strings; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder; import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoHashGridAggregationBuilder; +import org.opensearch.geo.search.aggregations.metrics.GeoBoundsAggregationBuilder; import org.opensearch.join.aggregations.JoinAggregationBuilders; import org.opensearch.script.Script; import org.opensearch.script.ScriptType; @@ -35,7 +36,6 @@ import org.opensearch.search.aggregations.BucketOrder; import org.opensearch.search.aggregations.InternalOrder; import org.opensearch.search.aggregations.bucket.filter.FilterAggregationBuilder; -import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder; import org.opensearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import 
org.opensearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.opensearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; @@ -45,7 +45,6 @@ import org.opensearch.search.aggregations.bucket.range.RangeAggregationBuilder; import org.opensearch.search.aggregations.bucket.terms.IncludeExclude; import org.opensearch.search.aggregations.bucket.terms.TermsAggregationBuilder; -import org.opensearch.geo.search.aggregations.metrics.GeoBoundsAggregationBuilder; import org.opensearch.search.aggregations.metrics.PercentilesAggregationBuilder; import org.opensearch.search.aggregations.metrics.ScriptedMetricAggregationBuilder; import org.opensearch.search.aggregations.metrics.TopHitsAggregationBuilder; @@ -65,758 +64,782 @@ public class AggMaker { - /** - * The mapping bettwen group fieldName or Alias to the KVValue. - */ - private Map groupMap = new HashMap<>(); - private Where where; - - /** - * - * - * @param field - * @return - * @throws SqlParseException - */ - public AggregationBuilder makeGroupAgg(Field field) throws SqlParseException { - - if (field instanceof MethodField && field.getName().equals("script")) { - MethodField methodField = (MethodField) field; - TermsAggregationBuilder termsBuilder = AggregationBuilders.terms(methodField.getAlias()) - .script(new Script(methodField.getParams().get(1).value.toString())); - extendGroupMap(methodField, new KVValue("KEY", termsBuilder)); - return termsBuilder; - } - - - if (field instanceof MethodField) { - - MethodField methodField = (MethodField) field; - if (methodField.getName().equals("filter")) { - Map paramsAsMap = methodField.getParamsAsMap(); - Where where = (Where) paramsAsMap.get("where"); - return AggregationBuilders.filter(paramsAsMap.get("alias").toString(), - QueryMaker.explain(where)); - } - return makeRangeGroup(methodField); - } else { - String termName = (Strings.isNullOrEmpty(field.getAlias())) ? 
field.getName() : field.getAlias(); - TermsAggregationBuilder termsBuilder = AggregationBuilders.terms(termName).field(field.getName()); - final KVValue kvValue = new KVValue("KEY", termsBuilder); - groupMap.put(termName, kvValue); - // map the field name with KVValue if it is not yet. The use case is when alias exist, - // the termName is different with fieldName, both of them should be included in the map. - groupMap.putIfAbsent(field.getName(), kvValue); - return termsBuilder; - } + /** The mapping bettwen group fieldName or Alias to the KVValue. */ + private Map groupMap = new HashMap<>(); + + private Where where; + + /** + * @param field + * @return + * @throws SqlParseException + */ + public AggregationBuilder makeGroupAgg(Field field) throws SqlParseException { + + if (field instanceof MethodField && field.getName().equals("script")) { + MethodField methodField = (MethodField) field; + TermsAggregationBuilder termsBuilder = + AggregationBuilders.terms(methodField.getAlias()) + .script(new Script(methodField.getParams().get(1).value.toString())); + extendGroupMap(methodField, new KVValue("KEY", termsBuilder)); + return termsBuilder; } - - /** - * Create aggregation according to the SQL function. 
- * - * @param field SQL function - * @param parent parentAggregation - * @return AggregationBuilder represents the SQL function - * @throws SqlParseException in case of unrecognized function - */ - public AggregationBuilder makeFieldAgg(MethodField field, AggregationBuilder parent) throws SqlParseException { - extendGroupMap(field, new KVValue("FIELD", parent)); - ValuesSourceAggregationBuilder builder; - field.setAlias(fixAlias(field.getAlias())); - switch (field.getName().toUpperCase()) { - case "SUM": - builder = AggregationBuilders.sum(field.getAlias()); - return addFieldToAgg(field, builder); - case "MAX": - builder = AggregationBuilders.max(field.getAlias()); - return addFieldToAgg(field, builder); - case "MIN": - builder = AggregationBuilders.min(field.getAlias()); - return addFieldToAgg(field, builder); - case "AVG": - builder = AggregationBuilders.avg(field.getAlias()); - return addFieldToAgg(field, builder); - case "STATS": - builder = AggregationBuilders.stats(field.getAlias()); - return addFieldToAgg(field, builder); - case "EXTENDED_STATS": - builder = AggregationBuilders.extendedStats(field.getAlias()); - return addFieldToAgg(field, builder); - case "PERCENTILES": - builder = AggregationBuilders.percentiles(field.getAlias()); - addSpecificPercentiles((PercentilesAggregationBuilder) builder, field.getParams()); - return addFieldToAgg(field, builder); - case "TOPHITS": - return makeTopHitsAgg(field); - case "SCRIPTED_METRIC": - return scriptedMetric(field); - case "COUNT": - extendGroupMap(field, new KVValue("COUNT", parent)); - return addFieldToAgg(field, makeCountAgg(field)); - default: - throw new SqlParseException("the agg function not to define !"); - } + if (field instanceof MethodField) { + + MethodField methodField = (MethodField) field; + if (methodField.getName().equals("filter")) { + Map paramsAsMap = methodField.getParamsAsMap(); + Where where = (Where) paramsAsMap.get("where"); + return AggregationBuilders.filter( + 
paramsAsMap.get("alias").toString(), QueryMaker.explain(where)); + } + return makeRangeGroup(methodField); + } else { + String termName = + (Strings.isNullOrEmpty(field.getAlias())) ? field.getName() : field.getAlias(); + TermsAggregationBuilder termsBuilder = + AggregationBuilders.terms(termName).field(field.getName()); + final KVValue kvValue = new KVValue("KEY", termsBuilder); + groupMap.put(termName, kvValue); + // map the field name with KVValue if it is not yet. The use case is when alias exist, + // the termName is different with fieldName, both of them should be included in the map. + groupMap.putIfAbsent(field.getName(), kvValue); + return termsBuilder; } - - /** - * With {@link Where} Condition. - */ - public AggMaker withWhere(Where where) { - this.where = where; - return this; + } + + /** + * Create aggregation according to the SQL function. + * + * @param field SQL function + * @param parent parentAggregation + * @return AggregationBuilder represents the SQL function + * @throws SqlParseException in case of unrecognized function + */ + public AggregationBuilder makeFieldAgg(MethodField field, AggregationBuilder parent) + throws SqlParseException { + extendGroupMap(field, new KVValue("FIELD", parent)); + ValuesSourceAggregationBuilder builder; + field.setAlias(fixAlias(field.getAlias())); + switch (field.getName().toUpperCase()) { + case "SUM": + builder = AggregationBuilders.sum(field.getAlias()); + return addFieldToAgg(field, builder); + case "MAX": + builder = AggregationBuilders.max(field.getAlias()); + return addFieldToAgg(field, builder); + case "MIN": + builder = AggregationBuilders.min(field.getAlias()); + return addFieldToAgg(field, builder); + case "AVG": + builder = AggregationBuilders.avg(field.getAlias()); + return addFieldToAgg(field, builder); + case "STATS": + builder = AggregationBuilders.stats(field.getAlias()); + return addFieldToAgg(field, builder); + case "EXTENDED_STATS": + builder = 
AggregationBuilders.extendedStats(field.getAlias()); + return addFieldToAgg(field, builder); + case "PERCENTILES": + builder = AggregationBuilders.percentiles(field.getAlias()); + addSpecificPercentiles((PercentilesAggregationBuilder) builder, field.getParams()); + return addFieldToAgg(field, builder); + case "TOPHITS": + return makeTopHitsAgg(field); + case "SCRIPTED_METRIC": + return scriptedMetric(field); + case "COUNT": + extendGroupMap(field, new KVValue("COUNT", parent)); + return addFieldToAgg(field, makeCountAgg(field)); + default: + throw new SqlParseException("the agg function not to define !"); } - - private void addSpecificPercentiles(PercentilesAggregationBuilder percentilesBuilder, List params) { - List percentiles = new ArrayList<>(); - for (KVValue kValue : params) { - if (kValue.value.getClass().equals(BigDecimal.class)) { - BigDecimal percentile = (BigDecimal) kValue.value; - percentiles.add(percentile.doubleValue()); - - } else if (kValue.value instanceof Integer) { - percentiles.add(((Integer) kValue.value).doubleValue()); - } - } - if (percentiles.size() > 0) { - double[] percentilesArr = new double[percentiles.size()]; - int i = 0; - for (Double percentile : percentiles) { - percentilesArr[i] = percentile; - i++; - } - percentilesBuilder.percentiles(percentilesArr); - } + } + + /** With {@link Where} Condition. 
*/ + public AggMaker withWhere(Where where) { + this.where = where; + return this; + } + + private void addSpecificPercentiles( + PercentilesAggregationBuilder percentilesBuilder, List params) { + List percentiles = new ArrayList<>(); + for (KVValue kValue : params) { + if (kValue.value.getClass().equals(BigDecimal.class)) { + BigDecimal percentile = (BigDecimal) kValue.value; + percentiles.add(percentile.doubleValue()); + + } else if (kValue.value instanceof Integer) { + percentiles.add(((Integer) kValue.value).doubleValue()); + } } - - private String fixAlias(String alias) { - //because [ is not legal as alias - return alias.replaceAll("\\[", "(").replaceAll("\\]", ")"); + if (percentiles.size() > 0) { + double[] percentilesArr = new double[percentiles.size()]; + int i = 0; + for (Double percentile : percentiles) { + percentilesArr[i] = percentile; + i++; + } + percentilesBuilder.percentiles(percentilesArr); } + } + + private String fixAlias(String alias) { + // because [ is not legal as alias + return alias.replaceAll("\\[", "(").replaceAll("\\]", ")"); + } + + private AggregationBuilder addFieldToAgg( + MethodField field, ValuesSourceAggregationBuilder builder) throws SqlParseException { + KVValue kvValue = field.getParams().get(0); + if (kvValue.key != null && kvValue.key.equals("script")) { + if (kvValue.value instanceof MethodField) { + return builder.script( + new Script(((MethodField) kvValue.value).getParams().get(1).toString())); + } else { + return builder.script(new Script(kvValue.value.toString())); + } + + } else if (kvValue.key != null && kvValue.value.toString().trim().startsWith("def")) { + return builder.script(new Script(kvValue.value.toString())); + } else if (kvValue.key != null + && (kvValue.key.equals("nested") || kvValue.key.equals("reverse_nested"))) { + NestedType nestedType = (NestedType) kvValue.value; + nestedType.addBucketPath(Path.getMetricPath(builder.getName())); + + if (nestedType.isNestedField()) { + builder.field("_index"); + } 
else { + builder.field(nestedType.field); + } + + AggregationBuilder nestedBuilder; + + String nestedAggName = nestedType.getNestedAggName(); + + if (nestedType.isReverse()) { + if (nestedType.path != null && nestedType.path.startsWith("~")) { + String realPath = nestedType.path.substring(1); + nestedBuilder = AggregationBuilders.nested(nestedAggName, realPath); + nestedBuilder = nestedBuilder.subAggregation(builder); + return AggregationBuilders.reverseNested(nestedAggName + "_REVERSED") + .subAggregation(nestedBuilder); + } else { + ReverseNestedAggregationBuilder reverseNestedAggregationBuilder = + AggregationBuilders.reverseNested(nestedAggName); + if (nestedType.path != null) { + reverseNestedAggregationBuilder.path(nestedType.path); + } + nestedBuilder = reverseNestedAggregationBuilder; + } + } else { + nestedBuilder = AggregationBuilders.nested(nestedAggName, nestedType.path); + } - private AggregationBuilder addFieldToAgg(MethodField field, ValuesSourceAggregationBuilder builder) - throws SqlParseException { - KVValue kvValue = field.getParams().get(0); - if (kvValue.key != null && kvValue.key.equals("script")) { - if (kvValue.value instanceof MethodField) { - return builder.script(new Script(((MethodField) kvValue.value).getParams().get(1).toString())); - } else { - return builder.script(new Script(kvValue.value.toString())); - } - - } else if (kvValue.key != null && kvValue.value.toString().trim().startsWith("def")) { - return builder.script(new Script(kvValue.value.toString())); - } else if (kvValue.key != null && (kvValue.key.equals("nested") || kvValue.key.equals("reverse_nested"))) { - NestedType nestedType = (NestedType) kvValue.value; - nestedType.addBucketPath(Path.getMetricPath(builder.getName())); - - if (nestedType.isNestedField()) { - builder.field("_index"); - } else { - builder.field(nestedType.field); - } - - AggregationBuilder nestedBuilder; - - String nestedAggName = nestedType.getNestedAggName(); - - if (nestedType.isReverse()) { - if 
(nestedType.path != null && nestedType.path.startsWith("~")) { - String realPath = nestedType.path.substring(1); - nestedBuilder = AggregationBuilders.nested(nestedAggName, realPath); - nestedBuilder = nestedBuilder.subAggregation(builder); - return AggregationBuilders.reverseNested(nestedAggName + "_REVERSED") - .subAggregation(nestedBuilder); - } else { - ReverseNestedAggregationBuilder reverseNestedAggregationBuilder = - AggregationBuilders.reverseNested(nestedAggName); - if (nestedType.path != null) { - reverseNestedAggregationBuilder.path(nestedType.path); - } - nestedBuilder = reverseNestedAggregationBuilder; - } - } else { - nestedBuilder = AggregationBuilders.nested(nestedAggName, nestedType.path); - } - - AggregationBuilder aggregation = nestedBuilder.subAggregation(wrapWithFilterAgg( - nestedType, - builder)); - nestedType.addBucketPath(Path.getAggPath(nestedBuilder.getName())); - return aggregation; - } else if (kvValue.key != null && (kvValue.key.equals("children"))) { - ChildrenType childrenType = (ChildrenType) kvValue.value; + AggregationBuilder aggregation = + nestedBuilder.subAggregation(wrapWithFilterAgg(nestedType, builder)); + nestedType.addBucketPath(Path.getAggPath(nestedBuilder.getName())); + return aggregation; + } else if (kvValue.key != null && (kvValue.key.equals("children"))) { + ChildrenType childrenType = (ChildrenType) kvValue.value; - builder.field(childrenType.field); + builder.field(childrenType.field); - AggregationBuilder childrenBuilder; + AggregationBuilder childrenBuilder; - String childrenAggName = childrenType.field + "@CHILDREN"; + String childrenAggName = childrenType.field + "@CHILDREN"; - childrenBuilder = JoinAggregationBuilders.children(childrenAggName, childrenType.childType); + childrenBuilder = JoinAggregationBuilders.children(childrenAggName, childrenType.childType); - return childrenBuilder; - } - - return builder.field(kvValue.toString()); + return childrenBuilder; } - private AggregationBuilder 
makeRangeGroup(MethodField field) throws SqlParseException { - switch (field.getName().toLowerCase()) { - case "range": - return rangeBuilder(field); - case "date_histogram": - return dateHistogram(field); - case "date_range": - case "month": - return dateRange(field); - case "histogram": - return histogram(field); - case "geohash_grid": - return geohashGrid(field); - case "geo_bounds": - return geoBounds(field); - case "terms": - return termsAgg(field); - default: - throw new SqlParseException("can define this method " + field); - } - + return builder.field(kvValue.toString()); + } + + private AggregationBuilder makeRangeGroup(MethodField field) throws SqlParseException { + switch (field.getName().toLowerCase()) { + case "range": + return rangeBuilder(field); + case "date_histogram": + return dateHistogram(field); + case "date_range": + case "month": + return dateRange(field); + case "histogram": + return histogram(field); + case "geohash_grid": + return geohashGrid(field); + case "geo_bounds": + return geoBounds(field); + case "terms": + return termsAgg(field); + default: + throw new SqlParseException("can define this method " + field); } - - private AggregationBuilder geoBounds(MethodField field) throws SqlParseException { - String aggName = gettAggNameFromParamsOrAlias(field); - GeoBoundsAggregationBuilder boundsBuilder = new GeoBoundsAggregationBuilder(aggName); - String value; - for (KVValue kv : field.getParams()) { - value = kv.value.toString(); - switch (kv.key.toLowerCase()) { - case "field": - boundsBuilder.field(value); - break; - case "wrap_longitude": - boundsBuilder.wrapLongitude(Boolean.getBoolean(value)); - break; - case "alias": - case "nested": - case "reverse_nested": - case "children": - break; - default: - throw new SqlParseException("geo_bounds err or not define field " + kv.toString()); - } - } - return boundsBuilder; + } + + private AggregationBuilder geoBounds(MethodField field) throws SqlParseException { + String aggName = 
gettAggNameFromParamsOrAlias(field); + GeoBoundsAggregationBuilder boundsBuilder = new GeoBoundsAggregationBuilder(aggName); + String value; + for (KVValue kv : field.getParams()) { + value = kv.value.toString(); + switch (kv.key.toLowerCase()) { + case "field": + boundsBuilder.field(value); + break; + case "wrap_longitude": + boundsBuilder.wrapLongitude(Boolean.getBoolean(value)); + break; + case "alias": + case "nested": + case "reverse_nested": + case "children": + break; + default: + throw new SqlParseException("geo_bounds err or not define field " + kv.toString()); + } } - - private AggregationBuilder termsAgg(MethodField field) throws SqlParseException { - String aggName = gettAggNameFromParamsOrAlias(field); - TermsAggregationBuilder terms = AggregationBuilders.terms(aggName); - String value; - IncludeExclude include = null, exclude = null; - for (KVValue kv : field.getParams()) { - if (kv.value.toString().contains("doc[")) { - String script = kv.value + "; return " + kv.key; - terms.script(new Script(script)); + return boundsBuilder; + } + + private AggregationBuilder termsAgg(MethodField field) throws SqlParseException { + String aggName = gettAggNameFromParamsOrAlias(field); + TermsAggregationBuilder terms = AggregationBuilders.terms(aggName); + String value; + IncludeExclude include = null, exclude = null; + for (KVValue kv : field.getParams()) { + if (kv.value.toString().contains("doc[")) { + String script = kv.value + "; return " + kv.key; + terms.script(new Script(script)); + } else { + value = kv.value.toString(); + switch (kv.key.toLowerCase()) { + case "field": + terms.field(value); + break; + case "size": + terms.size(Integer.parseInt(value)); + break; + case "shard_size": + terms.shardSize(Integer.parseInt(value)); + break; + case "min_doc_count": + terms.minDocCount(Integer.parseInt(value)); + break; + case "missing": + terms.missing(value); + break; + case "order": + if ("asc".equalsIgnoreCase(value)) { + terms.order(BucketOrder.key(true)); + } 
else if ("desc".equalsIgnoreCase(value)) { + terms.order(BucketOrder.key(false)); } else { - value = kv.value.toString(); - switch (kv.key.toLowerCase()) { - case "field": - terms.field(value); - break; - case "size": - terms.size(Integer.parseInt(value)); - break; - case "shard_size": - terms.shardSize(Integer.parseInt(value)); - break; - case "min_doc_count": - terms.minDocCount(Integer.parseInt(value)); - break; - case "missing": - terms.missing(value); - break; - case "order": - if ("asc".equalsIgnoreCase(value)) { - terms.order(BucketOrder.key(true)); - } else if ("desc".equalsIgnoreCase(value)) { - terms.order(BucketOrder.key(false)); - } else { - List orderElements = new ArrayList<>(); - try (JsonXContentParser parser = new JsonXContentParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, new JsonFactory().createParser(value))) { - XContentParser.Token currentToken = parser.nextToken(); - if (currentToken == XContentParser.Token.START_OBJECT) { - orderElements.add(InternalOrder.Parser.parseOrderParam(parser)); - } else if (currentToken == XContentParser.Token.START_ARRAY) { - for (currentToken = parser.nextToken(); - currentToken != XContentParser.Token.END_ARRAY; - currentToken = parser.nextToken()) { - if (currentToken == XContentParser.Token.START_OBJECT) { - orderElements.add(InternalOrder.Parser.parseOrderParam(parser)); - } else { - throw new ParsingException(parser.getTokenLocation(), - "Invalid token in order array"); - } - } - } - } catch (IOException e) { - throw new SqlParseException("couldn't parse order: " + e.getMessage()); - } - terms.order(orderElements); - } - break; - case "alias": - case "nested": - case "reverse_nested": - case "children": - break; - case "execution_hint": - terms.executionHint(value); - break; - case "include": - try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, value)) { - parser.nextToken(); - include = 
IncludeExclude.parseInclude(parser); - } catch (IOException e) { - throw new SqlParseException("parse include[" + value + "] error: " + e.getMessage()); - } - break; - case "exclude": - try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, value)) { - parser.nextToken(); - exclude = IncludeExclude.parseExclude(parser); - } catch (IOException e) { - throw new SqlParseException("parse exclude[" + value + "] error: " + e.getMessage()); - } - break; - default: - throw new SqlParseException("terms aggregation err or not define field " + kv.toString()); + List orderElements = new ArrayList<>(); + try (JsonXContentParser parser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(value))) { + XContentParser.Token currentToken = parser.nextToken(); + if (currentToken == XContentParser.Token.START_OBJECT) { + orderElements.add(InternalOrder.Parser.parseOrderParam(parser)); + } else if (currentToken == XContentParser.Token.START_ARRAY) { + for (currentToken = parser.nextToken(); + currentToken != XContentParser.Token.END_ARRAY; + currentToken = parser.nextToken()) { + if (currentToken == XContentParser.Token.START_OBJECT) { + orderElements.add(InternalOrder.Parser.parseOrderParam(parser)); + } else { + throw new ParsingException( + parser.getTokenLocation(), "Invalid token in order array"); + } + } } + } catch (IOException e) { + throw new SqlParseException("couldn't parse order: " + e.getMessage()); + } + terms.order(orderElements); } - } - terms.includeExclude(IncludeExclude.merge(include, exclude)); - return terms; - } - - private AbstractAggregationBuilder scriptedMetric(MethodField field) throws SqlParseException { - String aggName = gettAggNameFromParamsOrAlias(field); - ScriptedMetricAggregationBuilder scriptedMetricBuilder = AggregationBuilders.scriptedMetric(aggName); - Map scriptedMetricParams = 
field.getParamsAsMap(); - if (!scriptedMetricParams.containsKey("map_script") && !scriptedMetricParams.containsKey("map_script_id") - && !scriptedMetricParams.containsKey("map_script_file")) { - throw new SqlParseException( - "scripted metric parameters must contain map_script/map_script_id/map_script_file parameter"); - } - HashMap scriptAdditionalParams = new HashMap<>(); - HashMap reduceScriptAdditionalParams = new HashMap<>(); - for (Map.Entry param : scriptedMetricParams.entrySet()) { - String paramValue = param.getValue().toString(); - if (param.getKey().startsWith("@")) { - if (param.getKey().startsWith("@reduce_")) { - reduceScriptAdditionalParams.put(param.getKey().replace("@reduce_", ""), - param.getValue()); - } else { - scriptAdditionalParams.put(param.getKey().replace("@", ""), param.getValue()); - } - continue; + break; + case "alias": + case "nested": + case "reverse_nested": + case "children": + break; + case "execution_hint": + terms.executionHint(value); + break; + case "include": + try (XContentParser parser = + JsonXContent.jsonXContent.createParser( + NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, value)) { + parser.nextToken(); + include = IncludeExclude.parseInclude(parser); + } catch (IOException e) { + throw new SqlParseException("parse include[" + value + "] error: " + e.getMessage()); } - - switch (param.getKey().toLowerCase()) { - case "map_script": - scriptedMetricBuilder.mapScript(new Script(paramValue)); - break; - case "map_script_id": - scriptedMetricBuilder.mapScript(new Script(ScriptType.STORED, Script.DEFAULT_SCRIPT_LANG, - paramValue, new HashMap<>())); - break; - case "init_script": - scriptedMetricBuilder.initScript(new Script(paramValue)); - break; - case "init_script_id": - scriptedMetricBuilder.initScript(new Script(ScriptType.STORED, Script.DEFAULT_SCRIPT_LANG, - paramValue, new HashMap<>())); - break; - case "combine_script": - scriptedMetricBuilder.combineScript(new Script(paramValue)); - break; - case 
"combine_script_id": - scriptedMetricBuilder.combineScript(new Script(ScriptType.STORED, Script.DEFAULT_SCRIPT_LANG, - paramValue, new HashMap<>())); - break; - case "reduce_script": - scriptedMetricBuilder.reduceScript(new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, - paramValue, reduceScriptAdditionalParams)); - break; - case "reduce_script_id": - scriptedMetricBuilder.reduceScript(new Script(ScriptType.STORED, Script.DEFAULT_SCRIPT_LANG, - paramValue, reduceScriptAdditionalParams)); - break; - case "alias": - case "nested": - case "reverse_nested": - case "children": - break; - default: - throw new SqlParseException("scripted_metric err or not define field " + param.getKey()); + break; + case "exclude": + try (XContentParser parser = + JsonXContent.jsonXContent.createParser( + NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, value)) { + parser.nextToken(); + exclude = IncludeExclude.parseExclude(parser); + } catch (IOException e) { + throw new SqlParseException("parse exclude[" + value + "] error: " + e.getMessage()); } + break; + default: + throw new SqlParseException( + "terms aggregation err or not define field " + kv.toString()); } - if (scriptAdditionalParams.size() > 0) { - scriptAdditionalParams.put("_agg", new HashMap<>()); - scriptedMetricBuilder.params(scriptAdditionalParams); - } - - return scriptedMetricBuilder; + } } - - private AggregationBuilder geohashGrid(MethodField field) throws SqlParseException { - String aggName = gettAggNameFromParamsOrAlias(field); - GeoGridAggregationBuilder geoHashGrid = new GeoHashGridAggregationBuilder(aggName); - String value; - for (KVValue kv : field.getParams()) { - value = kv.value.toString(); - switch (kv.key.toLowerCase()) { - case "precision": - geoHashGrid.precision(Integer.parseInt(value)); - break; - case "field": - geoHashGrid.field(value); - break; - case "size": - geoHashGrid.size(Integer.parseInt(value)); - break; - case "shard_size": - 
geoHashGrid.shardSize(Integer.parseInt(value)); - break; - case "alias": - case "nested": - case "reverse_nested": - case "children": - break; - default: - throw new SqlParseException("geohash grid err or not define field " + kv.toString()); - } - } - return geoHashGrid; + terms.includeExclude(IncludeExclude.merge(include, exclude)); + return terms; + } + + private AbstractAggregationBuilder scriptedMetric(MethodField field) throws SqlParseException { + String aggName = gettAggNameFromParamsOrAlias(field); + ScriptedMetricAggregationBuilder scriptedMetricBuilder = + AggregationBuilders.scriptedMetric(aggName); + Map scriptedMetricParams = field.getParamsAsMap(); + if (!scriptedMetricParams.containsKey("map_script") + && !scriptedMetricParams.containsKey("map_script_id") + && !scriptedMetricParams.containsKey("map_script_file")) { + throw new SqlParseException( + "scripted metric parameters must contain map_script/map_script_id/map_script_file" + + " parameter"); } - - private static final String TIME_FARMAT = "yyyy-MM-dd HH:mm:ss"; - - private ValuesSourceAggregationBuilder dateRange(MethodField field) { - String alias = gettAggNameFromParamsOrAlias(field); - DateRangeAggregationBuilder dateRange = AggregationBuilders.dateRange(alias).format(TIME_FARMAT); - - String value; - List ranges = new ArrayList<>(); - for (KVValue kv : field.getParams()) { - value = kv.value.toString(); - if ("field".equals(kv.key)) { - dateRange.field(value); - } else if ("format".equals(kv.key)) { - dateRange.format(value); - } else if ("time_zone".equals(kv.key)) { - dateRange.timeZone(ZoneOffset.of(value)); - } else if ("from".equals(kv.key)) { - dateRange.addUnboundedFrom(kv.value.toString()); - } else if ("to".equals(kv.key)) { - dateRange.addUnboundedTo(kv.value.toString()); - } else if (!"alias".equals(kv.key) && !"nested".equals(kv.key) && !"children".equals(kv.key)) { - ranges.add(value); - } - } - - for (int i = 1; i < ranges.size(); i++) { - dateRange.addRange(ranges.get(i - 1), 
ranges.get(i)); + HashMap scriptAdditionalParams = new HashMap<>(); + HashMap reduceScriptAdditionalParams = new HashMap<>(); + for (Map.Entry param : scriptedMetricParams.entrySet()) { + String paramValue = param.getValue().toString(); + if (param.getKey().startsWith("@")) { + if (param.getKey().startsWith("@reduce_")) { + reduceScriptAdditionalParams.put( + param.getKey().replace("@reduce_", ""), param.getValue()); + } else { + scriptAdditionalParams.put(param.getKey().replace("@", ""), param.getValue()); } + continue; + } + + switch (param.getKey().toLowerCase()) { + case "map_script": + scriptedMetricBuilder.mapScript(new Script(paramValue)); + break; + case "map_script_id": + scriptedMetricBuilder.mapScript( + new Script( + ScriptType.STORED, Script.DEFAULT_SCRIPT_LANG, paramValue, new HashMap<>())); + break; + case "init_script": + scriptedMetricBuilder.initScript(new Script(paramValue)); + break; + case "init_script_id": + scriptedMetricBuilder.initScript( + new Script( + ScriptType.STORED, Script.DEFAULT_SCRIPT_LANG, paramValue, new HashMap<>())); + break; + case "combine_script": + scriptedMetricBuilder.combineScript(new Script(paramValue)); + break; + case "combine_script_id": + scriptedMetricBuilder.combineScript( + new Script( + ScriptType.STORED, Script.DEFAULT_SCRIPT_LANG, paramValue, new HashMap<>())); + break; + case "reduce_script": + scriptedMetricBuilder.reduceScript( + new Script( + ScriptType.INLINE, + Script.DEFAULT_SCRIPT_LANG, + paramValue, + reduceScriptAdditionalParams)); + break; + case "reduce_script_id": + scriptedMetricBuilder.reduceScript( + new Script( + ScriptType.STORED, + Script.DEFAULT_SCRIPT_LANG, + paramValue, + reduceScriptAdditionalParams)); + break; + case "alias": + case "nested": + case "reverse_nested": + case "children": + break; + default: + throw new SqlParseException("scripted_metric err or not define field " + param.getKey()); + } + } + if (scriptAdditionalParams.size() > 0) { + scriptAdditionalParams.put("_agg", new 
HashMap<>()); + scriptedMetricBuilder.params(scriptAdditionalParams); + } - return dateRange; + return scriptedMetricBuilder; + } + + private AggregationBuilder geohashGrid(MethodField field) throws SqlParseException { + String aggName = gettAggNameFromParamsOrAlias(field); + GeoGridAggregationBuilder geoHashGrid = new GeoHashGridAggregationBuilder(aggName); + String value; + for (KVValue kv : field.getParams()) { + value = kv.value.toString(); + switch (kv.key.toLowerCase()) { + case "precision": + geoHashGrid.precision(Integer.parseInt(value)); + break; + case "field": + geoHashGrid.field(value); + break; + case "size": + geoHashGrid.size(Integer.parseInt(value)); + break; + case "shard_size": + geoHashGrid.shardSize(Integer.parseInt(value)); + break; + case "alias": + case "nested": + case "reverse_nested": + case "children": + break; + default: + throw new SqlParseException("geohash grid err or not define field " + kv.toString()); + } + } + return geoHashGrid; + } + + private static final String TIME_FARMAT = "yyyy-MM-dd HH:mm:ss"; + + private ValuesSourceAggregationBuilder dateRange(MethodField field) { + String alias = gettAggNameFromParamsOrAlias(field); + DateRangeAggregationBuilder dateRange = + AggregationBuilders.dateRange(alias).format(TIME_FARMAT); + + String value; + List ranges = new ArrayList<>(); + for (KVValue kv : field.getParams()) { + value = kv.value.toString(); + if ("field".equals(kv.key)) { + dateRange.field(value); + } else if ("format".equals(kv.key)) { + dateRange.format(value); + } else if ("time_zone".equals(kv.key)) { + dateRange.timeZone(ZoneOffset.of(value)); + } else if ("from".equals(kv.key)) { + dateRange.addUnboundedFrom(kv.value.toString()); + } else if ("to".equals(kv.key)) { + dateRange.addUnboundedTo(kv.value.toString()); + } else if (!"alias".equals(kv.key) + && !"nested".equals(kv.key) + && !"children".equals(kv.key)) { + ranges.add(value); + } } - /** - * - * - * @param field - * @return - * @throws SqlParseException - */ 
- private DateHistogramAggregationBuilder dateHistogram(MethodField field) throws SqlParseException { - String alias = gettAggNameFromParamsOrAlias(field); - DateHistogramAggregationBuilder dateHistogram = AggregationBuilders.dateHistogram(alias).format(TIME_FARMAT); - String value; - for (KVValue kv : field.getParams()) { - if (kv.value.toString().contains("doc[")) { - String script = kv.value + "; return " + kv.key; - dateHistogram.script(new Script(script)); - } else { - value = kv.value.toString(); - switch (kv.key.toLowerCase()) { - case "interval": - dateHistogram.dateHistogramInterval(new DateHistogramInterval(kv.value.toString())); - break; - case "fixed_interval": - dateHistogram.fixedInterval(new DateHistogramInterval(kv.value.toString())); - break; - case "field": - dateHistogram.field(value); - break; - case "format": - dateHistogram.format(value); - break; - case "time_zone": - dateHistogram.timeZone(ZoneOffset.of(value)); - break; - case "min_doc_count": - dateHistogram.minDocCount(Long.parseLong(value)); - break; - case "order": - dateHistogram.order("desc".equalsIgnoreCase(value) ? 
BucketOrder.key(false) : - BucketOrder.key(true)); - break; - case "extended_bounds": - String[] bounds = value.split(":"); - if (bounds.length == 2) { - dateHistogram.extendedBounds(new LongBounds(bounds[0], bounds[1])); - } - break; - - case "alias": - case "nested": - case "reverse_nested": - case "children": - break; - default: - throw new SqlParseException("date range err or not define field " + kv.toString()); - } - } - } - return dateHistogram; + for (int i = 1; i < ranges.size(); i++) { + dateRange.addRange(ranges.get(i - 1), ranges.get(i)); } - private String gettAggNameFromParamsOrAlias(MethodField field) { - String alias = field.getAlias(); - for (KVValue kv : field.getParams()) { - if (kv.key != null && kv.key.equals("alias")) { - alias = kv.value.toString(); + return dateRange; + } + + /** + * @param field + * @return + * @throws SqlParseException + */ + private DateHistogramAggregationBuilder dateHistogram(MethodField field) + throws SqlParseException { + String alias = gettAggNameFromParamsOrAlias(field); + DateHistogramAggregationBuilder dateHistogram = + AggregationBuilders.dateHistogram(alias).format(TIME_FARMAT); + String value; + for (KVValue kv : field.getParams()) { + if (kv.value.toString().contains("doc[")) { + String script = kv.value + "; return " + kv.key; + dateHistogram.script(new Script(script)); + } else { + value = kv.value.toString(); + switch (kv.key.toLowerCase()) { + case "interval": + dateHistogram.dateHistogramInterval(new DateHistogramInterval(kv.value.toString())); + break; + case "fixed_interval": + dateHistogram.fixedInterval(new DateHistogramInterval(kv.value.toString())); + break; + case "field": + dateHistogram.field(value); + break; + case "format": + dateHistogram.format(value); + break; + case "time_zone": + dateHistogram.timeZone(ZoneOffset.of(value)); + break; + case "min_doc_count": + dateHistogram.minDocCount(Long.parseLong(value)); + break; + case "order": + dateHistogram.order( + "desc".equalsIgnoreCase(value) ? 
BucketOrder.key(false) : BucketOrder.key(true)); + break; + case "extended_bounds": + String[] bounds = value.split(":"); + if (bounds.length == 2) { + dateHistogram.extendedBounds(new LongBounds(bounds[0], bounds[1])); } + break; + + case "alias": + case "nested": + case "reverse_nested": + case "children": + break; + default: + throw new SqlParseException("date range err or not define field " + kv.toString()); } - return alias; + } } - - private HistogramAggregationBuilder histogram(MethodField field) throws SqlParseException { - String aggName = gettAggNameFromParamsOrAlias(field); - HistogramAggregationBuilder histogram = AggregationBuilders.histogram(aggName); - String value; - for (KVValue kv : field.getParams()) { - if (kv.value.toString().contains("doc[")) { - String script = kv.value + "; return " + kv.key; - histogram.script(new Script(script)); - } else { - value = kv.value.toString(); - switch (kv.key.toLowerCase()) { - case "interval": - histogram.interval(Long.parseLong(value)); - break; - case "field": - histogram.field(value); - break; - case "min_doc_count": - histogram.minDocCount(Long.parseLong(value)); - break; - case "extended_bounds": - String[] bounds = value.split(":"); - if (bounds.length == 2) { - histogram.extendedBounds(Long.valueOf(bounds[0]), Long.valueOf(bounds[1])); - } - break; - case "alias": - case "nested": - case "reverse_nested": - case "children": - break; - case "order": - final BucketOrder order; - switch (value) { - case "key_desc": - order = BucketOrder.key(false); - break; - case "count_asc": - order = BucketOrder.count(true); - break; - case "count_desc": - order = BucketOrder.count(false); - break; - case "key_asc": - default: - order = BucketOrder.key(true); - break; - } - histogram.order(order); - break; - default: - throw new SqlParseException("histogram err or not define field " + kv.toString()); - } + return dateHistogram; + } + + private String gettAggNameFromParamsOrAlias(MethodField field) { + String alias = 
field.getAlias(); + for (KVValue kv : field.getParams()) { + if (kv.key != null && kv.key.equals("alias")) { + alias = kv.value.toString(); + } + } + return alias; + } + + private HistogramAggregationBuilder histogram(MethodField field) throws SqlParseException { + String aggName = gettAggNameFromParamsOrAlias(field); + HistogramAggregationBuilder histogram = AggregationBuilders.histogram(aggName); + String value; + for (KVValue kv : field.getParams()) { + if (kv.value.toString().contains("doc[")) { + String script = kv.value + "; return " + kv.key; + histogram.script(new Script(script)); + } else { + value = kv.value.toString(); + switch (kv.key.toLowerCase()) { + case "interval": + histogram.interval(Long.parseLong(value)); + break; + case "field": + histogram.field(value); + break; + case "min_doc_count": + histogram.minDocCount(Long.parseLong(value)); + break; + case "extended_bounds": + String[] bounds = value.split(":"); + if (bounds.length == 2) { + histogram.extendedBounds(Long.valueOf(bounds[0]), Long.valueOf(bounds[1])); } + break; + case "alias": + case "nested": + case "reverse_nested": + case "children": + break; + case "order": + final BucketOrder order; + switch (value) { + case "key_desc": + order = BucketOrder.key(false); + break; + case "count_asc": + order = BucketOrder.count(true); + break; + case "count_desc": + order = BucketOrder.count(false); + break; + case "key_asc": + default: + order = BucketOrder.key(true); + break; + } + histogram.order(order); + break; + default: + throw new SqlParseException("histogram err or not define field " + kv.toString()); } - return histogram; + } } + return histogram; + } - /** - * - * - * @param field - * @return - */ - private RangeAggregationBuilder rangeBuilder(MethodField field) { - - // ignore alias param - LinkedList params = field.getParams().stream().filter(kv -> !"alias".equals(kv.key)) - .collect(Collectors.toCollection(LinkedList::new)); + /** + * @param field + * @return + */ + private 
RangeAggregationBuilder rangeBuilder(MethodField field) { - String fieldName = params.poll().toString(); + // ignore alias param + LinkedList params = + field.getParams().stream() + .filter(kv -> !"alias".equals(kv.key)) + .collect(Collectors.toCollection(LinkedList::new)); - double[] ds = Util.KV2DoubleArr(params); + String fieldName = params.poll().toString(); - RangeAggregationBuilder range = AggregationBuilders.range(field.getAlias()).field(fieldName); + double[] ds = Util.KV2DoubleArr(params); - for (int i = 1; i < ds.length; i++) { - range.addRange(ds[i - 1], ds[i]); - } + RangeAggregationBuilder range = AggregationBuilders.range(field.getAlias()).field(fieldName); - return range; + for (int i = 1; i < ds.length; i++) { + range.addRange(ds[i - 1], ds[i]); } + return range; + } + + /** + * Create count aggregation. + * + * @param field The count function + * @return AggregationBuilder use to count result + */ + private ValuesSourceAggregationBuilder makeCountAgg(MethodField field) { + + // Cardinality is approximate DISTINCT. + if (SQLAggregateOption.DISTINCT.equals(field.getOption())) { + + if (field.getParams().size() == 1) { + return AggregationBuilders.cardinality(field.getAlias()) + .field(field.getParams().get(0).value.toString()); + } else { + Integer precision_threshold = (Integer) (field.getParams().get(1).value); + return AggregationBuilders.cardinality(field.getAlias()) + .precisionThreshold(precision_threshold) + .field(field.getParams().get(0).value.toString()); + } + } - /** - * Create count aggregation. - * - * @param field The count function - * @return AggregationBuilder use to count result - */ - private ValuesSourceAggregationBuilder makeCountAgg(MethodField field) { - - // Cardinality is approximate DISTINCT. 
- if (SQLAggregateOption.DISTINCT.equals(field.getOption())) { - - if (field.getParams().size() == 1) { - return AggregationBuilders.cardinality(field.getAlias()).field(field.getParams().get(0).value - .toString()); - } else { - Integer precision_threshold = (Integer) (field.getParams().get(1).value); - return AggregationBuilders.cardinality(field.getAlias()).precisionThreshold(precision_threshold) - .field(field.getParams().get(0).value.toString()); - } - - } - - String fieldName = field.getParams().get(0).value.toString(); + String fieldName = field.getParams().get(0).value.toString(); - // In case of count(*) we use '_index' as field parameter to count all documents - if ("*".equals(fieldName)) { - KVValue kvValue = new KVValue(null, "_index"); - field.getParams().set(0, kvValue); - return AggregationBuilders.count(field.getAlias()).field(kvValue.toString()); - } else { - return AggregationBuilders.count(field.getAlias()).field(fieldName); - } + // In case of count(*) we use '_index' as field parameter to count all documents + if ("*".equals(fieldName)) { + KVValue kvValue = new KVValue(null, "_index"); + field.getParams().set(0, kvValue); + return AggregationBuilders.count(field.getAlias()).field(kvValue.toString()); + } else { + return AggregationBuilders.count(field.getAlias()).field(fieldName); } - - /** - * TOPHITS - * - * @param field - * @return - */ - private AbstractAggregationBuilder makeTopHitsAgg(MethodField field) { - String alias = gettAggNameFromParamsOrAlias(field); - TopHitsAggregationBuilder topHits = AggregationBuilders.topHits(alias); - List params = field.getParams(); - String[] include = null; - String[] exclude = null; - for (KVValue kv : params) { - switch (kv.key) { - case "from": - topHits.from((int) kv.value); - break; - case "size": - topHits.size((int) kv.value); - break; - case "include": - include = kv.value.toString().split(","); - break; - case "exclude": - exclude = kv.value.toString().split(","); - break; - case "alias": - case 
"nested": - case "reverse_nested": - case "children": - break; - default: - topHits.sort(kv.key, SortOrder.valueOf(kv.value.toString().toUpperCase())); - break; - } - } - if (include != null || exclude != null) { - topHits.fetchSource(include, exclude); - } - return topHits; + } + + /** + * TOPHITS + * + * @param field + * @return + */ + private AbstractAggregationBuilder makeTopHitsAgg(MethodField field) { + String alias = gettAggNameFromParamsOrAlias(field); + TopHitsAggregationBuilder topHits = AggregationBuilders.topHits(alias); + List params = field.getParams(); + String[] include = null; + String[] exclude = null; + for (KVValue kv : params) { + switch (kv.key) { + case "from": + topHits.from((int) kv.value); + break; + case "size": + topHits.size((int) kv.value); + break; + case "include": + include = kv.value.toString().split(","); + break; + case "exclude": + exclude = kv.value.toString().split(","); + break; + case "alias": + case "nested": + case "reverse_nested": + case "children": + break; + default: + topHits.sort(kv.key, SortOrder.valueOf(kv.value.toString().toUpperCase())); + break; + } } - - public Map getGroupMap() { - return this.groupMap; + if (include != null || exclude != null) { + topHits.fetchSource(include, exclude); } - - /** - * Wrap the Metric Aggregation with Filter Aggregation if necessary. - * The Filter Aggregation condition is constructed from the nested condition in where clause. - */ - private AggregationBuilder wrapWithFilterAgg(NestedType nestedType, ValuesSourceAggregationBuilder builder) - throws SqlParseException { - if (where != null && where.getWheres() != null) { - List nestedConditionList = where.getWheres().stream() - .filter(condition -> condition instanceof Condition) - .map(condition -> (Condition) condition) - .filter(condition -> condition.isNestedComplex() - || nestedType.path.equalsIgnoreCase(condition.getNestedPath())) - // ignore the OR condition on nested field. 
- .filter(condition -> CONN.AND.equals(condition.getConn())) - .collect(Collectors.toList()); - if (!nestedConditionList.isEmpty()) { - Where filterWhere = new Where(where.getConn()); - nestedConditionList.forEach(condition -> { - if (condition.isNestedComplex()) { - ((Where) condition.getValue()).getWheres().forEach(filterWhere::addWhere); - } else { - // Since the filter condition is used inside Nested Aggregation,remove the nested attribute. - condition.setNested(false); - condition.setNestedPath(""); - filterWhere.addWhere(condition); - } - }); - FilterAggregationBuilder filterAgg = AggregationBuilders.filter( - nestedType.getFilterAggName(), - QueryMaker.explain(filterWhere)); - nestedType.addBucketPath(Path.getAggPath(filterAgg.getName())); - return filterAgg.subAggregation(builder); - } - } - return builder; + return topHits; + } + + public Map getGroupMap() { + return this.groupMap; + } + + /** + * Wrap the Metric Aggregation with Filter Aggregation if necessary. The Filter Aggregation + * condition is constructed from the nested condition in where clause. + */ + private AggregationBuilder wrapWithFilterAgg( + NestedType nestedType, ValuesSourceAggregationBuilder builder) throws SqlParseException { + if (where != null && where.getWheres() != null) { + List nestedConditionList = + where.getWheres().stream() + .filter(condition -> condition instanceof Condition) + .map(condition -> (Condition) condition) + .filter( + condition -> + condition.isNestedComplex() + || nestedType.path.equalsIgnoreCase(condition.getNestedPath())) + // ignore the OR condition on nested field. 
+ .filter(condition -> CONN.AND.equals(condition.getConn())) + .collect(Collectors.toList()); + if (!nestedConditionList.isEmpty()) { + Where filterWhere = new Where(where.getConn()); + nestedConditionList.forEach( + condition -> { + if (condition.isNestedComplex()) { + ((Where) condition.getValue()).getWheres().forEach(filterWhere::addWhere); + } else { + // Since the filter condition is used inside Nested Aggregation,remove the nested + // attribute. + condition.setNested(false); + condition.setNestedPath(""); + filterWhere.addWhere(condition); + } + }); + FilterAggregationBuilder filterAgg = + AggregationBuilders.filter( + nestedType.getFilterAggName(), QueryMaker.explain(filterWhere)); + nestedType.addBucketPath(Path.getAggPath(filterAgg.getName())); + return filterAgg.subAggregation(builder); + } } - - /** - * The groupMap is used when parsing order by to find out the corresponding field in aggregation. - * There are two cases. - * 1) using alias in order by, e.g. SELECT COUNT(*) as c FROM T GROUP BY age ORDER BY c - * 2) using full name in order by, e.g. SELECT COUNT(*) as c FROM T GROUP BY age ORDER BY COUNT(*) - * Then, the groupMap should support these two cases by maintain the mapping of - * {alias, value} and {full_name, value} - */ - private void extendGroupMap(Field field, KVValue value) { - groupMap.put(field.toString(), value); - if (!StringUtils.isEmpty(field.getAlias())) { - groupMap.putIfAbsent(field.getAlias(), value); - } + return builder; + } + + /** + * The groupMap is used when parsing order by to find out the corresponding field in aggregation. + * There are two cases. 1) using alias in order by, e.g. SELECT COUNT(*) as c FROM T GROUP BY age + * ORDER BY c 2) using full name in order by, e.g. 
SELECT COUNT(*) as c FROM T GROUP BY age ORDER + * BY COUNT(*) Then, the groupMap should support these two cases by maintain the mapping of + * {alias, value} and {full_name, value} + */ + private void extendGroupMap(Field field, KVValue value) { + groupMap.put(field.toString(), value); + if (!StringUtils.isEmpty(field.getAlias())) { + groupMap.putIfAbsent(field.getAlias(), value); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/Maker.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/Maker.java index 08018d94de..302af70ea8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/Maker.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/Maker.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.maker; import static org.opensearch.sql.legacy.parser.WhereParser.getConditionForMethod; @@ -66,460 +65,472 @@ public abstract class Maker { - /** - * UTC. - */ - private static final ZoneId UTC = ZoneId.of("UTC"); - - public static final Object NONE = new Object(); - - public static final Set queryFunctions = Sets.newHashSet( - "query", - "matchquery", "match_query", // match - "multimatchquery", "multi_match", "multimatch", // multi-match - "score", "scorequery", "score_query", // score - "wildcardquery", "wildcard_query", // wildcard - "matchphrasequery", "match_phrase", "matchphrase" // match-phrase - ); - - private static final Set NOT_OPERATOR_SET = ImmutableSet.of( - Condition.OPERATOR.N, Condition.OPERATOR.NIN, Condition.OPERATOR.ISN, Condition.OPERATOR.NBETWEEN, - Condition.OPERATOR.NLIKE, Condition.OPERATOR.NIN_TERMS, Condition.OPERATOR.NTERM, - Condition.OPERATOR.NOT_EXISTS_NESTED_COMPLEX, Condition.OPERATOR.NREGEXP - ); - - protected Maker(Boolean isQuery) { - + /** UTC. 
*/ + private static final ZoneId UTC = ZoneId.of("UTC"); + + public static final Object NONE = new Object(); + + public static final Set queryFunctions = + Sets.newHashSet( + "query", + "matchquery", + "match_query", // match + "multimatchquery", + "multi_match", + "multimatch", // multi-match + "score", + "scorequery", + "score_query", // score + "wildcardquery", + "wildcard_query", // wildcard + "matchphrasequery", + "match_phrase", + "matchphrase" // match-phrase + ); + + private static final Set NOT_OPERATOR_SET = + ImmutableSet.of( + Condition.OPERATOR.N, + Condition.OPERATOR.NIN, + Condition.OPERATOR.ISN, + Condition.OPERATOR.NBETWEEN, + Condition.OPERATOR.NLIKE, + Condition.OPERATOR.NIN_TERMS, + Condition.OPERATOR.NTERM, + Condition.OPERATOR.NOT_EXISTS_NESTED_COMPLEX, + Condition.OPERATOR.NREGEXP); + + protected Maker(Boolean isQuery) {} + + /** + * @param cond + * @return + * @throws SqlParseException + */ + protected ToXContent make(Condition cond) throws SqlParseException { + + String name = cond.getName(); + Object value = cond.getValue(); + + ToXContent toXContent = null; + + if (value instanceof SQLMethodInvokeExpr) { + toXContent = make(cond, name, (SQLMethodInvokeExpr) value); + } else if (value instanceof SubQueryExpression) { + toXContent = make(cond, name, ((SubQueryExpression) value).getValues()); + } else { + if (cond.getValue() == NONE) { + toXContent = new MatchNoneQueryBuilder(); + } else { + toXContent = make(cond, name, value); + } } - /** - * - * - * @param cond - * @return - * @throws SqlParseException - */ - protected ToXContent make(Condition cond) throws SqlParseException { - - String name = cond.getName(); - Object value = cond.getValue(); - - ToXContent toXContent = null; - - if (value instanceof SQLMethodInvokeExpr) { - toXContent = make(cond, name, (SQLMethodInvokeExpr) value); - } else if (value instanceof SubQueryExpression) { - toXContent = make(cond, name, ((SubQueryExpression) value).getValues()); - } else { - if 
(cond.getValue() == NONE) { - toXContent = new MatchNoneQueryBuilder(); - } else { - toXContent = make(cond, name, value); - } + return toXContent; + } + + private ToXContent make(Condition cond, String name, SQLMethodInvokeExpr value) + throws SqlParseException { + ToXContent bqb = null; + Paramer paramer = null; + switch (value.getMethodName().toLowerCase()) { + case "query": + paramer = Paramer.parseParamer(value); + QueryStringQueryBuilder queryString = QueryBuilders.queryStringQuery(paramer.value); + bqb = Paramer.fullParamer(queryString, paramer); + bqb = applyNot(cond.getOPERATOR(), bqb); + break; + case "matchquery": + case "match_query": + paramer = Paramer.parseParamer(value); + MatchQueryBuilder matchQuery = QueryBuilders.matchQuery(name, paramer.value); + bqb = Paramer.fullParamer(matchQuery, paramer); + bqb = applyNot(cond.getOPERATOR(), bqb); + break; + case "score": + case "scorequery": + case "score_query": + Float boost = Float.parseFloat(value.getParameters().get(1).toString()); + Condition subCond = getConditionForMethod(value.getParameters().get(0), cond.getConn()); + QueryBuilder subQuery = (QueryBuilder) make(subCond); + if (subCond.isNested()) { + subQuery = QueryBuilders.nestedQuery(subCond.getNestedPath(), subQuery, ScoreMode.None); } - - return toXContent; + bqb = QueryBuilders.constantScoreQuery(subQuery).boost(boost); + break; + case "wildcardquery": + case "wildcard_query": + paramer = Paramer.parseParamer(value); + WildcardQueryBuilder wildcardQuery = QueryBuilders.wildcardQuery(name, paramer.value); + bqb = Paramer.fullParamer(wildcardQuery, paramer); + break; + + case "matchphrasequery": + case "match_phrase": + case "matchphrase": + paramer = Paramer.parseParamer(value); + MatchPhraseQueryBuilder matchPhraseQuery = + QueryBuilders.matchPhraseQuery(name, paramer.value); + bqb = Paramer.fullParamer(matchPhraseQuery, paramer); + break; + + case "multimatchquery": + case "multi_match": + case "multimatch": + paramer = 
Paramer.parseParamer(value); + MultiMatchQueryBuilder multiMatchQuery = + QueryBuilders.multiMatchQuery(paramer.value).fields(paramer.fieldsBoosts); + bqb = Paramer.fullParamer(multiMatchQuery, paramer); + break; + default: + throw new SqlParseException( + "The following query method is not supported: " + value.getMethodName()); } - private ToXContent make(Condition cond, String name, SQLMethodInvokeExpr value) throws SqlParseException { - ToXContent bqb = null; - Paramer paramer = null; - switch (value.getMethodName().toLowerCase()) { - case "query": - paramer = Paramer.parseParamer(value); - QueryStringQueryBuilder queryString = QueryBuilders.queryStringQuery(paramer.value); - bqb = Paramer.fullParamer(queryString, paramer); - bqb = applyNot(cond.getOPERATOR(), bqb); - break; - case "matchquery": - case "match_query": - paramer = Paramer.parseParamer(value); - MatchQueryBuilder matchQuery = QueryBuilders.matchQuery(name, paramer.value); - bqb = Paramer.fullParamer(matchQuery, paramer); - bqb = applyNot(cond.getOPERATOR(), bqb); - break; - case "score": - case "scorequery": - case "score_query": - Float boost = Float.parseFloat(value.getParameters().get(1).toString()); - Condition subCond = getConditionForMethod(value.getParameters().get(0), cond.getConn()); - QueryBuilder subQuery = (QueryBuilder) make(subCond); - if (subCond.isNested()) { - subQuery = QueryBuilders.nestedQuery(subCond.getNestedPath(), subQuery, ScoreMode.None); - } - bqb = QueryBuilders.constantScoreQuery(subQuery).boost(boost); - break; - case "wildcardquery": - case "wildcard_query": - paramer = Paramer.parseParamer(value); - WildcardQueryBuilder wildcardQuery = QueryBuilders.wildcardQuery(name, paramer.value); - bqb = Paramer.fullParamer(wildcardQuery, paramer); - break; - - case "matchphrasequery": - case "match_phrase": - case "matchphrase": - paramer = Paramer.parseParamer(value); - MatchPhraseQueryBuilder matchPhraseQuery = QueryBuilders.matchPhraseQuery(name, paramer.value); - bqb = 
Paramer.fullParamer(matchPhraseQuery, paramer); - break; - - case "multimatchquery": - case "multi_match": - case "multimatch": - paramer = Paramer.parseParamer(value); - MultiMatchQueryBuilder multiMatchQuery = QueryBuilders.multiMatchQuery(paramer.value) - .fields(paramer.fieldsBoosts); - bqb = Paramer.fullParamer(multiMatchQuery, paramer); - break; - default: - throw new SqlParseException("The following query method is not supported: " + value.getMethodName()); + return bqb; + } + + private ToXContent make(Condition cond, String name, Object value) throws SqlParseException { + ToXContent toXContent = null; + switch (cond.getOPERATOR()) { + case ISN: + case IS: + case N: + case EQ: + if (value == null || value instanceof SQLIdentifierExpr) { + // todo: change to exists + if (value == null || ((SQLIdentifierExpr) value).getName().equalsIgnoreCase("missing")) { + toXContent = QueryBuilders.boolQuery().mustNot(QueryBuilders.existsQuery(name)); + } else { + throw new SqlParseException( + String.format( + "Cannot recoginze Sql identifer %s", ((SQLIdentifierExpr) value).getName())); + } + break; + } else { + toXContent = QueryBuilders.termQuery(name, value); + break; } - - return bqb; - } - - private ToXContent make(Condition cond, String name, Object value) throws SqlParseException { - ToXContent toXContent = null; - switch (cond.getOPERATOR()) { - case ISN: - case IS: - case N: - case EQ: - if (value == null || value instanceof SQLIdentifierExpr) { - //todo: change to exists - if (value == null || ((SQLIdentifierExpr) value).getName().equalsIgnoreCase("missing")) { - toXContent = QueryBuilders.boolQuery().mustNot(QueryBuilders.existsQuery(name)); - } else { - throw new SqlParseException(String.format("Cannot recoginze Sql identifer %s", - ((SQLIdentifierExpr) value).getName())); - } - break; - } else { - toXContent = QueryBuilders.termQuery(name, value); - break; - } - case LIKE: - case NLIKE: - String queryStr = ((String) value); - queryStr = queryStr.replace('%', 
'*').replace('_', '?'); - queryStr = queryStr.replace("&PERCENT", "%").replace("&UNDERSCORE", "_"); - toXContent = QueryBuilders.wildcardQuery(name, queryStr); - break; - case REGEXP: - case NREGEXP: - Object[] values = (Object[]) value; - RegexpQueryBuilder regexpQuery = QueryBuilders.regexpQuery(name, values[0].toString()); - if (1 < values.length) { - String[] flags = values[1].toString().split("\\|"); - RegexpFlag[] regexpFlags = new RegexpFlag[flags.length]; - for (int i = 0; i < flags.length; ++i) { - regexpFlags[i] = RegexpFlag.valueOf(flags[i]); - } - regexpQuery.flags(regexpFlags); - } - if (2 < values.length) { - regexpQuery.maxDeterminizedStates(Integer.parseInt(values[2].toString())); - } - toXContent = regexpQuery; - break; - case GT: - toXContent = QueryBuilders.rangeQuery(name).gt(value); - break; - case GTE: - toXContent = QueryBuilders.rangeQuery(name).gte(value); - break; - case LT: - toXContent = QueryBuilders.rangeQuery(name).lt(value); - break; - case LTE: - toXContent = QueryBuilders.rangeQuery(name).lte(value); - break; - case NIN: - case IN: - //todo: value is subquery? 
here or before - values = (Object[]) value; - TermQueryBuilder[] termQueries = new TermQueryBuilder[values.length]; - for (int i = 0; i < values.length; i++) { - termQueries[i] = QueryBuilders.termQuery(name, values[i]); - } - - BoolQueryBuilder boolQuery = QueryBuilders.boolQuery(); - for (TermQueryBuilder termQuery : termQueries) { - boolQuery.should(termQuery); - } - toXContent = boolQuery; - break; - case BETWEEN: - case NBETWEEN: - toXContent = QueryBuilders.rangeQuery(name).gte(((Object[]) value)[0]).lte(((Object[]) value)[1]); - break; - case GEO_INTERSECTS: - String wkt = cond.getValue().toString(); - try { - ShapeBuilder shapeBuilder = getShapeBuilderFromString(wkt); - toXContent = QueryBuilders.geoShapeQuery(cond.getName(), shapeBuilder); - } catch (IOException e) { - e.printStackTrace(); - throw new SqlParseException(StringUtils.format("Failed to create shapeBuilder from [%s]", wkt)); - } - break; - case GEO_BOUNDING_BOX: - BoundingBoxFilterParams boxFilterParams = (BoundingBoxFilterParams) cond.getValue(); - Point topLeft = boxFilterParams.getTopLeft(); - Point bottomRight = boxFilterParams.getBottomRight(); - toXContent = QueryBuilders.geoBoundingBoxQuery(cond.getName()).setCorners(topLeft.getLat(), - topLeft.getLon(), bottomRight.getLat(), bottomRight.getLon()); - break; - case GEO_DISTANCE: - DistanceFilterParams distanceFilterParams = (DistanceFilterParams) cond.getValue(); - Point fromPoint = distanceFilterParams.getFrom(); - String distance = trimApostrophes(distanceFilterParams.getDistance()); - toXContent = QueryBuilders.geoDistanceQuery(cond.getName()).distance(distance) - .point(fromPoint.getLat(), fromPoint.getLon()); - break; - case GEO_POLYGON: - PolygonFilterParams polygonFilterParams = (PolygonFilterParams) cond.getValue(); - ArrayList geoPoints = new ArrayList(); - for (Point p : polygonFilterParams.getPolygon()) { - geoPoints.add(new GeoPoint(p.getLat(), p.getLon())); - } - GeoPolygonQueryBuilder polygonFilterBuilder = 
QueryBuilders.geoPolygonQuery(cond.getName(), geoPoints); - toXContent = polygonFilterBuilder; - break; - case NIN_TERMS: - case IN_TERMS: - Object[] termValues = (Object[]) value; - if (termValues.length == 1 && termValues[0] instanceof SubQueryExpression) { - termValues = ((SubQueryExpression) termValues[0]).getValues(); - } - Object[] termValuesObjects = new Object[termValues.length]; - for (int i = 0; i < termValues.length; i++) { - termValuesObjects[i] = parseTermValue(termValues[i]); - } - toXContent = QueryBuilders.termsQuery(name, termValuesObjects); - break; - case NTERM: - case TERM: - Object term = ((Object[]) value)[0]; - toXContent = QueryBuilders.termQuery(name, parseTermValue(term)); - break; - case IDS_QUERY: - Object[] idsParameters = (Object[]) value; - String[] ids; - if (idsParameters.length == 2 && idsParameters[1] instanceof SubQueryExpression) { - Object[] idsFromSubQuery = ((SubQueryExpression) idsParameters[1]).getValues(); - ids = arrayOfObjectsToStringArray(idsFromSubQuery, 0, idsFromSubQuery.length - 1); - } else { - ids = arrayOfObjectsToStringArray(idsParameters, 1, idsParameters.length - 1); - } - toXContent = QueryBuilders.idsQuery().addIds(ids); - break; - case NESTED_COMPLEX: - case NOT_EXISTS_NESTED_COMPLEX: - if (value == null || !(value instanceof Where)) { - throw new SqlParseException("unsupported nested condition"); - } - - Where whereNested = (Where) value; - BoolQueryBuilder nestedFilter = QueryMaker.explain(whereNested); - - toXContent = QueryBuilders.nestedQuery(name, nestedFilter, ScoreMode.None); - break; - case CHILDREN_COMPLEX: - if (value == null || !(value instanceof Where)) { - throw new SqlParseException("unsupported nested condition"); - } - - Where whereChildren = (Where) value; - BoolQueryBuilder childrenFilter = QueryMaker.explain(whereChildren); - //todo: pass score mode - toXContent = JoinQueryBuilders.hasChildQuery(name, childrenFilter, ScoreMode.None); - - break; - case SCRIPT: - ScriptFilter scriptFilter 
= (ScriptFilter) value; - Map params = new HashMap<>(); - if (scriptFilter.containsParameters()) { - params = scriptFilter.getArgs(); - } - - SQLExpr nameExpr = cond.getNameExpr(); - SQLExpr valueExpr = cond.getValueExpr(); - if (nameExpr instanceof SQLMethodInvokeExpr - && ((SQLMethodInvokeExpr) nameExpr).getMethodName().equalsIgnoreCase("date_format")) { - toXContent = makeForDateFormat((SQLMethodInvokeExpr) nameExpr, (SQLCharExpr) valueExpr); - } else { - toXContent = QueryBuilders.scriptQuery( - new Script( - scriptFilter.getScriptType(), - Script.DEFAULT_SCRIPT_LANG, - scriptFilter.getScript(), - params)); - } - break; - default: - throw new SqlParseException("Undefined condition: " + cond.getName()); + case LIKE: + case NLIKE: + String queryStr = ((String) value); + queryStr = queryStr.replace('%', '*').replace('_', '?'); + queryStr = queryStr.replace("&PERCENT", "%").replace("&UNDERSCORE", "_"); + toXContent = QueryBuilders.wildcardQuery(name, queryStr); + break; + case REGEXP: + case NREGEXP: + Object[] values = (Object[]) value; + RegexpQueryBuilder regexpQuery = QueryBuilders.regexpQuery(name, values[0].toString()); + if (1 < values.length) { + String[] flags = values[1].toString().split("\\|"); + RegexpFlag[] regexpFlags = new RegexpFlag[flags.length]; + for (int i = 0; i < flags.length; ++i) { + regexpFlags[i] = RegexpFlag.valueOf(flags[i]); + } + regexpQuery.flags(regexpFlags); + } + if (2 < values.length) { + regexpQuery.maxDeterminizedStates(Integer.parseInt(values[2].toString())); + } + toXContent = regexpQuery; + break; + case GT: + toXContent = QueryBuilders.rangeQuery(name).gt(value); + break; + case GTE: + toXContent = QueryBuilders.rangeQuery(name).gte(value); + break; + case LT: + toXContent = QueryBuilders.rangeQuery(name).lt(value); + break; + case LTE: + toXContent = QueryBuilders.rangeQuery(name).lte(value); + break; + case NIN: + case IN: + // todo: value is subquery? 
here or before + values = (Object[]) value; + TermQueryBuilder[] termQueries = new TermQueryBuilder[values.length]; + for (int i = 0; i < values.length; i++) { + termQueries[i] = QueryBuilders.termQuery(name, values[i]); } - toXContent = applyNot(cond.getOPERATOR(), toXContent); - return toXContent; - } - - public static boolean isQueryFunction(String methodName) { - return queryFunctions.contains(methodName.toLowerCase()); - } - - /** - * Helper method used to form a range query object for the date_format function. - *

- * Example: WHERE date_format(dateField, "YYYY-MM-dd") > "2012-01-01" - * Expected range query: - * "range": { - * "dateField": { - * "from": "2012-01-01", - * "to": null, - * "include_lower": false, - * "include_upper": true, - * "time_zone": "America/Los_Angeles", - * "format": "YYYY-MM-dd", - * "boost": 1 - * } - * } - * - * @param nameExpr SQL method expression (ex. date_format(dateField, "YYYY-MM-dd")) - * @param valueExpr Value expression being compared to the SQL method result (ex. "2012-01-01") - * @throws SqlParseException - */ - private ToXContent makeForDateFormat(SQLMethodInvokeExpr nameExpr, SQLCharExpr valueExpr) throws SqlParseException { - ToXContent toXContent = null; - List params = nameExpr.getParameters(); - - String field = params.get(0).toString(); - String format = removeSingleQuote(params.get(1).toString()); - String dateToCompare = valueExpr.getText(); - String oper = ((SQLBinaryOpExpr) nameExpr.getParent()).getOperator().name; - - String zoneId; - if (params.size() > 2) { - zoneId = ZoneId.of(removeSingleQuote(params.get(2).toString())).toString(); + BoolQueryBuilder boolQuery = QueryBuilders.boolQuery(); + for (TermQueryBuilder termQuery : termQueries) { + boolQuery.should(termQuery); + } + toXContent = boolQuery; + break; + case BETWEEN: + case NBETWEEN: + toXContent = + QueryBuilders.rangeQuery(name).gte(((Object[]) value)[0]).lte(((Object[]) value)[1]); + break; + case GEO_INTERSECTS: + String wkt = cond.getValue().toString(); + try { + ShapeBuilder shapeBuilder = getShapeBuilderFromString(wkt); + toXContent = QueryBuilders.geoShapeQuery(cond.getName(), shapeBuilder); + } catch (IOException e) { + e.printStackTrace(); + throw new SqlParseException( + StringUtils.format("Failed to create shapeBuilder from [%s]", wkt)); + } + break; + case GEO_BOUNDING_BOX: + BoundingBoxFilterParams boxFilterParams = (BoundingBoxFilterParams) cond.getValue(); + Point topLeft = boxFilterParams.getTopLeft(); + Point bottomRight = 
boxFilterParams.getBottomRight(); + toXContent = + QueryBuilders.geoBoundingBoxQuery(cond.getName()) + .setCorners( + topLeft.getLat(), topLeft.getLon(), bottomRight.getLat(), bottomRight.getLon()); + break; + case GEO_DISTANCE: + DistanceFilterParams distanceFilterParams = (DistanceFilterParams) cond.getValue(); + Point fromPoint = distanceFilterParams.getFrom(); + String distance = trimApostrophes(distanceFilterParams.getDistance()); + toXContent = + QueryBuilders.geoDistanceQuery(cond.getName()) + .distance(distance) + .point(fromPoint.getLat(), fromPoint.getLon()); + break; + case GEO_POLYGON: + PolygonFilterParams polygonFilterParams = (PolygonFilterParams) cond.getValue(); + ArrayList geoPoints = new ArrayList(); + for (Point p : polygonFilterParams.getPolygon()) { + geoPoints.add(new GeoPoint(p.getLat(), p.getLon())); + } + GeoPolygonQueryBuilder polygonFilterBuilder = + QueryBuilders.geoPolygonQuery(cond.getName(), geoPoints); + toXContent = polygonFilterBuilder; + break; + case NIN_TERMS: + case IN_TERMS: + Object[] termValues = (Object[]) value; + if (termValues.length == 1 && termValues[0] instanceof SubQueryExpression) { + termValues = ((SubQueryExpression) termValues[0]).getValues(); + } + Object[] termValuesObjects = new Object[termValues.length]; + for (int i = 0; i < termValues.length; i++) { + termValuesObjects[i] = parseTermValue(termValues[i]); + } + toXContent = QueryBuilders.termsQuery(name, termValuesObjects); + break; + case NTERM: + case TERM: + Object term = ((Object[]) value)[0]; + toXContent = QueryBuilders.termQuery(name, parseTermValue(term)); + break; + case IDS_QUERY: + Object[] idsParameters = (Object[]) value; + String[] ids; + if (idsParameters.length == 2 && idsParameters[1] instanceof SubQueryExpression) { + Object[] idsFromSubQuery = ((SubQueryExpression) idsParameters[1]).getValues(); + ids = arrayOfObjectsToStringArray(idsFromSubQuery, 0, idsFromSubQuery.length - 1); } else { - // Using UTC, if there is no Zone provided. 
- zoneId = UTC.getId(); + ids = arrayOfObjectsToStringArray(idsParameters, 1, idsParameters.length - 1); } - - RangeQueryBuilder rangeQuery = QueryBuilders.rangeQuery(field).format(format).timeZone(zoneId); - switch (oper) { - case "<>": - case "=": - toXContent = rangeQuery.gte(dateToCompare).lte(dateToCompare); - break; - case ">": - toXContent = rangeQuery.gt(dateToCompare); - break; - case "<": - toXContent = rangeQuery.lt(dateToCompare); - break; - case ">=": - toXContent = rangeQuery.gte(dateToCompare); - break; - case "<=": - toXContent = rangeQuery.lte(dateToCompare); - break; - case "BETWEEN": - case "NOT BETWEEN": - //todo: Add support for BETWEEN - break; - default: - throw new SqlParseException("date_format does not support the operation " + oper); + toXContent = QueryBuilders.idsQuery().addIds(ids); + break; + case NESTED_COMPLEX: + case NOT_EXISTS_NESTED_COMPLEX: + if (value == null || !(value instanceof Where)) { + throw new SqlParseException("unsupported nested condition"); } - toXContent = applyNot(Condition.OPERATOR.operStringToOpear.get(oper), toXContent); - return toXContent; - } + Where whereNested = (Where) value; + BoolQueryBuilder nestedFilter = QueryMaker.explain(whereNested); - private String removeSingleQuote(String param) { - return param.replaceAll("\'", ""); - } + toXContent = QueryBuilders.nestedQuery(name, nestedFilter, ScoreMode.None); + break; + case CHILDREN_COMPLEX: + if (value == null || !(value instanceof Where)) { + throw new SqlParseException("unsupported nested condition"); + } - private String[] arrayOfObjectsToStringArray(Object[] values, int from, int to) { - String[] strings = new String[to - from + 1]; - int counter = 0; - for (int i = from; i <= to; i++) { - strings[counter] = values[i].toString(); - counter++; + Where whereChildren = (Where) value; + BoolQueryBuilder childrenFilter = QueryMaker.explain(whereChildren); + // todo: pass score mode + toXContent = JoinQueryBuilders.hasChildQuery(name, childrenFilter, 
ScoreMode.None); + + break; + case SCRIPT: + ScriptFilter scriptFilter = (ScriptFilter) value; + Map params = new HashMap<>(); + if (scriptFilter.containsParameters()) { + params = scriptFilter.getArgs(); } - return strings; - } - private ShapeBuilder getShapeBuilderFromString(String str) throws IOException, SqlParseException { - String json; - if (str.contains("{")) { - json = fixJsonFromElastic(str); + SQLExpr nameExpr = cond.getNameExpr(); + SQLExpr valueExpr = cond.getValueExpr(); + if (nameExpr instanceof SQLMethodInvokeExpr + && ((SQLMethodInvokeExpr) nameExpr).getMethodName().equalsIgnoreCase("date_format")) { + toXContent = makeForDateFormat((SQLMethodInvokeExpr) nameExpr, (SQLCharExpr) valueExpr); } else { - json = WktToGeoJsonConverter.toGeoJson(trimApostrophes(str)); + toXContent = + QueryBuilders.scriptQuery( + new Script( + scriptFilter.getScriptType(), + Script.DEFAULT_SCRIPT_LANG, + scriptFilter.getScript(), + params)); } - - return getShapeBuilderFromJson(json); + break; + default: + throw new SqlParseException("Undefined condition: " + cond.getName()); } - /* - * elastic sends {coordinates=[[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]]], - * type=Polygon} - * proper form is {"coordinates":[[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]]], - * "type":"Polygon"} - * */ - private String fixJsonFromElastic(String elasticJson) { - String properJson = elasticJson.replaceAll("=", ":"); - properJson = properJson.replaceAll("(type)(:)([a-zA-Z]+)", "\"type\":\"$3\""); - properJson = properJson.replaceAll("coordinates", "\"coordinates\""); - return properJson; + toXContent = applyNot(cond.getOPERATOR(), toXContent); + return toXContent; + } + + public static boolean isQueryFunction(String methodName) { + return queryFunctions.contains(methodName.toLowerCase()); + } + + /** + * Helper method used to form a range query object for the date_format function. + * + *

Example: WHERE date_format(dateField, "YYYY-MM-dd") > "2012-01-01" Expected range query: + * "range": { "dateField": { "from": "2012-01-01", "to": null, "include_lower": false, + * "include_upper": true, "time_zone": "America/Los_Angeles", "format": "YYYY-MM-dd", "boost": 1 } + * } + * + * @param nameExpr SQL method expression (ex. date_format(dateField, "YYYY-MM-dd")) + * @param valueExpr Value expression being compared to the SQL method result (ex. "2012-01-01") + * @throws SqlParseException + */ + private ToXContent makeForDateFormat(SQLMethodInvokeExpr nameExpr, SQLCharExpr valueExpr) + throws SqlParseException { + ToXContent toXContent = null; + List params = nameExpr.getParameters(); + + String field = params.get(0).toString(); + String format = removeSingleQuote(params.get(1).toString()); + String dateToCompare = valueExpr.getText(); + String oper = ((SQLBinaryOpExpr) nameExpr.getParent()).getOperator().name; + + String zoneId; + if (params.size() > 2) { + zoneId = ZoneId.of(removeSingleQuote(params.get(2).toString())).toString(); + } else { + // Using UTC, if there is no Zone provided. 
+ zoneId = UTC.getId(); } - private ShapeBuilder getShapeBuilderFromJson(String json) throws IOException { - XContentParser parser = null; - parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, json); - parser.nextToken(); - return ShapeParser.parse(parser); + RangeQueryBuilder rangeQuery = QueryBuilders.rangeQuery(field).format(format).timeZone(zoneId); + switch (oper) { + case "<>": + case "=": + toXContent = rangeQuery.gte(dateToCompare).lte(dateToCompare); + break; + case ">": + toXContent = rangeQuery.gt(dateToCompare); + break; + case "<": + toXContent = rangeQuery.lt(dateToCompare); + break; + case ">=": + toXContent = rangeQuery.gte(dateToCompare); + break; + case "<=": + toXContent = rangeQuery.lte(dateToCompare); + break; + case "BETWEEN": + case "NOT BETWEEN": + // todo: Add support for BETWEEN + break; + default: + throw new SqlParseException("date_format does not support the operation " + oper); } - private String trimApostrophes(String str) { - return str.substring(1, str.length() - 1); - } + toXContent = applyNot(Condition.OPERATOR.operStringToOpear.get(oper), toXContent); + return toXContent; + } - /** - * Applies negation to query builder if the operation is a "not" operation. 
- */ - private ToXContent applyNot(Condition.OPERATOR OPERATOR, ToXContent bqb) { - if (NOT_OPERATOR_SET.contains(OPERATOR)) { - bqb = QueryBuilders.boolQuery().mustNot((QueryBuilder) bqb); - } - return bqb; - } + private String removeSingleQuote(String param) { + return param.replaceAll("\'", ""); + } - private Object parseTermValue(Object termValue) { - if (termValue instanceof SQLNumericLiteralExpr) { - termValue = ((SQLNumericLiteralExpr) termValue).getNumber(); - if (termValue instanceof BigDecimal || termValue instanceof Double) { - termValue = ((Number) termValue).doubleValue(); - } else if (termValue instanceof Float) { - termValue = ((Number) termValue).floatValue(); - } else if (termValue instanceof BigInteger || termValue instanceof Long) { - termValue = ((Number) termValue).longValue(); - } else if (termValue instanceof Integer) { - termValue = ((Number) termValue).intValue(); - } else if (termValue instanceof Short) { - termValue = ((Number) termValue).shortValue(); - } else if (termValue instanceof Byte) { - termValue = ((Number) termValue).byteValue(); - } - } else if (termValue instanceof SQLBooleanExpr) { - termValue = ((SQLBooleanExpr) termValue).getValue(); - } else { - termValue = termValue.toString(); - } + private String[] arrayOfObjectsToStringArray(Object[] values, int from, int to) { + String[] strings = new String[to - from + 1]; + int counter = 0; + for (int i = from; i <= to; i++) { + strings[counter] = values[i].toString(); + counter++; + } + return strings; + } + + private ShapeBuilder getShapeBuilderFromString(String str) throws IOException, SqlParseException { + String json; + if (str.contains("{")) { + json = fixJsonFromElastic(str); + } else { + json = WktToGeoJsonConverter.toGeoJson(trimApostrophes(str)); + } - return termValue; + return getShapeBuilderFromJson(json); + } + + /* + * elastic sends {coordinates=[[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]]], + * type=Polygon} + * proper form is 
{"coordinates":[[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]]], + * "type":"Polygon"} + * */ + private String fixJsonFromElastic(String elasticJson) { + String properJson = elasticJson.replaceAll("=", ":"); + properJson = properJson.replaceAll("(type)(:)([a-zA-Z]+)", "\"type\":\"$3\""); + properJson = properJson.replaceAll("coordinates", "\"coordinates\""); + return properJson; + } + + private ShapeBuilder getShapeBuilderFromJson(String json) throws IOException { + XContentParser parser = null; + parser = + JsonXContent.jsonXContent.createParser( + NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, json); + parser.nextToken(); + return ShapeParser.parse(parser); + } + + private String trimApostrophes(String str) { + return str.substring(1, str.length() - 1); + } + + /** Applies negation to query builder if the operation is a "not" operation. */ + private ToXContent applyNot(Condition.OPERATOR OPERATOR, ToXContent bqb) { + if (NOT_OPERATOR_SET.contains(OPERATOR)) { + bqb = QueryBuilders.boolQuery().mustNot((QueryBuilder) bqb); } + return bqb; + } + + private Object parseTermValue(Object termValue) { + if (termValue instanceof SQLNumericLiteralExpr) { + termValue = ((SQLNumericLiteralExpr) termValue).getNumber(); + if (termValue instanceof BigDecimal || termValue instanceof Double) { + termValue = ((Number) termValue).doubleValue(); + } else if (termValue instanceof Float) { + termValue = ((Number) termValue).floatValue(); + } else if (termValue instanceof BigInteger || termValue instanceof Long) { + termValue = ((Number) termValue).longValue(); + } else if (termValue instanceof Integer) { + termValue = ((Number) termValue).intValue(); + } else if (termValue instanceof Short) { + termValue = ((Number) termValue).shortValue(); + } else if (termValue instanceof Byte) { + termValue = ((Number) termValue).byteValue(); + } + } else if (termValue instanceof SQLBooleanExpr) { + termValue = ((SQLBooleanExpr) termValue).getValue(); + } 
else { + termValue = termValue.toString(); + } + + return termValue; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/QueryMaker.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/QueryMaker.java index f36bca2686..75f3538981 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/QueryMaker.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/maker/QueryMaker.java @@ -3,10 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.maker; - import org.apache.lucene.search.join.ScoreMode; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.QueryBuilder; @@ -18,76 +16,76 @@ public class QueryMaker extends Maker { - /** - * - * - * @param where - * @return - * @throws SqlParseException - */ - public static BoolQueryBuilder explain(Where where) throws SqlParseException { - return explain(where, true); - } + /** + * @param where + * @return + * @throws SqlParseException + */ + public static BoolQueryBuilder explain(Where where) throws SqlParseException { + return explain(where, true); + } - public static BoolQueryBuilder explain(Where where, boolean isQuery) throws SqlParseException { - BoolQueryBuilder boolQuery = QueryBuilders.boolQuery(); - while (where.getWheres().size() == 1) { - where = where.getWheres().getFirst(); - } - new QueryMaker().explanWhere(boolQuery, where); - if (isQuery) { - return boolQuery; - } - return QueryBuilders.boolQuery().filter(boolQuery); + public static BoolQueryBuilder explain(Where where, boolean isQuery) throws SqlParseException { + BoolQueryBuilder boolQuery = QueryBuilders.boolQuery(); + while (where.getWheres().size() == 1) { + where = where.getWheres().getFirst(); } - - private QueryMaker() { - super(true); + new QueryMaker().explanWhere(boolQuery, where); + if (isQuery) { + return boolQuery; } + return QueryBuilders.boolQuery().filter(boolQuery); + } - private void explanWhere(BoolQueryBuilder 
boolQuery, Where where) throws SqlParseException { - if (where instanceof Condition) { - addSubQuery(boolQuery, where, (QueryBuilder) make((Condition) where)); - } else { - BoolQueryBuilder subQuery = QueryBuilders.boolQuery(); - addSubQuery(boolQuery, where, subQuery); - for (Where subWhere : where.getWheres()) { - explanWhere(subQuery, subWhere); - } - } - } + private QueryMaker() { + super(true); + } - /** - * - * - * @param boolQuery - * @param where - * @param subQuery - */ - private void addSubQuery(BoolQueryBuilder boolQuery, Where where, QueryBuilder subQuery) { - if (where instanceof Condition) { - Condition condition = (Condition) where; + private void explanWhere(BoolQueryBuilder boolQuery, Where where) throws SqlParseException { + if (where instanceof Condition) { + addSubQuery(boolQuery, where, (QueryBuilder) make((Condition) where)); + } else { + BoolQueryBuilder subQuery = QueryBuilders.boolQuery(); + addSubQuery(boolQuery, where, subQuery); + for (Where subWhere : where.getWheres()) { + explanWhere(subQuery, subWhere); + } + } + } - if (condition.isNested()) { - // bugfix #628 - if ("missing".equalsIgnoreCase(String.valueOf(condition.getValue())) - && (condition.getOPERATOR() == Condition.OPERATOR.IS - || condition.getOPERATOR() == Condition.OPERATOR.EQ)) { - boolQuery.mustNot(QueryBuilders.nestedQuery(condition.getNestedPath(), - QueryBuilders.boolQuery().mustNot(subQuery), ScoreMode.None)); - return; - } + /** + * @param boolQuery + * @param where + * @param subQuery + */ + private void addSubQuery(BoolQueryBuilder boolQuery, Where where, QueryBuilder subQuery) { + if (where instanceof Condition) { + Condition condition = (Condition) where; - subQuery = QueryBuilders.nestedQuery(condition.getNestedPath(), subQuery, ScoreMode.None); - } else if (condition.isChildren()) { - subQuery = JoinQueryBuilders.hasChildQuery(condition.getChildType(), subQuery, ScoreMode.None); - } + if (condition.isNested()) { + // bugfix #628 + if 
("missing".equalsIgnoreCase(String.valueOf(condition.getValue())) + && (condition.getOPERATOR() == Condition.OPERATOR.IS + || condition.getOPERATOR() == Condition.OPERATOR.EQ)) { + boolQuery.mustNot( + QueryBuilders.nestedQuery( + condition.getNestedPath(), + QueryBuilders.boolQuery().mustNot(subQuery), + ScoreMode.None)); + return; } - if (where.getConn() == Where.CONN.AND) { - boolQuery.must(subQuery); - } else { - boolQuery.should(subQuery); - } + subQuery = QueryBuilders.nestedQuery(condition.getNestedPath(), subQuery, ScoreMode.None); + } else if (condition.isChildren()) { + subQuery = + JoinQueryBuilders.hasChildQuery(condition.getChildType(), subQuery, ScoreMode.None); + } + } + + if (where.getConn() == Where.CONN.AND) { + boolQuery.must(subQuery); + } else { + boolQuery.should(subQuery); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryAction.java index cd9b1f4030..a9eb6113f7 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.multi; import java.util.HashSet; @@ -18,66 +17,68 @@ import org.opensearch.sql.legacy.query.QueryAction; import org.opensearch.sql.legacy.query.SqlElasticRequestBuilder; -/** - * Created by Eliran on 19/8/2016. - */ +/** Created by Eliran on 19/8/2016. 
*/ public class MultiQueryAction extends QueryAction { - private MultiQuerySelect multiQuerySelect; + private MultiQuerySelect multiQuerySelect; + + public MultiQueryAction(Client client, MultiQuerySelect multiSelect) { + super(client, null); + this.multiQuerySelect = multiSelect; + } - public MultiQueryAction(Client client, MultiQuerySelect multiSelect) { - super(client, null); - this.multiQuerySelect = multiSelect; + @Override + public SqlElasticRequestBuilder explain() throws SqlParseException { + if (!isValidMultiSelectReturnFields()) { + throw new SqlParseException( + "on multi query fields/aliases of one table should be subset of other"); } + MultiQueryRequestBuilder requestBuilder = new MultiQueryRequestBuilder(this.multiQuerySelect); + requestBuilder.setFirstSearchRequest( + createRequestBuilder(this.multiQuerySelect.getFirstSelect())); + requestBuilder.setSecondSearchRequest( + createRequestBuilder(this.multiQuerySelect.getSecondSelect())); + requestBuilder.fillTableAliases( + this.multiQuerySelect.getFirstSelect().getFields(), + this.multiQuerySelect.getSecondSelect().getFields()); - @Override - public SqlElasticRequestBuilder explain() throws SqlParseException { - if (!isValidMultiSelectReturnFields()) { - throw new SqlParseException("on multi query fields/aliases of one table should be subset of other"); - } - MultiQueryRequestBuilder requestBuilder = new MultiQueryRequestBuilder(this.multiQuerySelect); - requestBuilder.setFirstSearchRequest(createRequestBuilder(this.multiQuerySelect.getFirstSelect())); - requestBuilder.setSecondSearchRequest(createRequestBuilder(this.multiQuerySelect.getSecondSelect())); - requestBuilder.fillTableAliases(this.multiQuerySelect.getFirstSelect().getFields(), - this.multiQuerySelect.getSecondSelect().getFields()); + return requestBuilder; + } - return requestBuilder; + private boolean isValidMultiSelectReturnFields() { + List firstQueryFields = multiQuerySelect.getFirstSelect().getFields(); + List secondQueryFields = 
multiQuerySelect.getSecondSelect().getFields(); + if (firstQueryFields.size() > secondQueryFields.size()) { + return isSubsetFields(firstQueryFields, secondQueryFields); } + return isSubsetFields(secondQueryFields, firstQueryFields); + } - private boolean isValidMultiSelectReturnFields() { - List firstQueryFields = multiQuerySelect.getFirstSelect().getFields(); - List secondQueryFields = multiQuerySelect.getSecondSelect().getFields(); - if (firstQueryFields.size() > secondQueryFields.size()) { - return isSubsetFields(firstQueryFields, secondQueryFields); - } - return isSubsetFields(secondQueryFields, firstQueryFields); + private boolean isSubsetFields(List bigGroup, List smallerGroup) { + Set biggerGroup = new HashSet<>(); + for (Field field : bigGroup) { + String fieldName = getNameOrAlias(field); + biggerGroup.add(fieldName); } - - private boolean isSubsetFields(List bigGroup, List smallerGroup) { - Set biggerGroup = new HashSet<>(); - for (Field field : bigGroup) { - String fieldName = getNameOrAlias(field); - biggerGroup.add(fieldName); - } - for (Field field : smallerGroup) { - String fieldName = getNameOrAlias(field); - if (!biggerGroup.contains(fieldName)) { - return false; - } - } - return true; + for (Field field : smallerGroup) { + String fieldName = getNameOrAlias(field); + if (!biggerGroup.contains(fieldName)) { + return false; + } } + return true; + } - private String getNameOrAlias(Field field) { - String fieldName = field.getName(); - if (field.getAlias() != null && !field.getAlias().isEmpty()) { - fieldName = field.getAlias(); - } - return fieldName; + private String getNameOrAlias(Field field) { + String fieldName = field.getName(); + if (field.getAlias() != null && !field.getAlias().isEmpty()) { + fieldName = field.getAlias(); } + return fieldName; + } - protected SearchRequestBuilder createRequestBuilder(Select select) throws SqlParseException { - DefaultQueryAction queryAction = new DefaultQueryAction(client, select); - queryAction.explain(); - 
return queryAction.getRequestBuilder(); - } + protected SearchRequestBuilder createRequestBuilder(Select select) throws SqlParseException { + DefaultQueryAction queryAction = new DefaultQueryAction(client, select); + queryAction.explain(); + return queryAction.getRequestBuilder(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryRequestBuilder.java index 5340a701ed..b4e92a8de6 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQueryRequestBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.multi; import com.alibaba.druid.sql.ast.statement.SQLUnionOperator; @@ -23,108 +22,108 @@ import org.opensearch.sql.legacy.domain.Select; import org.opensearch.sql.legacy.query.SqlElasticRequestBuilder; -/** - * Created by Eliran on 19/8/2016. - */ +/** Created by Eliran on 19/8/2016. 
*/ public class MultiQueryRequestBuilder implements SqlElasticRequestBuilder { - private SearchRequestBuilder firstSearchRequest; - private SearchRequestBuilder secondSearchRequest; - private Map firstTableFieldToAlias; - private Map secondTableFieldToAlias; - private MultiQuerySelect multiQuerySelect; - private SQLUnionOperator relation; - - - public MultiQueryRequestBuilder(MultiQuerySelect multiQuerySelect) { - this.multiQuerySelect = multiQuerySelect; - this.relation = multiQuerySelect.getOperation(); - this.firstTableFieldToAlias = new HashMap<>(); - this.secondTableFieldToAlias = new HashMap<>(); - } - - @Override - public ActionRequest request() { - return null; - } - - - @Override - public String explain() { - - try { - XContentBuilder firstBuilder = XContentFactory.jsonBuilder().prettyPrint(); - this.firstSearchRequest.request().source().toXContent(firstBuilder, ToXContent.EMPTY_PARAMS); - - XContentBuilder secondBuilder = XContentFactory.jsonBuilder().prettyPrint(); - this.secondSearchRequest.request().source().toXContent(secondBuilder, ToXContent.EMPTY_PARAMS); - return String.format("performing %s on :\n left query:\n%s\n right query:\n%s", - this.relation.name, BytesReference.bytes(firstBuilder).utf8ToString(), - BytesReference.bytes(secondBuilder).utf8ToString()); - - } catch (IOException e) { - e.printStackTrace(); - } - return null; - } - - @Override - public ActionResponse get() { - return null; - } - - @Override - public ActionRequestBuilder getBuilder() { - return null; + private SearchRequestBuilder firstSearchRequest; + private SearchRequestBuilder secondSearchRequest; + private Map firstTableFieldToAlias; + private Map secondTableFieldToAlias; + private MultiQuerySelect multiQuerySelect; + private SQLUnionOperator relation; + + public MultiQueryRequestBuilder(MultiQuerySelect multiQuerySelect) { + this.multiQuerySelect = multiQuerySelect; + this.relation = multiQuerySelect.getOperation(); + this.firstTableFieldToAlias = new HashMap<>(); + 
this.secondTableFieldToAlias = new HashMap<>(); + } + + @Override + public ActionRequest request() { + return null; + } + + @Override + public String explain() { + + try { + XContentBuilder firstBuilder = XContentFactory.jsonBuilder().prettyPrint(); + this.firstSearchRequest.request().source().toXContent(firstBuilder, ToXContent.EMPTY_PARAMS); + + XContentBuilder secondBuilder = XContentFactory.jsonBuilder().prettyPrint(); + this.secondSearchRequest + .request() + .source() + .toXContent(secondBuilder, ToXContent.EMPTY_PARAMS); + return String.format( + "performing %s on :\n left query:\n%s\n right query:\n%s", + this.relation.name, + BytesReference.bytes(firstBuilder).utf8ToString(), + BytesReference.bytes(secondBuilder).utf8ToString()); + + } catch (IOException e) { + e.printStackTrace(); } - - - public SearchRequestBuilder getFirstSearchRequest() { - return firstSearchRequest; - } - - public SearchRequestBuilder getSecondSearchRequest() { - return secondSearchRequest; - } - - public SQLUnionOperator getRelation() { - return relation; - } - - public void setFirstSearchRequest(SearchRequestBuilder firstSearchRequest) { - this.firstSearchRequest = firstSearchRequest; + return null; + } + + @Override + public ActionResponse get() { + return null; + } + + @Override + public ActionRequestBuilder getBuilder() { + return null; + } + + public SearchRequestBuilder getFirstSearchRequest() { + return firstSearchRequest; + } + + public SearchRequestBuilder getSecondSearchRequest() { + return secondSearchRequest; + } + + public SQLUnionOperator getRelation() { + return relation; + } + + public void setFirstSearchRequest(SearchRequestBuilder firstSearchRequest) { + this.firstSearchRequest = firstSearchRequest; + } + + public void setSecondSearchRequest(SearchRequestBuilder secondSearchRequest) { + this.secondSearchRequest = secondSearchRequest; + } + + public void fillTableAliases(List firstTableFields, List secondTableFields) { + fillTableToAlias(this.firstTableFieldToAlias, 
firstTableFields); + fillTableToAlias(this.secondTableFieldToAlias, secondTableFields); + } + + private void fillTableToAlias(Map fieldToAlias, List fields) { + for (Field field : fields) { + if (field.getAlias() != null && !field.getAlias().isEmpty()) { + fieldToAlias.put(field.getName(), field.getAlias()); + } } + } - public void setSecondSearchRequest(SearchRequestBuilder secondSearchRequest) { - this.secondSearchRequest = secondSearchRequest; - } - - public void fillTableAliases(List firstTableFields, List secondTableFields) { - fillTableToAlias(this.firstTableFieldToAlias, firstTableFields); - fillTableToAlias(this.secondTableFieldToAlias, secondTableFields); - } - - private void fillTableToAlias(Map fieldToAlias, List fields) { - for (Field field : fields) { - if (field.getAlias() != null && !field.getAlias().isEmpty()) { - fieldToAlias.put(field.getName(), field.getAlias()); - } - } - } - - public Map getFirstTableFieldToAlias() { - return firstTableFieldToAlias; - } + public Map getFirstTableFieldToAlias() { + return firstTableFieldToAlias; + } - public Map getSecondTableFieldToAlias() { - return secondTableFieldToAlias; - } + public Map getSecondTableFieldToAlias() { + return secondTableFieldToAlias; + } - public Select getOriginalSelect(boolean first) { - if (first) { - return this.multiQuerySelect.getFirstSelect(); - } else { - return this.multiQuerySelect.getSecondSelect(); - } + public Select getOriginalSelect(boolean first) { + if (first) { + return this.multiQuerySelect.getFirstSelect(); + } else { + return this.multiQuerySelect.getSecondSelect(); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQuerySelect.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQuerySelect.java index e5dd1716ed..72e7232a30 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQuerySelect.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/MultiQuerySelect.java @@ -3,35 +3,32 
@@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.multi; import com.alibaba.druid.sql.ast.statement.SQLUnionOperator; import org.opensearch.sql.legacy.domain.Select; -/** - * Created by Eliran on 19/8/2016. - */ +/** Created by Eliran on 19/8/2016. */ public class MultiQuerySelect { - private SQLUnionOperator operation; - private Select firstSelect; - private Select secondSelect; - - public MultiQuerySelect(SQLUnionOperator operation, Select firstSelect, Select secondSelect) { - this.operation = operation; - this.firstSelect = firstSelect; - this.secondSelect = secondSelect; - } - - public SQLUnionOperator getOperation() { - return operation; - } - - public Select getFirstSelect() { - return firstSelect; - } - - public Select getSecondSelect() { - return secondSelect; - } + private SQLUnionOperator operation; + private Select firstSelect; + private Select secondSelect; + + public MultiQuerySelect(SQLUnionOperator operation, Select firstSelect, Select secondSelect) { + this.operation = operation; + this.firstSelect = firstSelect; + this.secondSelect = secondSelect; + } + + public SQLUnionOperator getOperation() { + return operation; + } + + public Select getFirstSelect() { + return firstSelect; + } + + public Select getSecondSelect() { + return secondSelect; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/OpenSearchMultiQueryActionFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/OpenSearchMultiQueryActionFactory.java index be86fdef81..1f934e9a80 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/OpenSearchMultiQueryActionFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/multi/OpenSearchMultiQueryActionFactory.java @@ -3,26 +3,23 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.multi; import org.opensearch.client.Client; import org.opensearch.sql.legacy.exception.SqlParseException; import 
org.opensearch.sql.legacy.query.QueryAction; -/** - * Created by Eliran on 19/8/2016. - */ +/** Created by Eliran on 19/8/2016. */ public class OpenSearchMultiQueryActionFactory { - public static QueryAction createMultiQueryAction(Client client, MultiQuerySelect multiSelect) - throws SqlParseException { - switch (multiSelect.getOperation()) { - case UNION_ALL: - case UNION: - return new MultiQueryAction(client, multiSelect); - default: - throw new SqlParseException("only supports union and union all"); - } + public static QueryAction createMultiQueryAction(Client client, MultiQuerySelect multiSelect) + throws SqlParseException { + switch (multiSelect.getOperation()) { + case UNION_ALL: + case UNION: + return new MultiQueryAction(client, multiSelect); + default: + throw new SqlParseException("only supports union and union all"); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/HashJoinQueryPlanRequestBuilder.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/HashJoinQueryPlanRequestBuilder.java index 312ade197a..28146aaacb 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/HashJoinQueryPlanRequestBuilder.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/HashJoinQueryPlanRequestBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner; import org.opensearch.client.Client; @@ -14,64 +13,50 @@ import org.opensearch.sql.legacy.request.SqlRequest; /** - * QueryPlanner builder for Hash Join query. In future, different queries could have its own builders to generate - * QueryPlanner. QueryPlanner would run all stages in its pipeline no matter how it be assembled. + * QueryPlanner builder for Hash Join query. In future, different queries could have its own + * builders to generate QueryPlanner. QueryPlanner would run all stages in its pipeline no matter + * how it be assembled. 
*/ public class HashJoinQueryPlanRequestBuilder extends HashJoinElasticRequestBuilder { - /** - * Client connection to OpenSearch cluster - */ - private final Client client; - - /** - * Query request - */ - private final SqlRequest request; - - /** - * Query planner configuration - */ - private final Config config; - - - public HashJoinQueryPlanRequestBuilder(Client client, SqlRequest request) { - this.client = client; - this.request = request; - this.config = new Config(); - } - - @Override - public String explain() { - return plan().explain(); - } - - /** - * Planning for the query and create planner for explain/execute later. - * - * @return query planner - */ - public QueryPlanner plan() { - config.configureLimit( - getTotalLimit(), - getFirstTable().getHintLimit(), - getSecondTable().getHintLimit() - ); - config.configureTermsFilterOptimization(isUseTermFiltersOptimization()); - - return new QueryPlanner( - client, - config, - new QueryParams( - getFirstTable(), - getSecondTable(), - getJoinType(), - getT1ToT2FieldsComparison() - ) - ); - } - - public Config getConfig() { - return config; - } + /** Client connection to OpenSearch cluster */ + private final Client client; + + /** Query request */ + private final SqlRequest request; + + /** Query planner configuration */ + private final Config config; + + public HashJoinQueryPlanRequestBuilder(Client client, SqlRequest request) { + this.client = client; + this.request = request; + this.config = new Config(); + } + + @Override + public String explain() { + return plan().explain(); + } + + /** + * Planning for the query and create planner for explain/execute later. 
+ * + * @return query planner + */ + public QueryPlanner plan() { + config.configureLimit( + getTotalLimit(), getFirstTable().getHintLimit(), getSecondTable().getHintLimit()); + config.configureTermsFilterOptimization(isUseTermFiltersOptimization()); + + return new QueryPlanner( + client, + config, + new QueryParams( + getFirstTable(), getSecondTable(), getJoinType(), getT1ToT2FieldsComparison())); + } + + public Config getConfig() { + return config; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/converter/SQLAggregationParser.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/converter/SQLAggregationParser.java index ac9a173212..b54e260fd4 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/converter/SQLAggregationParser.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/converter/SQLAggregationParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.converter; import com.alibaba.druid.sql.ast.SQLExpr; @@ -31,253 +30,281 @@ import org.opensearch.sql.legacy.query.planner.core.ColumnNode; /** - * The definition of SQL Aggregation Converter which will parse the query to project column node list and - * aggregation list - * e.g. parse the query: SELECT age, MAX(balance) - MIN(balance) FROM T GROUP BY age. - * will generate the - * node list: age, max_0 - min_0 - * aggregation list: age, max(balance) as max_0, min(balance) as min_0 - * + * The definition of SQL Aggregation Converter which will parse the query to project column node + * list and aggregation list e.g. parse the query: SELECT age, MAX(balance) - MIN(balance) FROM T + * GROUP BY age. 
will generate the node list: age, max_0 - min_0 aggregation list: age, max(balance) + * as max_0, min(balance) as min_0 */ @RequiredArgsConstructor public class SQLAggregationParser { - private final ColumnTypeProvider columnTypeProvider; - private Context context; - @Getter - private List columnNodes = new ArrayList<>(); - - public void parse(MySqlSelectQueryBlock queryBlock) { - context = new Context(constructSQLExprAliasMapFromSelect(queryBlock)); - - //1. extract raw names of selectItems - List selectItemNames = extractSelectItemNames(queryBlock.getSelectList()); - - //2. rewrite all the function name to lower case. - rewriteFunctionNameToLowerCase(queryBlock); - - //2. find all GroupKeyExpr from GroupBy expression. - findAllGroupKeyExprFromGroupByAndSelect(queryBlock); - findAllAggregationExprFromSelect(queryBlock); - - //3. parse the select list to expression - parseExprInSelectList(queryBlock, selectItemNames, new SQLExprToExpressionConverter(context)); - } - - public List selectItemList() { - List sqlSelectItems = new ArrayList<>(); - context.getGroupKeyExprMap().entrySet().forEach(entry -> sqlSelectItems - .add(new SQLSelectItem(entry.getKey(), entry.getValue().getExpression().toString()))); - context.getAggregationExprMap().entrySet().forEach(entry -> sqlSelectItems - .add(new SQLSelectItem(entry.getKey(), entry.getValue().getExpression().toString()))); - return sqlSelectItems; + private final ColumnTypeProvider columnTypeProvider; + private Context context; + @Getter private List columnNodes = new ArrayList<>(); + + public void parse(MySqlSelectQueryBlock queryBlock) { + context = new Context(constructSQLExprAliasMapFromSelect(queryBlock)); + + // 1. extract raw names of selectItems + List selectItemNames = extractSelectItemNames(queryBlock.getSelectList()); + + // 2. rewrite all the function name to lower case. + rewriteFunctionNameToLowerCase(queryBlock); + + // 2. find all GroupKeyExpr from GroupBy expression. 
+ findAllGroupKeyExprFromGroupByAndSelect(queryBlock); + findAllAggregationExprFromSelect(queryBlock); + + // 3. parse the select list to expression + parseExprInSelectList(queryBlock, selectItemNames, new SQLExprToExpressionConverter(context)); + } + + public List selectItemList() { + List sqlSelectItems = new ArrayList<>(); + context + .getGroupKeyExprMap() + .entrySet() + .forEach( + entry -> + sqlSelectItems.add( + new SQLSelectItem( + entry.getKey(), entry.getValue().getExpression().toString()))); + context + .getAggregationExprMap() + .entrySet() + .forEach( + entry -> + sqlSelectItems.add( + new SQLSelectItem( + entry.getKey(), entry.getValue().getExpression().toString()))); + return sqlSelectItems; + } + + private Map constructSQLExprAliasMapFromSelect( + MySqlSelectQueryBlock queryBlock) { + return queryBlock.getSelectList().stream() + .filter(item -> !Strings.isNullOrEmpty(item.getAlias())) + .collect(Collectors.toMap(SQLSelectItem::getExpr, SQLSelectItem::getAlias)); + } + + /** + * The SQL-92 require nonaggregated name column in the select list must appear in the GROUP BY, + * But the existing uses cases violate this require. e.g. AggregationIT. countGroupByDateTest Ref + * the https://dev.mysql.com/doc/refman/8.0/en/group-by-handling.html for detail information + */ + private void findAllGroupKeyExprFromGroupByAndSelect(MySqlSelectQueryBlock queryBlock) { + if (queryBlock.getGroupBy() == null) { + return; } - - private Map constructSQLExprAliasMapFromSelect(MySqlSelectQueryBlock queryBlock) { - return queryBlock.getSelectList().stream().filter(item -> !Strings.isNullOrEmpty(item.getAlias())) - .collect(Collectors.toMap(SQLSelectItem::getExpr, SQLSelectItem::getAlias)); - } - - /** - * The SQL-92 require nonaggregated name column in the select list must appear in the GROUP BY, But the - * existing uses cases violate this require. e.g. AggregationIT. 
countGroupByDateTest - * Ref the https://dev.mysql.com/doc/refman/8.0/en/group-by-handling.html for detail information - */ - private void findAllGroupKeyExprFromGroupByAndSelect(MySqlSelectQueryBlock queryBlock) { - if (queryBlock.getGroupBy() == null) { - return; - } - // 1. fetch the expr from groupby clause. - List groupByKeyExprList = - queryBlock.getGroupBy().getItems().stream().map(item -> ((MySqlSelectGroupByExpr) item).getExpr()) - .collect(Collectors.toList()); - - // 2. find the group expr from select. - for (SQLSelectItem selectItem : queryBlock.getSelectList()) { - SQLExpr selectItemExpr = selectItem.getExpr(); - // extension, group key in select could not in group by. - if (selectItemExpr instanceof SQLIdentifierExpr) { - context.addGroupKeyExpr(selectItemExpr); - } else { - for (SQLExpr groupByExpr : groupByKeyExprList) { - // SQL-92,nonaggregated name column in the select list must appear in the GROUP BY - if (compareSelectExprAndGroupByExpr(selectItemExpr, selectItem.getAlias(), groupByExpr)) { - context.addGroupKeyExpr(selectItemExpr); - } else if (groupByExpr instanceof SQLIdentifierExpr) { - // support expression over group key, e.g. SELECT log(G), max(A) FROM T GROUP BY G. - String groupByName = ((SQLIdentifierExpr) groupByExpr).getName(); - selectItemExpr.accept(new MySqlASTVisitorAdapter() { - @Override - public boolean visit(SQLAggregateExpr x) { - return false; - } - - @Override - public boolean visit(SQLIdentifierExpr expr) { - if (groupByName.equalsIgnoreCase(expr.getName())) { - expr.setParent(selectItem.getParent()); - context.addGroupKeyExpr(expr); - } - return false; - } - }); + // 1. fetch the expr from groupby clause. + List groupByKeyExprList = + queryBlock.getGroupBy().getItems().stream() + .map(item -> ((MySqlSelectGroupByExpr) item).getExpr()) + .collect(Collectors.toList()); + + // 2. find the group expr from select. 
+ for (SQLSelectItem selectItem : queryBlock.getSelectList()) { + SQLExpr selectItemExpr = selectItem.getExpr(); + // extension, group key in select could not in group by. + if (selectItemExpr instanceof SQLIdentifierExpr) { + context.addGroupKeyExpr(selectItemExpr); + } else { + for (SQLExpr groupByExpr : groupByKeyExprList) { + // SQL-92,nonaggregated name column in the select list must appear in the GROUP BY + if (compareSelectExprAndGroupByExpr(selectItemExpr, selectItem.getAlias(), groupByExpr)) { + context.addGroupKeyExpr(selectItemExpr); + } else if (groupByExpr instanceof SQLIdentifierExpr) { + // support expression over group key, e.g. SELECT log(G), max(A) FROM T GROUP BY G. + String groupByName = ((SQLIdentifierExpr) groupByExpr).getName(); + selectItemExpr.accept( + new MySqlASTVisitorAdapter() { + @Override + public boolean visit(SQLAggregateExpr x) { + return false; + } + + @Override + public boolean visit(SQLIdentifierExpr expr) { + if (groupByName.equalsIgnoreCase(expr.getName())) { + expr.setParent(selectItem.getParent()); + context.addGroupKeyExpr(expr); } - } - } + return false; + } + }); + } } + } + } + } + + private boolean compareSelectExprAndGroupByExpr( + SQLExpr selectItemExpr, String alias, SQLExpr groupByExpr) { + if (groupByExpr.equals(selectItemExpr)) { + return true; + } else if (groupByExpr instanceof SQLIdentifierExpr + && ((SQLIdentifierExpr) groupByExpr).getName().equalsIgnoreCase(alias)) { + return true; } + return false; + } + + private void findAllAggregationExprFromSelect(MySqlSelectQueryBlock queryBlock) { + queryBlock + .getSelectList() + .forEach( + selectItem -> + selectItem.accept( + new MySqlASTVisitorAdapter() { + @Override + public boolean visit(SQLAggregateExpr expr) { + context.addAggregationExpr(expr); + return true; + } + })); + } + + private void parseExprInSelectList( + MySqlSelectQueryBlock queryBlock, + List selectItemNames, + SQLExprToExpressionConverter exprConverter) { + List selectItems = 
queryBlock.getSelectList(); + for (int i = 0; i < selectItems.size(); i++) { + Expression expression = exprConverter.convert(selectItems.get(i).getExpr()); + ColumnNode columnNode = + ColumnNode.builder() + .name(selectItemNames.get(i)) + .alias(selectItems.get(i).getAlias()) + .type(columnTypeProvider.get(i)) + .expr(expression) + .build(); + columnNodes.add(columnNode); + } + } - private boolean compareSelectExprAndGroupByExpr(SQLExpr selectItemExpr, String alias, SQLExpr groupByExpr) { - if (groupByExpr.equals(selectItemExpr)) { + private List extractSelectItemNames(List selectItems) { + List selectItemNames = new ArrayList<>(); + for (SQLSelectItem selectItem : selectItems) { + selectItemNames.add(nameOfSelectItem(selectItem)); + } + return selectItemNames; + } + + private void rewriteFunctionNameToLowerCase(MySqlSelectQueryBlock query) { + query.accept( + new MySqlASTVisitorAdapter() { + @Override + public boolean visit(SQLMethodInvokeExpr x) { + x.setMethodName(x.getMethodName().toLowerCase()); return true; - } else if (groupByExpr instanceof SQLIdentifierExpr - && ((SQLIdentifierExpr) groupByExpr).getName().equalsIgnoreCase(alias)) { - return true; - } - return false; + } + }); + } + + private String nameOfSelectItem(SQLSelectItem selectItem) { + return Strings.isNullOrEmpty(selectItem.getAlias()) + ? 
Context.nameOfExpr(selectItem.getExpr()) + : selectItem.getAlias(); + } + + @RequiredArgsConstructor + public static class Context { + private final AliasGenerator aliasGenerator = new AliasGenerator(); + + private final Map selectSQLExprAliasMap; + + @Getter private final Map groupKeyExprMap = new LinkedHashMap<>(); + @Getter private final Map aggregationExprMap = new LinkedHashMap<>(); + + Optional resolve(SQLExpr expr) { + if (groupKeyExprMap.containsKey(expr)) { + return Optional.of(groupKeyExprMap.get(expr).getExpression()); + } else if (aggregationExprMap.containsKey(expr)) { + return Optional.of(aggregationExprMap.get(expr).getExpression()); + } else { + return Optional.empty(); + } } - private void findAllAggregationExprFromSelect(MySqlSelectQueryBlock queryBlock) { - queryBlock.getSelectList().forEach(selectItem -> selectItem.accept(new MySqlASTVisitorAdapter() { - @Override - public boolean visit(SQLAggregateExpr expr) { - context.addAggregationExpr(expr); - return true; - } - })); + public void addGroupKeyExpr(SQLExpr groupKeyExpr) { + if (!groupKeyExprMap.containsKey(groupKeyExpr)) { + groupKeyExprMap.put(groupKeyExpr, new GroupKeyExpr(groupKeyExpr)); + } } - private void parseExprInSelectList( - MySqlSelectQueryBlock queryBlock, List selectItemNames, - SQLExprToExpressionConverter exprConverter) { - List selectItems = queryBlock.getSelectList(); - for (int i = 0; i < selectItems.size(); i++) { - Expression expression = exprConverter.convert(selectItems.get(i).getExpr()); - ColumnNode columnNode = ColumnNode.builder() - .name(selectItemNames.get(i)) - .alias(selectItems.get(i).getAlias()) - .type(columnTypeProvider.get(i)) - .expr(expression) - .build(); - columnNodes.add(columnNode); - } + public void addAggregationExpr(SQLAggregateExpr aggregationExpr) { + if (!aggregationExprMap.containsKey(aggregationExpr)) { + aggregationExprMap.put(aggregationExpr, new AggregationExpr(aggregationExpr)); + } } - private List extractSelectItemNames(List selectItems) 
{ - List selectItemNames = new ArrayList<>(); - for (SQLSelectItem selectItem: selectItems){ - selectItemNames.add(nameOfSelectItem(selectItem)); + @Getter + public class GroupKeyExpr { + private final SQLExpr expr; + private final Expression expression; + + public GroupKeyExpr(SQLExpr expr) { + this.expr = expr; + String exprName = nameOfExpr(expr).replace(".", "#"); + if (expr instanceof SQLIdentifierExpr + && selectSQLExprAliasMap.values().contains(((SQLIdentifierExpr) expr).getName())) { + exprName = ((SQLIdentifierExpr) expr).getName(); } - return selectItemNames; + this.expression = ExpressionFactory.ref(selectSQLExprAliasMap.getOrDefault(expr, exprName)); + } } - private void rewriteFunctionNameToLowerCase(MySqlSelectQueryBlock query) { - query.accept(new MySqlASTVisitorAdapter() { - @Override - public boolean visit(SQLMethodInvokeExpr x) { - x.setMethodName(x.getMethodName().toLowerCase()); - return true; - } - }); + @Getter + public class AggregationExpr { + private final SQLAggregateExpr expr; + private final Expression expression; + + public AggregationExpr(SQLAggregateExpr expr) { + this.expr = expr; + this.expression = + ExpressionFactory.ref( + selectSQLExprAliasMap.getOrDefault( + expr, aliasGenerator.nextAlias(expr.getMethodName()))); + } } - private String nameOfSelectItem(SQLSelectItem selectItem) { - return Strings.isNullOrEmpty(selectItem.getAlias()) ? Context - .nameOfExpr(selectItem.getExpr()) : selectItem.getAlias(); + public static String nameOfExpr(SQLExpr expr) { + String exprName = expr.toString().toLowerCase(); + if (expr instanceof SQLAggregateExpr) { + SQLAggregateExpr aggExpr = (SQLAggregateExpr) expr; + SQLAggregateOption option = aggExpr.getOption(); + exprName = + option == null + ? 
String.format("%s(%s)", aggExpr.getMethodName(), aggExpr.getArguments().get(0)) + : String.format( + "%s(%s %s)", + aggExpr.getMethodName(), option.name(), aggExpr.getArguments().get(0)); + } else if (expr instanceof SQLMethodInvokeExpr) { + exprName = + String.format( + "%s(%s)", + ((SQLMethodInvokeExpr) expr).getMethodName(), + nameOfExpr(((SQLMethodInvokeExpr) expr).getParameters().get(0))); + } else if (expr instanceof SQLIdentifierExpr) { + exprName = ((SQLIdentifierExpr) expr).getName(); + } else if (expr instanceof SQLCastExpr) { + exprName = + String.format( + "CAST(%s AS %s)", + ((SQLCastExpr) expr).getExpr(), ((SQLCastExpr) expr).getDataType().getName()); + } + return exprName; } - @RequiredArgsConstructor - public static class Context { - private final AliasGenerator aliasGenerator = new AliasGenerator(); - - private final Map selectSQLExprAliasMap; - - @Getter - private final Map groupKeyExprMap = new LinkedHashMap<>(); - @Getter - private final Map aggregationExprMap = new LinkedHashMap<>(); - - Optional resolve(SQLExpr expr) { - if (groupKeyExprMap.containsKey(expr)) { - return Optional.of(groupKeyExprMap.get(expr).getExpression()); - } else if (aggregationExprMap.containsKey(expr)) { - return Optional.of(aggregationExprMap.get(expr).getExpression()); - } else { - return Optional.empty(); - } - } + static class AliasGenerator { + private int aliasSuffix = 0; - public void addGroupKeyExpr(SQLExpr groupKeyExpr) { - if (!groupKeyExprMap.containsKey(groupKeyExpr)) { - groupKeyExprMap.put(groupKeyExpr, new GroupKeyExpr(groupKeyExpr)); - } - } - - public void addAggregationExpr(SQLAggregateExpr aggregationExpr) { - if (!aggregationExprMap.containsKey(aggregationExpr)) { - aggregationExprMap.put(aggregationExpr, new AggregationExpr(aggregationExpr)); - } - } + private String nextAlias(String name) { + return String.format("%s_%d", name, next()); + } - @Getter - public class GroupKeyExpr { - private final SQLExpr expr; - private final Expression expression; - 
- public GroupKeyExpr(SQLExpr expr) { - this.expr = expr; - String exprName = nameOfExpr(expr).replace(".", "#"); - if (expr instanceof SQLIdentifierExpr - && selectSQLExprAliasMap.values().contains(((SQLIdentifierExpr) expr).getName())) { - exprName = ((SQLIdentifierExpr) expr).getName(); - } - this.expression = ExpressionFactory.ref(selectSQLExprAliasMap.getOrDefault(expr, exprName)); - } - } - - @Getter - public class AggregationExpr { - private final SQLAggregateExpr expr; - private final Expression expression; - - public AggregationExpr(SQLAggregateExpr expr) { - this.expr = expr; - this.expression = - ExpressionFactory.ref(selectSQLExprAliasMap.getOrDefault(expr, aliasGenerator - .nextAlias(expr.getMethodName()))); - } - } - - public static String nameOfExpr(SQLExpr expr) { - String exprName = expr.toString().toLowerCase(); - if (expr instanceof SQLAggregateExpr) { - SQLAggregateExpr aggExpr = (SQLAggregateExpr) expr; - SQLAggregateOption option = aggExpr.getOption(); - exprName = option == null - ? 
String.format("%s(%s)", aggExpr.getMethodName(), aggExpr.getArguments().get(0)) - : String.format("%s(%s %s)", aggExpr.getMethodName(), option.name(), - aggExpr.getArguments().get(0)); - } else if (expr instanceof SQLMethodInvokeExpr) { - exprName = String.format("%s(%s)", ((SQLMethodInvokeExpr) expr).getMethodName(), - nameOfExpr(((SQLMethodInvokeExpr) expr).getParameters().get(0))); - } else if (expr instanceof SQLIdentifierExpr) { - exprName = ((SQLIdentifierExpr) expr).getName(); - } else if (expr instanceof SQLCastExpr) { - exprName = String.format("CAST(%s AS %s)", ((SQLCastExpr) expr).getExpr(), - ((SQLCastExpr) expr).getDataType().getName()); - } - return exprName; - } - - static class AliasGenerator { - private int aliasSuffix = 0; - - private String nextAlias(String name) { - return String.format("%s_%d", name, next()); - } - - private Integer next() { - return aliasSuffix++; - } - } + private Integer next() { + return aliasSuffix++; + } } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/converter/SQLExprToExpressionConverter.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/converter/SQLExprToExpressionConverter.java index 0315fef900..800dac8426 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/converter/SQLExprToExpressionConverter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/converter/SQLExprToExpressionConverter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.converter; import static org.opensearch.sql.legacy.expression.core.ExpressionFactory.cast; @@ -27,86 +26,86 @@ import org.opensearch.sql.legacy.expression.core.operator.ScalarOperation; import org.opensearch.sql.legacy.expression.model.ExprValueFactory; -/** - * The definition of {@link SQLExpr} to {@link Expression} converter. - */ +/** The definition of {@link SQLExpr} to {@link Expression} converter. 
*/ @RequiredArgsConstructor public class SQLExprToExpressionConverter { - private static final Map binaryOperatorOperationMap = - new ImmutableMap.Builder() - .put(SQLBinaryOperator.Add, ScalarOperation.ADD) - .put(SQLBinaryOperator.Subtract, ScalarOperation.SUBTRACT) - .put(SQLBinaryOperator.Multiply, ScalarOperation.MULTIPLY) - .put(SQLBinaryOperator.Divide, ScalarOperation.DIVIDE) - .put(SQLBinaryOperator.Modulus, ScalarOperation.MODULES) - .build(); - private static final Map methodOperationMap = - new ImmutableMap.Builder() - .put(ScalarOperation.ABS.getName(), ScalarOperation.ABS) - .put(ScalarOperation.ACOS.getName(), ScalarOperation.ACOS) - .put(ScalarOperation.ASIN.getName(), ScalarOperation.ASIN) - .put(ScalarOperation.ATAN.getName(), ScalarOperation.ATAN) - .put(ScalarOperation.ATAN2.getName(), ScalarOperation.ATAN2) - .put(ScalarOperation.TAN.getName(), ScalarOperation.TAN) - .put(ScalarOperation.CBRT.getName(), ScalarOperation.CBRT) - .put(ScalarOperation.CEIL.getName(), ScalarOperation.CEIL) - .put(ScalarOperation.COS.getName(), ScalarOperation.COS) - .put(ScalarOperation.COSH.getName(), ScalarOperation.COSH) - .put(ScalarOperation.EXP.getName(), ScalarOperation.EXP) - .put(ScalarOperation.FLOOR.getName(), ScalarOperation.FLOOR) - .put(ScalarOperation.LN.getName(), ScalarOperation.LN) - .put(ScalarOperation.LOG.getName(), ScalarOperation.LOG) - .put(ScalarOperation.LOG2.getName(), ScalarOperation.LOG2) - .put(ScalarOperation.LOG10.getName(), ScalarOperation.LOG10) - .build(); - + private static final Map binaryOperatorOperationMap = + new ImmutableMap.Builder() + .put(SQLBinaryOperator.Add, ScalarOperation.ADD) + .put(SQLBinaryOperator.Subtract, ScalarOperation.SUBTRACT) + .put(SQLBinaryOperator.Multiply, ScalarOperation.MULTIPLY) + .put(SQLBinaryOperator.Divide, ScalarOperation.DIVIDE) + .put(SQLBinaryOperator.Modulus, ScalarOperation.MODULES) + .build(); + private static final Map methodOperationMap = + new ImmutableMap.Builder() + 
.put(ScalarOperation.ABS.getName(), ScalarOperation.ABS) + .put(ScalarOperation.ACOS.getName(), ScalarOperation.ACOS) + .put(ScalarOperation.ASIN.getName(), ScalarOperation.ASIN) + .put(ScalarOperation.ATAN.getName(), ScalarOperation.ATAN) + .put(ScalarOperation.ATAN2.getName(), ScalarOperation.ATAN2) + .put(ScalarOperation.TAN.getName(), ScalarOperation.TAN) + .put(ScalarOperation.CBRT.getName(), ScalarOperation.CBRT) + .put(ScalarOperation.CEIL.getName(), ScalarOperation.CEIL) + .put(ScalarOperation.COS.getName(), ScalarOperation.COS) + .put(ScalarOperation.COSH.getName(), ScalarOperation.COSH) + .put(ScalarOperation.EXP.getName(), ScalarOperation.EXP) + .put(ScalarOperation.FLOOR.getName(), ScalarOperation.FLOOR) + .put(ScalarOperation.LN.getName(), ScalarOperation.LN) + .put(ScalarOperation.LOG.getName(), ScalarOperation.LOG) + .put(ScalarOperation.LOG2.getName(), ScalarOperation.LOG2) + .put(ScalarOperation.LOG10.getName(), ScalarOperation.LOG10) + .build(); - private final SQLAggregationParser.Context context; + private final SQLAggregationParser.Context context; - /** - * Convert the {@link SQLExpr} to {@link Expression} - * - * @param expr {@link SQLExpr} - * @return expression {@link Expression} - */ - public Expression convert(SQLExpr expr) { - Optional resolvedExpression = context.resolve(expr); - if (resolvedExpression.isPresent()) { - return resolvedExpression.get(); - } else { - if (expr instanceof SQLBinaryOpExpr) { - return binaryOperatorToExpression((SQLBinaryOpExpr) expr, this::convert); - } else if (expr instanceof SQLMethodInvokeExpr) { - return methodToExpression((SQLMethodInvokeExpr) expr, this::convert); - } else if (expr instanceof SQLValuableExpr) { - return literal(ExprValueFactory.from(((SQLValuableExpr) expr).getValue())); - } else if (expr instanceof SQLCastExpr) { - return cast(convert(((SQLCastExpr) expr).getExpr())); - } else { - throw new RuntimeException("unsupported expr: " + expr); - } - } + /** + * Convert the {@link SQLExpr} to 
{@link Expression} + * + * @param expr {@link SQLExpr} + * @return expression {@link Expression} + */ + public Expression convert(SQLExpr expr) { + Optional resolvedExpression = context.resolve(expr); + if (resolvedExpression.isPresent()) { + return resolvedExpression.get(); + } else { + if (expr instanceof SQLBinaryOpExpr) { + return binaryOperatorToExpression((SQLBinaryOpExpr) expr, this::convert); + } else if (expr instanceof SQLMethodInvokeExpr) { + return methodToExpression((SQLMethodInvokeExpr) expr, this::convert); + } else if (expr instanceof SQLValuableExpr) { + return literal(ExprValueFactory.from(((SQLValuableExpr) expr).getValue())); + } else if (expr instanceof SQLCastExpr) { + return cast(convert(((SQLCastExpr) expr).getExpr())); + } else { + throw new RuntimeException("unsupported expr: " + expr); + } } + } - private Expression binaryOperatorToExpression(SQLBinaryOpExpr expr, - Function converter) { - if (binaryOperatorOperationMap.containsKey(expr.getOperator())) { - return ExpressionFactory.of(binaryOperatorOperationMap.get(expr.getOperator()), - Arrays.asList(converter.apply(expr.getLeft()), - converter.apply(expr.getRight()))); - } else { - throw new UnsupportedOperationException("unsupported operator: " + expr.getOperator().getName()); - } + private Expression binaryOperatorToExpression( + SQLBinaryOpExpr expr, Function converter) { + if (binaryOperatorOperationMap.containsKey(expr.getOperator())) { + return ExpressionFactory.of( + binaryOperatorOperationMap.get(expr.getOperator()), + Arrays.asList(converter.apply(expr.getLeft()), converter.apply(expr.getRight()))); + } else { + throw new UnsupportedOperationException( + "unsupported operator: " + expr.getOperator().getName()); } + } - private Expression methodToExpression(SQLMethodInvokeExpr expr, Function converter) { - String methodName = expr.getMethodName().toLowerCase(); - if (methodOperationMap.containsKey(methodName)) { + private Expression methodToExpression( + SQLMethodInvokeExpr expr, 
Function converter) { + String methodName = expr.getMethodName().toLowerCase(); + if (methodOperationMap.containsKey(methodName)) { - return ExpressionFactory.of(methodOperationMap.get(methodName), - expr.getParameters().stream().map(converter).collect(Collectors.toList())); - } else { - throw new UnsupportedOperationException("unsupported operator: " + expr.getMethodName()); - } + return ExpressionFactory.of( + methodOperationMap.get(methodName), + expr.getParameters().stream().map(converter).collect(Collectors.toList())); + } else { + throw new UnsupportedOperationException("unsupported operator: " + expr.getMethodName()); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/converter/SQLToOperatorConverter.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/converter/SQLToOperatorConverter.java index fbaff0ba18..4d1ab58160 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/converter/SQLToOperatorConverter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/converter/SQLToOperatorConverter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.converter; import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlSelectQueryBlock; @@ -24,53 +23,49 @@ import org.opensearch.sql.legacy.query.planner.physical.node.project.PhysicalProject; import org.opensearch.sql.legacy.query.planner.physical.node.scroll.PhysicalScroll; -/** - * Definition of SQL to PhysicalOperator converter. - */ +/** Definition of SQL to PhysicalOperator converter. 
*/ public class SQLToOperatorConverter extends MySqlASTVisitorAdapter { - private static final Logger LOG = LogManager.getLogger(SQLToOperatorConverter.class); - - private final Client client; - private final SQLAggregationParser aggregationParser; + private static final Logger LOG = LogManager.getLogger(SQLToOperatorConverter.class); - @Getter - private PhysicalOperator physicalOperator; + private final Client client; + private final SQLAggregationParser aggregationParser; - public SQLToOperatorConverter(Client client, ColumnTypeProvider columnTypeProvider) { - this.client = client; - this.aggregationParser = new SQLAggregationParser(columnTypeProvider); - } + @Getter private PhysicalOperator physicalOperator; - @Override - public boolean visit(MySqlSelectQueryBlock query) { + public SQLToOperatorConverter(Client client, ColumnTypeProvider columnTypeProvider) { + this.client = client; + this.aggregationParser = new SQLAggregationParser(columnTypeProvider); + } - //1. parse the aggregation - aggregationParser.parse(query); + @Override + public boolean visit(MySqlSelectQueryBlock query) { + // 1. parse the aggregation + aggregationParser.parse(query); - //2. construct the PhysicalOperator - physicalOperator = project(scroll(query)); - return false; - } + // 2. construct the PhysicalOperator + physicalOperator = project(scroll(query)); + return false; + } - /** - * Get list of {@link ColumnNode}. - * - * @return list of {@link ColumnNode}. - */ - public List getColumnNodes() { - return aggregationParser.getColumnNodes(); - } + /** + * Get list of {@link ColumnNode}. + * + * @return list of {@link ColumnNode}. 
+ */ + public List getColumnNodes() { + return aggregationParser.getColumnNodes(); + } - private PhysicalOperator project(PhysicalOperator input) { - return new PhysicalProject(input, aggregationParser.getColumnNodes()); - } + private PhysicalOperator project(PhysicalOperator input) { + return new PhysicalProject(input, aggregationParser.getColumnNodes()); + } - @SneakyThrows - private PhysicalOperator scroll(MySqlSelectQueryBlock query) { - query.getSelectList().clear(); - query.getSelectList().addAll(aggregationParser.selectItemList()); - Select select = new SqlParser().parseSelect(query); - return new PhysicalScroll(new AggregationQueryAction(client, select)); - } + @SneakyThrows + private PhysicalOperator scroll(MySqlSelectQueryBlock query) { + query.getSelectList().clear(); + query.getSelectList().addAll(aggregationParser.selectItemList()); + Select select = new SqlParser().parseSelect(query); + return new PhysicalScroll(new AggregationQueryAction(client, select)); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/BindingTupleQueryPlanner.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/BindingTupleQueryPlanner.java index 01a0e78484..a8fb7cc53c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/BindingTupleQueryPlanner.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/BindingTupleQueryPlanner.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.core; import com.alibaba.druid.sql.ast.expr.SQLQueryExpr; @@ -17,63 +16,63 @@ import org.opensearch.sql.legacy.query.planner.physical.PhysicalOperator; import org.opensearch.sql.legacy.query.planner.physical.node.scroll.PhysicalScroll; -/** - * The definition of QueryPlanner which return the {@link BindingTuple} as result. - */ +/** The definition of QueryPlanner which return the {@link BindingTuple} as result. 
*/ public class BindingTupleQueryPlanner { - private PhysicalOperator physicalOperator; - @Getter - private List columnNodes; - - public BindingTupleQueryPlanner(Client client, SQLQueryExpr sqlExpr, ColumnTypeProvider columnTypeProvider) { - SQLToOperatorConverter converter = new SQLToOperatorConverter(client, columnTypeProvider); - sqlExpr.accept(converter); - this.physicalOperator = converter.getPhysicalOperator(); - this.columnNodes = converter.getColumnNodes(); - } + private PhysicalOperator physicalOperator; + @Getter private List columnNodes; - /** - * Execute the QueryPlanner. - * @return list of {@link BindingTuple}. - */ - public List execute() { - PhysicalOperator op = physicalOperator; - List tuples = new ArrayList<>(); - try { - op.open(null); - } catch (Exception e) { - throw new RuntimeException(e); - } + public BindingTupleQueryPlanner( + Client client, SQLQueryExpr sqlExpr, ColumnTypeProvider columnTypeProvider) { + SQLToOperatorConverter converter = new SQLToOperatorConverter(client, columnTypeProvider); + sqlExpr.accept(converter); + this.physicalOperator = converter.getPhysicalOperator(); + this.columnNodes = converter.getColumnNodes(); + } - while (op.hasNext()) { - tuples.add(op.next().data()); - } - return tuples; + /** + * Execute the QueryPlanner. + * + * @return list of {@link BindingTuple}. + */ + public List execute() { + PhysicalOperator op = physicalOperator; + List tuples = new ArrayList<>(); + try { + op.open(null); + } catch (Exception e) { + throw new RuntimeException(e); } - /** - * Explain the physical execution plan. - * @return execution plan. - */ - public String explain() { - Explanation explanation = new Explanation(); - physicalOperator.accept(explanation); - return explanation.explain(); + while (op.hasNext()) { + tuples.add(op.next().data()); } + return tuples; + } + + /** + * Explain the physical execution plan. + * + * @return execution plan. 
+ */ + public String explain() { + Explanation explanation = new Explanation(); + physicalOperator.accept(explanation); + return explanation.explain(); + } - private static class Explanation implements PlanNode.Visitor { - private String explain; + private static class Explanation implements PlanNode.Visitor { + private String explain; - public String explain() { - return explain; - } + public String explain() { + return explain; + } - @Override - public boolean visit(PlanNode planNode) { - if (planNode instanceof PhysicalScroll) { - explain = planNode.toString(); - } - return true; - } + @Override + public boolean visit(PlanNode planNode) { + if (planNode instanceof PhysicalScroll) { + explain = planNode.toString(); + } + return true; } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/ColumnNode.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/ColumnNode.java index 753d5ac001..9dd969fb83 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/ColumnNode.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/ColumnNode.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.core; import com.google.common.base.Strings; @@ -14,20 +13,18 @@ import org.opensearch.sql.legacy.executor.format.Schema; import org.opensearch.sql.legacy.expression.core.Expression; -/** - * The definition of column node. - */ +/** The definition of column node. */ @Builder @Setter @Getter @ToString public class ColumnNode { - private String name; - private String alias; - private Schema.Type type; - private Expression expr; + private String name; + private String alias; + private Schema.Type type; + private Expression expr; - public String columnName() { - return Strings.isNullOrEmpty(alias) ? name : alias; - } + public String columnName() { + return Strings.isNullOrEmpty(alias) ? 
name : alias; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/Config.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/Config.java index 6e04c674cb..304a16756b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/Config.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/Config.java @@ -3,156 +3,134 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.core; import org.opensearch.sql.legacy.query.planner.resource.blocksize.AdaptiveBlockSize; import org.opensearch.sql.legacy.query.planner.resource.blocksize.BlockSize; import org.opensearch.sql.legacy.query.planner.resource.blocksize.BlockSize.FixedBlockSize; -/** - * Query planner configuration - */ +/** Query planner configuration */ public class Config { - public static final int DEFAULT_BLOCK_SIZE = 10000; - public static final int DEFAULT_SCROLL_PAGE_SIZE = 10000; - public static final int DEFAULT_CIRCUIT_BREAK_LIMIT = 85; - public static final double[] DEFAULT_BACK_OFF_RETRY_INTERVALS = {4, 8 + 4, 16 + 4}; - public static final int DEFAULT_TIME_OUT = 60; - - /** - * Block size for join algorithm - */ - private BlockSize blockSize = new FixedBlockSize(DEFAULT_BLOCK_SIZE); - - /** - * Page size for scroll on each index - */ - private Integer[] scrollPageSizes = {DEFAULT_SCROLL_PAGE_SIZE, DEFAULT_SCROLL_PAGE_SIZE}; - - /** - * Circuit breaker trigger limit (percentage) - */ - private Integer circuitBreakLimit = DEFAULT_CIRCUIT_BREAK_LIMIT; - - /** - * Intervals for back off retry - */ - private double[] backOffRetryIntervals = DEFAULT_BACK_OFF_RETRY_INTERVALS; - - /** - * Total number of rows in final result specified by LIMIT - */ - private int totalLimit; - - /** - * Number of rows fetched from each table specified by JOIN_TABLES_LIMIT hint - */ - private int tableLimit1; - private int tableLimit2; - - /** - * Push down column values in ON of first table to query 
against second table - */ - private boolean isUseTermsFilterOptimization = false; - - /** - * Total time out (seconds) for the execution - */ - private int timeout = DEFAULT_TIME_OUT; - - - public BlockSize blockSize() { - return blockSize; - } + public static final int DEFAULT_BLOCK_SIZE = 10000; + public static final int DEFAULT_SCROLL_PAGE_SIZE = 10000; + public static final int DEFAULT_CIRCUIT_BREAK_LIMIT = 85; + public static final double[] DEFAULT_BACK_OFF_RETRY_INTERVALS = {4, 8 + 4, 16 + 4}; + public static final int DEFAULT_TIME_OUT = 60; - public void configureBlockSize(Object[] params) { - if (params.length > 0) { - Integer size = (Integer) params[0]; - if (size > 0) { - blockSize = new FixedBlockSize(size); - } else { - blockSize = new AdaptiveBlockSize(0); - } - } - } + /** Block size for join algorithm */ + private BlockSize blockSize = new FixedBlockSize(DEFAULT_BLOCK_SIZE); - public Integer[] scrollPageSize() { - return scrollPageSizes; - } + /** Page size for scroll on each index */ + private Integer[] scrollPageSizes = {DEFAULT_SCROLL_PAGE_SIZE, DEFAULT_SCROLL_PAGE_SIZE}; - public void configureScrollPageSize(Object[] params) { - if (params.length == 1) { - scrollPageSizes = new Integer[]{ - (Integer) params[0], - (Integer) params[0] - }; - } else if (params.length >= 2) { - scrollPageSizes = (Integer[]) params; - } - } + /** Circuit breaker trigger limit (percentage) */ + private Integer circuitBreakLimit = DEFAULT_CIRCUIT_BREAK_LIMIT; - public int circuitBreakLimit() { - return circuitBreakLimit; - } + /** Intervals for back off retry */ + private double[] backOffRetryIntervals = DEFAULT_BACK_OFF_RETRY_INTERVALS; - public void configureCircuitBreakLimit(Object[] params) { - if (params.length > 0) { - circuitBreakLimit = (Integer) params[0]; - } - } + /** Total number of rows in final result specified by LIMIT */ + private int totalLimit; - public double[] backOffRetryIntervals() { - return backOffRetryIntervals; - } + /** Number of rows fetched 
from each table specified by JOIN_TABLES_LIMIT hint */ + private int tableLimit1; - public void configureBackOffRetryIntervals(Object[] params) { - backOffRetryIntervals = new double[params.length]; - for (int i = 0; i < params.length; i++) { - backOffRetryIntervals[i] = (Integer) params[i]; //Only support integer interval for now - } - } + private int tableLimit2; - public void configureLimit(Integer totalLimit, Integer tableLimit1, Integer tableLimit2) { - if (totalLimit != null) { - this.totalLimit = totalLimit; - } - if (tableLimit1 != null) { - this.tableLimit1 = tableLimit1; - } - if (tableLimit2 != null) { - this.tableLimit2 = tableLimit2; - } - } + /** Push down column values in ON of first table to query against second table */ + private boolean isUseTermsFilterOptimization = false; - public int totalLimit() { - return totalLimit; - } + /** Total time out (seconds) for the execution */ + private int timeout = DEFAULT_TIME_OUT; - public int tableLimit1() { - return tableLimit1; + public BlockSize blockSize() { + return blockSize; + } + + public void configureBlockSize(Object[] params) { + if (params.length > 0) { + Integer size = (Integer) params[0]; + if (size > 0) { + blockSize = new FixedBlockSize(size); + } else { + blockSize = new AdaptiveBlockSize(0); + } } + } + + public Integer[] scrollPageSize() { + return scrollPageSizes; + } - public int tableLimit2() { - return tableLimit2; + public void configureScrollPageSize(Object[] params) { + if (params.length == 1) { + scrollPageSizes = new Integer[] {(Integer) params[0], (Integer) params[0]}; + } else if (params.length >= 2) { + scrollPageSizes = (Integer[]) params; } + } + + public int circuitBreakLimit() { + return circuitBreakLimit; + } - public void configureTermsFilterOptimization(boolean isUseTermFiltersOptimization) { - this.isUseTermsFilterOptimization = isUseTermFiltersOptimization; + public void configureCircuitBreakLimit(Object[] params) { + if (params.length > 0) { + circuitBreakLimit = 
(Integer) params[0]; } + } - public boolean isUseTermsFilterOptimization() { - return isUseTermsFilterOptimization; + public double[] backOffRetryIntervals() { + return backOffRetryIntervals; + } + + public void configureBackOffRetryIntervals(Object[] params) { + backOffRetryIntervals = new double[params.length]; + for (int i = 0; i < params.length; i++) { + backOffRetryIntervals[i] = (Integer) params[i]; // Only support integer interval for now } + } - public void configureTimeOut(Object[] params) { - if (params.length > 0) { - timeout = (Integer) params[0]; - } + public void configureLimit(Integer totalLimit, Integer tableLimit1, Integer tableLimit2) { + if (totalLimit != null) { + this.totalLimit = totalLimit; + } + if (tableLimit1 != null) { + this.tableLimit1 = tableLimit1; + } + if (tableLimit2 != null) { + this.tableLimit2 = tableLimit2; } + } - public int timeout() { - return timeout; + public int totalLimit() { + return totalLimit; + } + + public int tableLimit1() { + return tableLimit1; + } + + public int tableLimit2() { + return tableLimit2; + } + + public void configureTermsFilterOptimization(boolean isUseTermFiltersOptimization) { + this.isUseTermsFilterOptimization = isUseTermFiltersOptimization; + } + + public boolean isUseTermsFilterOptimization() { + return isUseTermsFilterOptimization; + } + + public void configureTimeOut(Object[] params) { + if (params.length > 0) { + timeout = (Integer) params[0]; } + } + + public int timeout() { + return timeout; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/ExecuteParams.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/ExecuteParams.java index dcb3c3b727..c5ed48a514 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/ExecuteParams.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/ExecuteParams.java @@ -3,32 +3,29 @@ * SPDX-License-Identifier: Apache-2.0 */ - package 
org.opensearch.sql.legacy.query.planner.core; import java.util.EnumMap; -/** - * Parameters needed for physical operator execution. - */ +/** Parameters needed for physical operator execution. */ public class ExecuteParams { - /** - * Mapping from type to parameters - */ - private EnumMap params = new EnumMap<>(ExecuteParamType.class); - - public void add(ExecuteParamType type, T param) { - params.put(type, param); - } + /** Mapping from type to parameters */ + private EnumMap params = new EnumMap<>(ExecuteParamType.class); - @SuppressWarnings("unchecked") - public T get(ExecuteParamType type) { - return (T) params.get(type); - } + public void add(ExecuteParamType type, T param) { + params.put(type, param); + } - public enum ExecuteParamType { - CLIENT, RESOURCE_MANAGER, EXTRA_QUERY_FILTER, TIMEOUT - } + @SuppressWarnings("unchecked") + public T get(ExecuteParamType type) { + return (T) params.get(type); + } + public enum ExecuteParamType { + CLIENT, + RESOURCE_MANAGER, + EXTRA_QUERY_FILTER, + TIMEOUT + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/Plan.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/Plan.java index f163e61f0e..328bb9451f 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/Plan.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/Plan.java @@ -3,26 +3,20 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.core; import org.opensearch.sql.legacy.query.planner.core.PlanNode.Visitor; -/** - * Query plan - */ +/** Query plan */ public interface Plan { - /** - * Explain current query plan by visitor - * - * @param explanation visitor to explain the plan - */ - void traverse(Visitor explanation); - - /** - * Optimize current query plan to get the optimal one - */ - void optimize(); + /** + * Explain current query plan by visitor + * + * @param explanation visitor to explain the plan + */ + void 
traverse(Visitor explanation); + /** Optimize current query plan to get the optimal one */ + void optimize(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/PlanNode.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/PlanNode.java index ad421f82a4..b30ec9d3d9 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/PlanNode.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/PlanNode.java @@ -3,54 +3,47 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.core; -/** - * Abstract plan node in query plan. - */ +/** Abstract plan node in query plan. */ public interface PlanNode { - /** - * All child nodes of current node used for traversal. - * - * @return all children - */ - PlanNode[] children(); + /** + * All child nodes of current node used for traversal. + * + * @return all children + */ + PlanNode[] children(); + + /** + * Accept a visitor and traverse the plan tree with it. + * + * @param visitor plan node visitor + */ + default void accept(Visitor visitor) { + if (visitor.visit(this)) { + for (PlanNode node : children()) { + node.accept(visitor); + } + } + visitor.endVisit(this); + } + + /** Plan node visitor. */ + interface Visitor { /** - * Accept a visitor and traverse the plan tree with it. + * To avoid listing all subclasses of PlanNode here, we dispatch manually in concrete visitor. * - * @param visitor plan node visitor + * @param op plan node being visited */ - default void accept(Visitor visitor) { - if (visitor.visit(this)) { - for (PlanNode node : children()) { - node.accept(visitor); - } - } - visitor.endVisit(this); - } + boolean visit(PlanNode op); /** - * Plan node visitor. + * Re-visit current node before return to parent node + * + * @param op plan node finished visit */ - interface Visitor { - - /** - * To avoid listing all subclasses of PlanNode here, we dispatch manually in concrete visitor. 
- * - * @param op plan node being visited - */ - boolean visit(PlanNode op); - - /** - * Re-visit current node before return to parent node - * - * @param op plan node finished visit - */ - default void endVisit(PlanNode op) { - } - } - + default void endVisit(PlanNode op) {} + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/QueryParams.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/QueryParams.java index 2cb835da94..f5e2a3fcd6 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/QueryParams.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/QueryParams.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.core; import com.alibaba.druid.sql.ast.statement.SQLJoinTableSource; @@ -12,70 +11,63 @@ import org.opensearch.sql.legacy.domain.Field; import org.opensearch.sql.legacy.query.join.TableInJoinRequestBuilder; -/** - * All parameters required by QueryPlanner - */ +/** All parameters required by QueryPlanner */ public class QueryParams { - /** - * Request builder for first table - */ - private final TableInJoinRequestBuilder request1; - - /** - * Request builder for second table - */ - private final TableInJoinRequestBuilder request2; + /** Request builder for first table */ + private final TableInJoinRequestBuilder request1; - /** - * Join type, ex. inner join, left join - */ - private final SQLJoinTableSource.JoinType joinType; + /** Request builder for second table */ + private final TableInJoinRequestBuilder request2; - /** - * Join conditions in ON clause grouped by OR. - * For example, "ON (a.name = b.id AND a.age = b.age) OR a.location = b.address" - * => list: [ - * [ (a.name, b.id), (a.age, b.age) ], - * [ (a.location, b.address) ] - * ] - */ - private final List>> joinConditions; + /** Join type, ex. 
inner join, left join */ + private final SQLJoinTableSource.JoinType joinType; + /** + * Join conditions in ON clause grouped by OR. For example, "ON (a.name = b.id AND a.age = b.age) + * OR a.location = b.address" => list: [ [ (a.name, b.id), (a.age, b.age) ], [ (a.location, + * b.address) ] ] + */ + private final List>> joinConditions; - public QueryParams(TableInJoinRequestBuilder request1, - TableInJoinRequestBuilder request2, - SQLJoinTableSource.JoinType joinType, - List>> t1ToT2FieldsComparison) { - this.request1 = request1; - this.request2 = request2; - this.joinType = joinType; - this.joinConditions = t1ToT2FieldsComparison; - } + public QueryParams( + TableInJoinRequestBuilder request1, + TableInJoinRequestBuilder request2, + SQLJoinTableSource.JoinType joinType, + List>> t1ToT2FieldsComparison) { + this.request1 = request1; + this.request2 = request2; + this.joinType = joinType; + this.joinConditions = t1ToT2FieldsComparison; + } - public TableInJoinRequestBuilder firstRequest() { - return request1; - } + public TableInJoinRequestBuilder firstRequest() { + return request1; + } - public TableInJoinRequestBuilder secondRequest() { - return request2; - } + public TableInJoinRequestBuilder secondRequest() { + return request2; + } - public SQLJoinTableSource.JoinType joinType() { - return joinType; - } + public SQLJoinTableSource.JoinType joinType() { + return joinType; + } - public List>> joinConditions() { - return joinConditions; - } + public List>> joinConditions() { + return joinConditions; + } - @Override - public String toString() { - return "QueryParams{" - + "request1=" + request1 - + ", request2=" + request2 - + ", joinType=" + joinType - + ", joinConditions=" + joinConditions - + '}'; - } + @Override + public String toString() { + return "QueryParams{" + + "request1=" + + request1 + + ", request2=" + + request2 + + ", joinType=" + + joinType + + ", joinConditions=" + + joinConditions + + '}'; + } } diff --git 
a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/QueryPlanner.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/QueryPlanner.java index 56acfa5d0c..0a1c2fd24b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/QueryPlanner.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/core/QueryPlanner.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.core; import static org.opensearch.sql.legacy.query.planner.core.ExecuteParams.ExecuteParamType.CLIENT; @@ -21,89 +20,69 @@ import org.opensearch.sql.legacy.query.planner.resource.ResourceManager; import org.opensearch.sql.legacy.query.planner.resource.Stats; -/** - * Query planner that driver the logical planning, physical planning, execute and explain. - */ +/** Query planner that driver the logical planning, physical planning, execute and explain. */ public class QueryPlanner { - /** - * Connection to ElasticSearch - */ - private final Client client; - - /** - * Query plan configuration - */ - private final Config config; - - /** - * Optimized logical plan - */ - private final LogicalPlan logicalPlan; - - /** - * Best physical plan to execute - */ - private final PhysicalPlan physicalPlan; - - /** - * Statistics collector - */ - private Stats stats; - - /** - * Resource monitor and statistics manager - */ - private ResourceManager resourceMgr; - - - public QueryPlanner(Client client, Config config, QueryParams params) { - this.client = client; - this.config = config; - this.stats = new Stats(client); - this.resourceMgr = new ResourceManager(stats, config); - - logicalPlan = new LogicalPlan(config, params); - logicalPlan.optimize(); - - physicalPlan = new PhysicalPlan(logicalPlan); - physicalPlan.optimize(); - } - - /** - * Execute query plan - * - * @return response of the execution - */ - public List execute() { - ExecuteParams params = new ExecuteParams(); - 
params.add(CLIENT, client); - params.add(TIMEOUT, config.timeout()); - params.add(RESOURCE_MANAGER, resourceMgr); - return physicalPlan.execute(params); - } - - /** - * Explain query plan - * - * @return explanation string of the plan - */ - public String explain() { - return new Explanation( - logicalPlan, physicalPlan, - new JsonExplanationFormat(4) - ).toString(); - } - - public MetaSearchResult getMetaResult() { - return resourceMgr.getMetaResult(); - } - - /** - * Setter for unit test - */ - public void setStats(Stats stats) { - this.stats = stats; - this.resourceMgr = new ResourceManager(stats, config); - } + /** Connection to ElasticSearch */ + private final Client client; + + /** Query plan configuration */ + private final Config config; + + /** Optimized logical plan */ + private final LogicalPlan logicalPlan; + + /** Best physical plan to execute */ + private final PhysicalPlan physicalPlan; + + /** Statistics collector */ + private Stats stats; + + /** Resource monitor and statistics manager */ + private ResourceManager resourceMgr; + + public QueryPlanner(Client client, Config config, QueryParams params) { + this.client = client; + this.config = config; + this.stats = new Stats(client); + this.resourceMgr = new ResourceManager(stats, config); + + logicalPlan = new LogicalPlan(config, params); + logicalPlan.optimize(); + + physicalPlan = new PhysicalPlan(logicalPlan); + physicalPlan.optimize(); + } + + /** + * Execute query plan + * + * @return response of the execution + */ + public List execute() { + ExecuteParams params = new ExecuteParams(); + params.add(CLIENT, client); + params.add(TIMEOUT, config.timeout()); + params.add(RESOURCE_MANAGER, resourceMgr); + return physicalPlan.execute(params); + } + + /** + * Explain query plan + * + * @return explanation string of the plan + */ + public String explain() { + return new Explanation(logicalPlan, physicalPlan, new JsonExplanationFormat(4)).toString(); + } + + public MetaSearchResult getMetaResult() { + 
return resourceMgr.getMetaResult(); + } + + /** Setter for unit test */ + public void setStats(Stats stats) { + this.stats = stats; + this.resourceMgr = new ResourceManager(stats, config); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/Explanation.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/Explanation.java index a22f2c5b7f..635ea3aace 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/Explanation.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/Explanation.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.explain; import com.google.common.collect.ImmutableMap; @@ -14,87 +13,71 @@ import org.opensearch.sql.legacy.query.planner.logical.node.Group; import org.opensearch.sql.legacy.query.planner.physical.PhysicalOperator; -/** - * Base class for different explanation implementation - */ +/** Base class for different explanation implementation */ public class Explanation implements Visitor { - /** - * Hard coding description to be consistent with old nested join explanation - */ - private static final String DESCRIPTION = - "Hash Join algorithm builds hash table based on result of first query, " - + "and then probes hash table to find matched rows for each row returned by second query"; - - /** - * Plans to be explained - */ - private final Plan logicalPlan; - private final Plan physicalPlan; - - /** - * Explanation format - */ - private final ExplanationFormat format; - - public Explanation(Plan logicalPlan, - Plan physicalPlan, - ExplanationFormat format) { - this.logicalPlan = logicalPlan; - this.physicalPlan = physicalPlan; - this.format = format; - } + /** Hard coding description to be consistent with old nested join explanation */ + private static final String DESCRIPTION = + "Hash Join algorithm builds hash table based on result of first query, " + + "and then probes 
hash table to find matched rows for each row returned by second query"; - @Override - public String toString() { - format.prepare(ImmutableMap.of("description", DESCRIPTION)); + /** Plans to be explained */ + private final Plan logicalPlan; - format.start("Logical Plan"); - logicalPlan.traverse(this); - format.end(); + private final Plan physicalPlan; - format.start("Physical Plan"); - physicalPlan.traverse(this); - format.end(); + /** Explanation format */ + private final ExplanationFormat format; - return format.toString(); - } + public Explanation(Plan logicalPlan, Plan physicalPlan, ExplanationFormat format) { + this.logicalPlan = logicalPlan; + this.physicalPlan = physicalPlan; + this.format = format; + } - @Override - public boolean visit(PlanNode node) { - if (isValidOp(node)) { - format.explain(node); - } - return true; - } + @Override + public String toString() { + format.prepare(ImmutableMap.of("description", DESCRIPTION)); - @Override - public void endVisit(PlanNode node) { - if (isValidOp(node)) { - format.end(); - } - } + format.start("Logical Plan"); + logicalPlan.traverse(this); + format.end(); - /** - * Check if node is a valid logical or physical operator - */ - private boolean isValidOp(PlanNode node) { - return isValidLogical(node) || isPhysical(node); - } + format.start("Physical Plan"); + physicalPlan.traverse(this); + format.end(); - /** - * Valid logical operator means it's Group OR NOT a no-op because Group clarify explanation - */ - private boolean isValidLogical(PlanNode node) { - return (node instanceof LogicalOperator) - && (node instanceof Group || !((LogicalOperator) node).isNoOp()); - } + return format.toString(); + } - /** - * Right now all physical operators are valid and non-no-op - */ - private boolean isPhysical(PlanNode node) { - return node instanceof PhysicalOperator; + @Override + public boolean visit(PlanNode node) { + if (isValidOp(node)) { + format.explain(node); } + return true; + } + @Override + public void 
endVisit(PlanNode node) { + if (isValidOp(node)) { + format.end(); + } + } + + /** Check if node is a valid logical or physical operator */ + private boolean isValidOp(PlanNode node) { + return isValidLogical(node) || isPhysical(node); + } + + /** Valid logical operator means it's Group OR NOT a no-op because Group clarify explanation */ + private boolean isValidLogical(PlanNode node) { + return (node instanceof LogicalOperator) + && (node instanceof Group || !((LogicalOperator) node).isNoOp()); + } + + /** Right now all physical operators are valid and non-no-op */ + private boolean isPhysical(PlanNode node) { + return node instanceof PhysicalOperator; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/ExplanationFormat.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/ExplanationFormat.java index 23c8bb76fe..a349666221 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/ExplanationFormat.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/ExplanationFormat.java @@ -3,42 +3,34 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.explain; import java.util.Map; -/** - * Explanation format - */ +/** Explanation format */ public interface ExplanationFormat { - /** - * Initialize internal data structure - * - * @param kvs key-value pairs - */ - void prepare(Map kvs); - - /** - * Start a new section in explanation. - * - * @param name section name - */ - void start(String name); - - - /** - * Explain and add to current section. - * - * @param object object to be added to explanation - */ - void explain(Object object); - - - /** - * End current section. - */ - void end(); - + /** + * Initialize internal data structure + * + * @param kvs key-value pairs + */ + void prepare(Map kvs); + + /** + * Start a new section in explanation. 
+ * + * @param name section name + */ + void start(String name); + + /** + * Explain and add to current section. + * + * @param object object to be added to explanation + */ + void explain(Object object); + + /** End current section. */ + void end(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/JsonExplanationFormat.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/JsonExplanationFormat.java index 404205d30b..7bf4f833de 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/JsonExplanationFormat.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/explain/JsonExplanationFormat.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.explain; import java.util.ArrayDeque; @@ -13,89 +12,80 @@ import org.json.JSONException; import org.json.JSONObject; -/** - * Explain query plan in JSON format. - */ +/** Explain query plan in JSON format. 
*/ public class JsonExplanationFormat implements ExplanationFormat { - /** - * JSONObject stack to track the path from root to current ndoe - */ - private final Deque jsonObjStack = new ArrayDeque<>(); - - /** - * Indentation in final output string - */ - private final int indent; - - public JsonExplanationFormat(int indent) { - this.indent = indent; - } - - @Override - public void prepare(Map kvs) { - jsonObjStack.push(new JSONObject(kvs)); - } - - @Override - public void start(String name) { - JSONObject json = new JSONObject(); - jsonObjStack.peek().put(name, json); - jsonObjStack.push(json); - } - - @Override - public void explain(Object obj) { - JSONObject json = new JSONObject(obj); // JSONify using getter - jsonifyValueIfValidJson(json); - appendToArrayIfExist(nodeName(obj), json); - jsonObjStack.push(json); + /** JSONObject stack to track the path from root to current ndoe */ + private final Deque jsonObjStack = new ArrayDeque<>(); + + /** Indentation in final output string */ + private final int indent; + + public JsonExplanationFormat(int indent) { + this.indent = indent; + } + + @Override + public void prepare(Map kvs) { + jsonObjStack.push(new JSONObject(kvs)); + } + + @Override + public void start(String name) { + JSONObject json = new JSONObject(); + jsonObjStack.peek().put(name, json); + jsonObjStack.push(json); + } + + @Override + public void explain(Object obj) { + JSONObject json = new JSONObject(obj); // JSONify using getter + jsonifyValueIfValidJson(json); + appendToArrayIfExist(nodeName(obj), json); + jsonObjStack.push(json); + } + + @Override + public void end() { + jsonObjStack.pop(); + } + + @Override + public String toString() { + return jsonObjStack.pop().toString(indent); + } + + /** + * Trick to parse JSON in field getter due to missing support for custom processor in org.json. 
+ * And also because it's not appropriate to make getter aware of concrete format logic + */ + private void jsonifyValueIfValidJson(JSONObject json) { + for (String key : json.keySet()) { + try { + JSONObject jsonValue = new JSONObject(json.getString(key)); + json.put(key, jsonValue); + } catch (JSONException e) { + // Ignore value that is not a valid JSON. + } } - - @Override - public void end() { - jsonObjStack.pop(); - } - - @Override - public String toString() { - return jsonObjStack.pop().toString(indent); - } - - /** - * Trick to parse JSON in field getter due to missing support for custom processor - * in org.json. And also because it's not appropriate to make getter aware of concrete format logic - */ - private void jsonifyValueIfValidJson(JSONObject json) { - for (String key : json.keySet()) { - try { - JSONObject jsonValue = new JSONObject(json.getString(key)); - json.put(key, jsonValue); - } catch (JSONException e) { - // Ignore value that is not a valid JSON. - } - } + } + + private String nodeName(Object obj) { + return obj.toString(); // obj.getClass().getSimpleName(); + } + + /** Replace JSONObject by JSONArray if key is duplicate */ + private void appendToArrayIfExist(String name, JSONObject child) { + JSONObject parent = jsonObjStack.peek(); + Object otherChild = parent.opt(name); + if (otherChild == null) { + parent.put(name, child); + } else { + if (!(otherChild instanceof JSONArray)) { + parent.remove(name); + parent.append(name, otherChild); + } + parent.append(name, child); } - - private String nodeName(Object obj) { - return obj.toString(); //obj.getClass().getSimpleName(); - } - - /** - * Replace JSONObject by JSONArray if key is duplicate - */ - private void appendToArrayIfExist(String name, JSONObject child) { - JSONObject parent = jsonObjStack.peek(); - Object otherChild = parent.opt(name); - if (otherChild == null) { - parent.put(name, child); - } else { - if (!(otherChild instanceof JSONArray)) { - parent.remove(name); - 
parent.append(name, otherChild); - } - parent.append(name, child); - } - } - + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalOperator.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalOperator.java index 825af762f5..b814f1f563 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalOperator.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical; import java.util.Map; @@ -11,32 +10,29 @@ import org.opensearch.sql.legacy.query.planner.core.PlanNode; import org.opensearch.sql.legacy.query.planner.physical.PhysicalOperator; -/** - * Logical operator in logical plan tree. - */ +/** Logical operator in logical plan tree. */ public interface LogicalOperator extends PlanNode { - /** - * If current operator is no operation. It depends on specific internal state of operator - *

- * Ignore this field in explanation because all explainable operator are NOT no-op. - * - * @return true if NoOp - */ - @JSONPropertyIgnore - default boolean isNoOp() { - return false; - } - - /** - * Map logical operator to physical operators (possibly 1 to N mapping) - *

- * Note that generic type on PhysicalOperator[] would enforce all impl convert array to generic type array - * because generic type array is unable to be created directly. - * - * @param optimalOps optimal physical operators estimated so far - * @return list of physical operator - */ - PhysicalOperator[] toPhysical(Map> optimalOps); + /** + * If current operator is no operation. It depends on specific internal state of operator + * + *

Ignore this field in explanation because all explainable operator are NOT no-op. + * + * @return true if NoOp + */ + @JSONPropertyIgnore + default boolean isNoOp() { + return false; + } + /** + * Map logical operator to physical operators (possibly 1 to N mapping) + * + *

Note that generic type on PhysicalOperator[] would enforce all impl convert array to generic + * type array because generic type array is unable to be created directly. + * + * @param optimalOps optimal physical operators estimated so far + * @return list of physical operator + */ + PhysicalOperator[] toPhysical(Map> optimalOps); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalPlan.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalPlan.java index 369da44e7f..05a797bbe0 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalPlan.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalPlan.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical; import static org.opensearch.sql.legacy.query.planner.logical.node.Project.Visitor; @@ -32,176 +31,154 @@ import org.opensearch.sql.legacy.query.planner.logical.rule.ProjectionPushDown; import org.opensearch.sql.legacy.query.planner.logical.rule.SelectionPushDown; -/** - * Logical query plan. - */ +/** Logical query plan. */ public class LogicalPlan implements Plan { - /** - * Planner configuration - */ - private final Config config; - - /** - * Parameters - */ - private final QueryParams params; - - /** - * Root node of logical query plan tree - */ - private final LogicalOperator root; - - /** - * Transformation rule - */ - private final List rules = Arrays.asList( - new SelectionPushDown(), //Enforce this run first to simplify Group. Avoid this order dependency in future. 
- new ProjectionPushDown() - ); - - public LogicalPlan(Config config, QueryParams params) { - this.config = config; - this.params = params; - this.root = buildPlanTree(); - } - - @Override - public void traverse(Visitor visitor) { - root.accept(visitor); - } - - @Override - public void optimize() { - for (LogicalPlanVisitor rule : rules) { - root.accept(rule); - } + /** Planner configuration */ + private final Config config; + + /** Parameters */ + private final QueryParams params; + + /** Root node of logical query plan tree */ + private final LogicalOperator root; + + /** Transformation rule */ + private final List rules = + Arrays.asList( + new SelectionPushDown(), // Enforce this run first to simplify Group. Avoid this order + // dependency in future. + new ProjectionPushDown()); + + public LogicalPlan(Config config, QueryParams params) { + this.config = config; + this.params = params; + this.root = buildPlanTree(); + } + + @Override + public void traverse(Visitor visitor) { + root.accept(visitor); + } + + @Override + public void optimize() { + for (LogicalPlanVisitor rule : rules) { + root.accept(rule); } - - /** - * Build logical plan tree - */ - private LogicalOperator buildPlanTree() { - return project( - top( - sort( - filter( - join( - top( - group(params.firstRequest(), config.scrollPageSize()[0]), - config.tableLimit1() - ), - top( - group(params.secondRequest(), config.scrollPageSize()[1]), - config.tableLimit2() - ) - ) - ) - ), config.totalLimit() - ) - ); - } - - /** - * Create projection operator - */ - private LogicalOperator project(LogicalOperator next) { - Project project = new Project(next); - for (TableInJoinRequestBuilder req : getRequests()) { - if (req.getOriginalSelect().isSelectAll()) { - project.projectAll(req.getAlias()); - } else { - project.project(req.getAlias(), req.getReturnedFields()); - } - } - return project; + } + + /** Build logical plan tree */ + private LogicalOperator buildPlanTree() { + return project( + top( + sort( + 
filter( + join( + top( + group(params.firstRequest(), config.scrollPageSize()[0]), + config.tableLimit1()), + top( + group(params.secondRequest(), config.scrollPageSize()[1]), + config.tableLimit2())))), + config.totalLimit())); + } + + /** Create projection operator */ + private LogicalOperator project(LogicalOperator next) { + Project project = new Project(next); + for (TableInJoinRequestBuilder req : getRequests()) { + if (req.getOriginalSelect().isSelectAll()) { + project.projectAll(req.getAlias()); + } else { + project.project(req.getAlias(), req.getReturnedFields()); + } } + return project; + } - private LogicalOperator top(LogicalOperator next, int limit) { - if (limit > 0) { - return new Top(next, limit); - } - return next; + private LogicalOperator top(LogicalOperator next, int limit) { + if (limit > 0) { + return new Top(next, limit); } - - private LogicalOperator sort(LogicalOperator next) { - List orderByColNames = new ArrayList<>(); - String orderByType = ""; - for (TableInJoinRequestBuilder request : getRequests()) { - List orderBys = request.getOriginalSelect().getOrderBys(); - if (orderBys != null) { - String tableAlias = request.getAlias() == null ? "" : request.getAlias() + "."; - for (Order orderBy : orderBys) { - orderByColNames.add(tableAlias + orderBy.getName()); - orderByType = orderBy.getType(); - } - } - } - - if (orderByColNames.isEmpty()) { - return next; + return next; + } + + private LogicalOperator sort(LogicalOperator next) { + List orderByColNames = new ArrayList<>(); + String orderByType = ""; + for (TableInJoinRequestBuilder request : getRequests()) { + List orderBys = request.getOriginalSelect().getOrderBys(); + if (orderBys != null) { + String tableAlias = request.getAlias() == null ? 
"" : request.getAlias() + "."; + for (Order orderBy : orderBys) { + orderByColNames.add(tableAlias + orderBy.getName()); + orderByType = orderBy.getType(); } - return new Sort(next, orderByColNames, orderByType); + } } - private LogicalOperator filter(LogicalOperator next) { - Filter filter = new Filter(next, getRequests()); - if (filter.isNoOp()) { - return next; - } - return filter; + if (orderByColNames.isEmpty()) { + return next; } + return new Sort(next, orderByColNames, orderByType); + } - private LogicalOperator join(LogicalOperator left, LogicalOperator right) { - return new Join( - left, right, - params.joinType(), - groupJoinConditionByOr(), - config.blockSize(), - config.isUseTermsFilterOptimization() - ); + private LogicalOperator filter(LogicalOperator next) { + Filter filter = new Filter(next, getRequests()); + if (filter.isNoOp()) { + return next; } - - /** - * Group conditions in ON by OR because it makes hash table group be required too - */ - private JoinCondition groupJoinConditionByOr() { - String leftTableAlias = params.firstRequest().getAlias(); - String rightTableAlias = params.secondRequest().getAlias(); - - JoinCondition orCond; - if (params.joinConditions().isEmpty()) { - orCond = new JoinCondition(leftTableAlias, rightTableAlias, 0); - } else { - orCond = new JoinCondition(leftTableAlias, rightTableAlias, params.joinConditions().size()); - for (int i = 0; i < params.joinConditions().size(); i++) { - List> andCond = params.joinConditions().get(i); - String[] leftColumnNames = new String[andCond.size()]; - String[] rightColumnNames = new String[andCond.size()]; - - for (int j = 0; j < andCond.size(); j++) { - Map.Entry cond = andCond.get(j); - leftColumnNames[j] = cond.getKey().getName(); - rightColumnNames[j] = cond.getValue().getName(); - } - - orCond.addLeftColumnNames(i, leftColumnNames); - orCond.addRightColumnNames(i, rightColumnNames); - } + return filter; + } + + private LogicalOperator join(LogicalOperator left, LogicalOperator 
right) { + return new Join( + left, + right, + params.joinType(), + groupJoinConditionByOr(), + config.blockSize(), + config.isUseTermsFilterOptimization()); + } + + /** Group conditions in ON by OR because it makes hash table group be required too */ + private JoinCondition groupJoinConditionByOr() { + String leftTableAlias = params.firstRequest().getAlias(); + String rightTableAlias = params.secondRequest().getAlias(); + + JoinCondition orCond; + if (params.joinConditions().isEmpty()) { + orCond = new JoinCondition(leftTableAlias, rightTableAlias, 0); + } else { + orCond = new JoinCondition(leftTableAlias, rightTableAlias, params.joinConditions().size()); + for (int i = 0; i < params.joinConditions().size(); i++) { + List> andCond = params.joinConditions().get(i); + String[] leftColumnNames = new String[andCond.size()]; + String[] rightColumnNames = new String[andCond.size()]; + + for (int j = 0; j < andCond.size(); j++) { + Map.Entry cond = andCond.get(j); + leftColumnNames[j] = cond.getKey().getName(); + rightColumnNames[j] = cond.getValue().getName(); } - return orCond; - } - private LogicalOperator group(TableInJoinRequestBuilder request, int pageSize) { - return new Group(new TableScan(request, pageSize)); + orCond.addLeftColumnNames(i, leftColumnNames); + orCond.addRightColumnNames(i, rightColumnNames); + } } + return orCond; + } - private List getRequests() { - return Arrays.asList(params.firstRequest(), params.secondRequest()); - } + private LogicalOperator group(TableInJoinRequestBuilder request, int pageSize) { + return new Group(new TableScan(request, pageSize)); + } - private List map(Collection source, Function func) { - return source.stream().map(func).collect(Collectors.toList()); - } + private List getRequests() { + return Arrays.asList(params.firstRequest(), params.secondRequest()); + } + private List map(Collection source, Function func) { + return source.stream().map(func).collect(Collectors.toList()); + } } diff --git 
a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalPlanVisitor.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalPlanVisitor.java index b779242a09..ef9e1a8d93 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalPlanVisitor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/LogicalPlanVisitor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical; import org.opensearch.sql.legacy.query.planner.core.PlanNode; @@ -21,93 +20,86 @@ */ public interface LogicalPlanVisitor extends Visitor { - @Override - default boolean visit(PlanNode op) { - if (op instanceof Project) { - return visit((Project) op); - } else if (op instanceof Filter) { - return visit((Filter) op); - } else if (op instanceof Join) { - return visit((Join) op); - } else if (op instanceof Group) { - return visit((Group) op); - } else if (op instanceof TableScan) { - return visit((TableScan) op); - } else if (op instanceof Top) { - return visit((Top) op); - } else if (op instanceof Sort) { - return visit((Sort) op); - } - throw new IllegalArgumentException("Unknown operator type: " + op); - } - - @Override - default void endVisit(PlanNode op) { - if (op instanceof Project) { - endVisit((Project) op); - } else if (op instanceof Filter) { - endVisit((Filter) op); - } else if (op instanceof Join) { - endVisit((Join) op); - } else if (op instanceof Group) { - endVisit((Group) op); - } else if (op instanceof TableScan) { - endVisit((TableScan) op); - } else if (op instanceof Top) { - endVisit((Top) op); - } else if (op instanceof Sort) { - endVisit((Sort) op); - } else { - throw new IllegalArgumentException("Unknown operator type: " + op); - } - } - - default boolean visit(Project project) { - return true; - } - - default void endVisit(Project project) { - } - - default boolean visit(Filter filter) { - return true; - } - - 
default void endVisit(Filter filter) { - } - - default boolean visit(Join join) { - return true; - } - - default void endVisit(Join join) { - } - - default boolean visit(Group group) { - return true; - } - - default void endVisit(Group group) { - } - - default boolean visit(TableScan scan) { - return true; - } - - default void endVisit(TableScan scan) { - } - - default boolean visit(Top top) { - return true; - } - - default void endVisit(Top top) { - } - - default boolean visit(Sort sort) { - return true; - } - - default void endVisit(Sort sort) { - } + @Override + default boolean visit(PlanNode op) { + if (op instanceof Project) { + return visit((Project) op); + } else if (op instanceof Filter) { + return visit((Filter) op); + } else if (op instanceof Join) { + return visit((Join) op); + } else if (op instanceof Group) { + return visit((Group) op); + } else if (op instanceof TableScan) { + return visit((TableScan) op); + } else if (op instanceof Top) { + return visit((Top) op); + } else if (op instanceof Sort) { + return visit((Sort) op); + } + throw new IllegalArgumentException("Unknown operator type: " + op); + } + + @Override + default void endVisit(PlanNode op) { + if (op instanceof Project) { + endVisit((Project) op); + } else if (op instanceof Filter) { + endVisit((Filter) op); + } else if (op instanceof Join) { + endVisit((Join) op); + } else if (op instanceof Group) { + endVisit((Group) op); + } else if (op instanceof TableScan) { + endVisit((TableScan) op); + } else if (op instanceof Top) { + endVisit((Top) op); + } else if (op instanceof Sort) { + endVisit((Sort) op); + } else { + throw new IllegalArgumentException("Unknown operator type: " + op); + } + } + + default boolean visit(Project project) { + return true; + } + + default void endVisit(Project project) {} + + default boolean visit(Filter filter) { + return true; + } + + default void endVisit(Filter filter) {} + + default boolean visit(Join join) { + return true; + } + + default void endVisit(Join 
join) {} + + default boolean visit(Group group) { + return true; + } + + default void endVisit(Group group) {} + + default boolean visit(TableScan scan) { + return true; + } + + default void endVisit(TableScan scan) {} + + default boolean visit(Top top) { + return true; + } + + default void endVisit(Top top) {} + + default boolean visit(Sort sort) { + return true; + } + + default void endVisit(Sort sort) {} } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Filter.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Filter.java index f5e3e40f2d..5d4423d67a 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Filter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Filter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical.node; import java.util.HashMap; @@ -16,59 +15,54 @@ import org.opensearch.sql.legacy.query.planner.logical.LogicalOperator; import org.opensearch.sql.legacy.query.planner.physical.PhysicalOperator; -/** - * Selection expression - */ +/** Selection expression */ public class Filter implements LogicalOperator { - private final LogicalOperator next; + private final LogicalOperator next; - /** - * Alias to WHERE clause mapping - */ - private final Map aliasWhereMap = new HashMap<>(); + /** Alias to WHERE clause mapping */ + private final Map aliasWhereMap = new HashMap<>(); - public Filter(LogicalOperator next, List tables) { - this.next = next; - - for (TableInJoinRequestBuilder table : tables) { - Select select = table.getOriginalSelect(); - if (select.getWhere() != null) { - aliasWhereMap.put(table.getAlias(), select.getWhere()); - } - } - } + public Filter(LogicalOperator next, List tables) { + this.next = next; - public Filter(LogicalOperator next) { - this.next = next; + for (TableInJoinRequestBuilder table : tables) { + Select select = 
table.getOriginalSelect(); + if (select.getWhere() != null) { + aliasWhereMap.put(table.getAlias(), select.getWhere()); + } } + } - @Override - public PlanNode[] children() { - return new PlanNode[]{next}; - } + public Filter(LogicalOperator next) { + this.next = next; + } - @Override - public boolean isNoOp() { - return aliasWhereMap.isEmpty(); - } + @Override + public PlanNode[] children() { + return new PlanNode[] {next}; + } - @Override - public PhysicalOperator[] toPhysical(Map> optimalOps) { - // Always no-op after push down, skip it by returning next - return new PhysicalOperator[]{optimalOps.get(next)}; - } + @Override + public boolean isNoOp() { + return aliasWhereMap.isEmpty(); + } - public void pushDown(String tableAlias, Filter pushedDownFilter) { - Where pushedDownWhere = pushedDownFilter.aliasWhereMap.remove(tableAlias); - if (pushedDownWhere != null) { - aliasWhereMap.put(tableAlias, pushedDownWhere); - } - } + @Override + public PhysicalOperator[] toPhysical(Map> optimalOps) { + // Always no-op after push down, skip it by returning next + return new PhysicalOperator[] {optimalOps.get(next)}; + } - @Override - public String toString() { - return "Filter [ conditions=" + aliasWhereMap.values() + " ]"; + public void pushDown(String tableAlias, Filter pushedDownFilter) { + Where pushedDownWhere = pushedDownFilter.aliasWhereMap.remove(tableAlias); + if (pushedDownWhere != null) { + aliasWhereMap.put(tableAlias, pushedDownWhere); } + } + @Override + public String toString() { + return "Filter [ conditions=" + aliasWhereMap.values() + " ]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Group.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Group.java index 5ae9ddc0a2..da94ae74da 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Group.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Group.java @@ -3,7 +3,6 
@@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical.node; import java.util.Map; @@ -11,66 +10,57 @@ import org.opensearch.sql.legacy.query.planner.logical.LogicalOperator; import org.opensearch.sql.legacy.query.planner.physical.PhysicalOperator; -/** - * Project-Filter-TableScan group for push down optimization convenience. - */ +/** Project-Filter-TableScan group for push down optimization convenience. */ public class Group implements LogicalOperator { - /** - * Optional pushed down projection - */ - private Project project; - - /** - * Optional pushed down filter (selection) - */ - private Filter filter; + /** Optional pushed down projection */ + private Project project; - /** - * Required table scan operator - */ - private final TableScan tableScan; + /** Optional pushed down filter (selection) */ + private Filter filter; + /** Required table scan operator */ + private final TableScan tableScan; - public Group(TableScan tableScan) { - this.tableScan = tableScan; - this.filter = new Filter(tableScan); - this.project = new Project<>(filter); - } + public Group(TableScan tableScan) { + this.tableScan = tableScan; + this.filter = new Filter(tableScan); + this.project = new Project<>(filter); + } - @Override - public boolean isNoOp() { - return true; - } + @Override + public boolean isNoOp() { + return true; + } - @Override - public PhysicalOperator[] toPhysical(Map> optimalOps) { - return tableScan.toPhysical(optimalOps); - } + @Override + public PhysicalOperator[] toPhysical(Map> optimalOps) { + return tableScan.toPhysical(optimalOps); + } - @Override - public PlanNode[] children() { - return new PlanNode[]{topNonNullNode()}; - } + @Override + public PlanNode[] children() { + return new PlanNode[] {topNonNullNode()}; + } - private PlanNode topNonNullNode() { - return project != null ? project : (filter != null ? filter : tableScan); - } + private PlanNode topNonNullNode() { + return project != null ? 
project : (filter != null ? filter : tableScan); + } - public String id() { - return tableScan.getTableAlias(); - } + public String id() { + return tableScan.getTableAlias(); + } - public void pushDown(Project project) { - this.project.pushDown(id(), project); - } + public void pushDown(Project project) { + this.project.pushDown(id(), project); + } - public void pushDown(Filter filter) { - this.filter.pushDown(id(), filter); - } + public void pushDown(Filter filter) { + this.filter.pushDown(id(), filter); + } - @Override - public String toString() { - return "Group"; - } + @Override + public String toString() { + return "Group"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Join.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Join.java index ae833ca580..686bf4c089 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Join.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Join.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical.node; import static com.alibaba.druid.sql.ast.statement.SQLJoinTableSource.JoinType; @@ -15,162 +14,141 @@ import org.opensearch.sql.legacy.query.planner.physical.node.join.BlockHashJoin; import org.opensearch.sql.legacy.query.planner.resource.blocksize.BlockSize; -/** - * Join expression - */ +/** Join expression */ public class Join implements LogicalOperator { - private final LogicalOperator left; - private final LogicalOperator right; - - /** - * Join type, ex inner join, left join - */ - private final JoinType type; - - /** - * Joined columns in ON condition - */ - private final JoinCondition condition; - - /** - * Block size calculator - */ - private final BlockSize blockSize; - - /** - * Use terms filter optimization or not - */ - private final boolean isUseTermsFilterOptimization; - - - public Join(LogicalOperator left, - 
LogicalOperator right, - JoinType joinType, - JoinCondition condition, - BlockSize blockSize, - boolean isUseTermsFilterOptimization) { - this.left = left; - this.right = right; - this.type = joinType; - this.condition = condition; - this.blockSize = blockSize; - this.isUseTermsFilterOptimization = isUseTermsFilterOptimization; + private final LogicalOperator left; + private final LogicalOperator right; + + /** Join type, ex inner join, left join */ + private final JoinType type; + + /** Joined columns in ON condition */ + private final JoinCondition condition; + + /** Block size calculator */ + private final BlockSize blockSize; + + /** Use terms filter optimization or not */ + private final boolean isUseTermsFilterOptimization; + + public Join( + LogicalOperator left, + LogicalOperator right, + JoinType joinType, + JoinCondition condition, + BlockSize blockSize, + boolean isUseTermsFilterOptimization) { + this.left = left; + this.right = right; + this.type = joinType; + this.condition = condition; + this.blockSize = blockSize; + this.isUseTermsFilterOptimization = isUseTermsFilterOptimization; + } + + @Override + public PlanNode[] children() { + return new PlanNode[] {left, right}; + } + + @Override + public PhysicalOperator[] toPhysical(Map> optimalOps) { + PhysicalOperator optimalLeft = optimalOps.get(left); + PhysicalOperator optimalRight = optimalOps.get(right); + return new PhysicalOperator[] { + new BlockHashJoin<>( + optimalLeft, optimalRight, type, condition, blockSize, isUseTermsFilterOptimization) + }; + } + + public JoinCondition conditions() { + return condition; + } + + @Override + public String toString() { + return "Join [ conditions=" + condition + " type=" + type + " ]"; + } + + /** + * Join condition in ON clause grouped by OR. + * + *

For example, "ON (a.name = b.id AND a.age = b.age) OR a.location = b.address" => input list: + * [ [ (a.name, b.id), (a.age, b.age) ], [ (a.location, b.address) ] ] + * + *

=> JoinCondition: leftTableAlias: "a", rightTableAlias: "b" leftColumnNames: [ ["name", + * "age"], ["location"] ] rightColumnNames: [ ["id", "age"], ["address" ] ] + */ + public static class JoinCondition { + + private final String leftTableAlias; + private final String rightTableAlias; + + private final String[][] leftColumnNames; + private final String[][] rightColumnNames; + + public JoinCondition(String leftTableAlias, String rightTableAlias, int groupSize) { + this.leftTableAlias = leftTableAlias; + this.rightTableAlias = rightTableAlias; + this.leftColumnNames = new String[groupSize][]; + this.rightColumnNames = new String[groupSize][]; } - @Override - public PlanNode[] children() { - return new PlanNode[]{left, right}; + public void addLeftColumnNames(int groupNum, String[] colNames) { + leftColumnNames[groupNum] = colNames; } - @Override - public PhysicalOperator[] toPhysical(Map> optimalOps) { - PhysicalOperator optimalLeft = optimalOps.get(left); - PhysicalOperator optimalRight = optimalOps.get(right); - return new PhysicalOperator[]{ - new BlockHashJoin<>( - optimalLeft, optimalRight, type, condition, - blockSize, isUseTermsFilterOptimization - ) - }; + public void addRightColumnNames(int groupNum, String[] colNames) { + rightColumnNames[groupNum] = colNames; } - public JoinCondition conditions() { - return condition; + public int groupSize() { + return leftColumnNames.length; } - @Override - public String toString() { - return "Join [ conditions=" + condition + " type=" + type + " ]"; + public String leftTableAlias() { + return leftTableAlias; } - /** - * Join condition in ON clause grouped by OR. - *

- * For example, "ON (a.name = b.id AND a.age = b.age) OR a.location = b.address" - * => input list: [ - * [ (a.name, b.id), (a.age, b.age) ], - * [ (a.location, b.address) ] - * ] - *

- * => JoinCondition: - * leftTableAlias: "a", rightTableAlias: "b" - * leftColumnNames: [ ["name", "age"], ["location"] ] - * rightColumnNames: [ ["id", "age"], ["address" ] ] - */ - public static class JoinCondition { - - private final String leftTableAlias; - private final String rightTableAlias; - - private final String[][] leftColumnNames; - private final String[][] rightColumnNames; - - public JoinCondition(String leftTableAlias, - String rightTableAlias, - int groupSize) { - this.leftTableAlias = leftTableAlias; - this.rightTableAlias = rightTableAlias; - this.leftColumnNames = new String[groupSize][]; - this.rightColumnNames = new String[groupSize][]; - } - - public void addLeftColumnNames(int groupNum, String[] colNames) { - leftColumnNames[groupNum] = colNames; - } - - public void addRightColumnNames(int groupNum, String[] colNames) { - rightColumnNames[groupNum] = colNames; - } - - public int groupSize() { - return leftColumnNames.length; - } - - public String leftTableAlias() { - return leftTableAlias; - } + public String rightTableAlias() { + return rightTableAlias; + } - public String rightTableAlias() { - return rightTableAlias; - } + public String[] leftColumnNames(int groupNum) { + return leftColumnNames[groupNum]; + } - public String[] leftColumnNames(int groupNum) { - return leftColumnNames[groupNum]; - } + public String[] rightColumnNames(int groupNum) { + return rightColumnNames[groupNum]; + } - public String[] rightColumnNames(int groupNum) { - return rightColumnNames[groupNum]; + @Override + public String toString() { + StringBuilder str = new StringBuilder(); + int groupSize = leftColumnNames.length; + for (int i = 0; i < groupSize; i++) { + if (i > 0) { + str.append(" OR "); } - @Override - public String toString() { - StringBuilder str = new StringBuilder(); - int groupSize = leftColumnNames.length; - for (int i = 0; i < groupSize; i++) { - if (i > 0) { - str.append(" OR "); - } - - str.append("( "); - int condSize = 
leftColumnNames[i].length; - for (int j = 0; j < condSize; j++) { - if (j > 0) { - str.append(" AND "); - } - str.append(leftTableAlias). - append("."). - append(leftColumnNames[i][j]). - append(" = "). - append(rightTableAlias). - append("."). - append(rightColumnNames[i][j]); - } - str.append(" )"); - } - return str.toString(); + str.append("( "); + int condSize = leftColumnNames[i].length; + for (int j = 0; j < condSize; j++) { + if (j > 0) { + str.append(" AND "); + } + str.append(leftTableAlias) + .append(".") + .append(leftColumnNames[i][j]) + .append(" = ") + .append(rightTableAlias) + .append(".") + .append(rightColumnNames[i][j]); } - + str.append(" )"); + } + return str.toString(); } - + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Project.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Project.java index bd24564de2..4226744f1b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Project.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Project.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical.node; import com.google.common.collect.HashMultimap; @@ -23,126 +22,116 @@ import org.opensearch.sql.legacy.query.planner.physical.Row; import org.opensearch.sql.legacy.query.planner.physical.estimation.Cost; -/** - * Projection expression - */ +/** Projection expression */ public class Project implements LogicalOperator, PhysicalOperator { - private static final Logger LOG = LogManager.getLogger(); - - private final PlanNode next; + private static final Logger LOG = LogManager.getLogger(); - /** - * All columns being projected in SELECT in each table - */ - private final Multimap tableAliasColumns; + private final PlanNode next; - /** - * All columns full name (tableAlias.colName) to alias mapping - */ - private final Map fullNameAlias; + /** All 
columns being projected in SELECT in each table */ + private final Multimap tableAliasColumns; + /** All columns full name (tableAlias.colName) to alias mapping */ + private final Map fullNameAlias; - @SuppressWarnings("unchecked") - public Project(PlanNode next) { - this(next, HashMultimap.create()); - } + @SuppressWarnings("unchecked") + public Project(PlanNode next) { + this(next, HashMultimap.create()); + } - @SuppressWarnings("unchecked") - public Project(PlanNode next, Multimap tableAliasToColumns) { - this.next = next; - this.tableAliasColumns = tableAliasToColumns; - this.fullNameAlias = fullNameAndAlias(); - } - - @Override - public boolean isNoOp() { - return tableAliasColumns.isEmpty(); - } - - @Override - public PlanNode[] children() { - return new PlanNode[]{next}; - } + @SuppressWarnings("unchecked") + public Project(PlanNode next, Multimap tableAliasToColumns) { + this.next = next; + this.tableAliasColumns = tableAliasToColumns; + this.fullNameAlias = fullNameAndAlias(); + } - @Override - public PhysicalOperator[] toPhysical(Map> optimalOps) { - if (!(next instanceof LogicalOperator)) { - throw new IllegalStateException("Only logical operator can perform this toPhysical() operation"); - } - return new PhysicalOperator[]{ - new Project(optimalOps.get(next), tableAliasColumns) // Create physical Project instance - }; - } + @Override + public boolean isNoOp() { + return tableAliasColumns.isEmpty(); + } - @Override - public Cost estimate() { - return new Cost(); - } + @Override + public PlanNode[] children() { + return new PlanNode[] {next}; + } - @Override - public boolean hasNext() { - return ((PhysicalOperator) next).hasNext(); + @Override + public PhysicalOperator[] toPhysical(Map> optimalOps) { + if (!(next instanceof LogicalOperator)) { + throw new IllegalStateException( + "Only logical operator can perform this toPhysical() operation"); } - - @SuppressWarnings("unchecked") - @Override - public Row next() { - Row row = ((PhysicalOperator) 
this.next).next(); - - /* - * Empty means SELECT * which means retain all fields from both tables - * Because push down is always applied, only limited support for this. - */ - if (!fullNameAlias.isEmpty()) { - row.retain(fullNameAlias); - } - - LOG.trace("Projected row by fields {}: {}", tableAliasColumns, row); - return row; + return new PhysicalOperator[] { + new Project(optimalOps.get(next), tableAliasColumns) // Create physical Project instance + }; + } + + @Override + public Cost estimate() { + return new Cost(); + } + + @Override + public boolean hasNext() { + return ((PhysicalOperator) next).hasNext(); + } + + @SuppressWarnings("unchecked") + @Override + public Row next() { + Row row = ((PhysicalOperator) this.next).next(); + + /* + * Empty means SELECT * which means retain all fields from both tables + * Because push down is always applied, only limited support for this. + */ + if (!fullNameAlias.isEmpty()) { + row.retain(fullNameAlias); } - public void project(String tableAlias, Collection columns) { - tableAliasColumns.putAll(tableAlias, columns); - } + LOG.trace("Projected row by fields {}: {}", tableAliasColumns, row); + return row; + } - public void projectAll(String tableAlias) { - tableAliasColumns.put(tableAlias, new Field("*", "")); - } + public void project(String tableAlias, Collection columns) { + tableAliasColumns.putAll(tableAlias, columns); + } - public void forEach(BiConsumer> action) { - tableAliasColumns.asMap().forEach(action); - } + public void projectAll(String tableAlias) { + tableAliasColumns.put(tableAlias, new Field("*", "")); + } - public void pushDown(String tableAlias, Project pushedDownProj) { - Collection columns = pushedDownProj.tableAliasColumns.get(tableAlias); - if (columns != null) { - tableAliasColumns.putAll(tableAlias, columns); - } - } + public void forEach(BiConsumer> action) { + tableAliasColumns.asMap().forEach(action); + } - /** - * Return mapping from column full name ("e.age") and alias ("a" in "SELECT e.age AS 
a") - */ - private Map fullNameAndAlias() { - Map fullNamesAlias = new HashMap<>(); - forEach( - (tableAlias, fields) -> { - for (Field field : fields) { - fullNamesAlias.put(tableAlias + "." + field.getName(), field.getAlias()); - } - } - ); - return fullNamesAlias; + public void pushDown(String tableAlias, Project pushedDownProj) { + Collection columns = pushedDownProj.tableAliasColumns.get(tableAlias); + if (columns != null) { + tableAliasColumns.putAll(tableAlias, columns); } - - @Override - public String toString() { - List colStrs = new ArrayList<>(); - for (Map.Entry entry : tableAliasColumns.entries()) { - colStrs.add(entry.getKey() + "." + entry.getValue().getName()); - } - return "Project [ columns=[" + String.join(", ", colStrs) + "] ]"; + } + + /** Return mapping from column full name ("e.age") and alias ("a" in "SELECT e.age AS a") */ + private Map fullNameAndAlias() { + Map fullNamesAlias = new HashMap<>(); + forEach( + (tableAlias, fields) -> { + for (Field field : fields) { + fullNamesAlias.put(tableAlias + "." + field.getName(), field.getAlias()); + } + }); + return fullNamesAlias; + } + + @Override + public String toString() { + List colStrs = new ArrayList<>(); + for (Map.Entry entry : tableAliasColumns.entries()) { + colStrs.add(entry.getKey() + "." 
+ entry.getValue().getName()); } - + return "Project [ columns=[" + String.join(", ", colStrs) + "] ]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Sort.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Sort.java index 670be71de5..f9033ce90f 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Sort.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Sort.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical.node; import java.util.List; @@ -13,45 +12,37 @@ import org.opensearch.sql.legacy.query.planner.physical.PhysicalOperator; import org.opensearch.sql.legacy.query.planner.physical.node.sort.QuickSort; -/** - * Logical operator for Sort. - */ +/** Logical operator for Sort. */ public class Sort implements LogicalOperator { - private final LogicalOperator next; - - /** - * Column name list in ORDER BY - */ - private final List orderByColNames; - - /** - * Order by type, ex. ASC, DESC - */ - private final String orderByType; + private final LogicalOperator next; + /** Column name list in ORDER BY */ + private final List orderByColNames; - public Sort(LogicalOperator next, List orderByColNames, String orderByType) { - this.next = next; - this.orderByColNames = orderByColNames; - this.orderByType = orderByType.toUpperCase(); - } + /** Order by type, ex. 
ASC, DESC */ + private final String orderByType; - @Override - public PlanNode[] children() { - return new PlanNode[]{next}; - } + public Sort(LogicalOperator next, List orderByColNames, String orderByType) { + this.next = next; + this.orderByColNames = orderByColNames; + this.orderByType = orderByType.toUpperCase(); + } - @Override - public PhysicalOperator[] toPhysical(Map> optimalOps) { - return new PhysicalOperator[]{ - new QuickSort<>(optimalOps.get(next), orderByColNames, orderByType) - }; - } + @Override + public PlanNode[] children() { + return new PlanNode[] {next}; + } - @Override - public String toString() { - return "Sort [ columns=" + orderByColNames + " order=" + orderByType + " ]"; - } + @Override + public PhysicalOperator[] toPhysical(Map> optimalOps) { + return new PhysicalOperator[] { + new QuickSort<>(optimalOps.get(next), orderByColNames, orderByType) + }; + } + @Override + public String toString() { + return "Sort [ columns=" + orderByColNames + " order=" + orderByType + " ]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/TableScan.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/TableScan.java index 466779faae..16af199ed7 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/TableScan.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/TableScan.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical.node; import java.util.Map; @@ -13,54 +12,44 @@ import org.opensearch.sql.legacy.query.planner.physical.PhysicalOperator; import org.opensearch.sql.legacy.query.planner.physical.node.scroll.Scroll; -/** - * Table scan - */ +/** Table scan */ public class TableScan implements LogicalOperator { - /** - * Request builder for the table - */ - private final TableInJoinRequestBuilder request; - - /** - * Page size for physical operator - */ - 
private final int pageSize; - - public TableScan(TableInJoinRequestBuilder request, int pageSize) { - this.request = request; - this.pageSize = pageSize; - } + /** Request builder for the table */ + private final TableInJoinRequestBuilder request; - @Override - public PlanNode[] children() { - return new PlanNode[0]; - } + /** Page size for physical operator */ + private final int pageSize; - @Override - public PhysicalOperator[] toPhysical(Map> optimalOps) { - return new PhysicalOperator[]{ - new Scroll(request, pageSize) - }; - } + public TableScan(TableInJoinRequestBuilder request, int pageSize) { + this.request = request; + this.pageSize = pageSize; + } - @Override - public String toString() { - return "TableScan"; - } + @Override + public PlanNode[] children() { + return new PlanNode[0]; + } + @Override + public PhysicalOperator[] toPhysical(Map> optimalOps) { + return new PhysicalOperator[] {new Scroll(request, pageSize)}; + } - /********************************************* - * Getters for Explain - *********************************************/ + @Override + public String toString() { + return "TableScan"; + } - public String getTableAlias() { - return request.getAlias(); - } + /********************************************* + * Getters for Explain + *********************************************/ - public String getTableName() { - return request.getOriginalSelect().getFrom().get(0).getIndex(); - } + public String getTableAlias() { + return request.getAlias(); + } + public String getTableName() { + return request.getOriginalSelect().getFrom().get(0).getIndex(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Top.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Top.java index e39f36ed5a..a484dc8a6a 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Top.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/node/Top.java @@ 
-3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical.node; import java.util.Map; @@ -20,52 +19,51 @@ */ public class Top implements LogicalOperator, PhysicalOperator { - private final PlanNode next; + private final PlanNode next; - /** - * Number of rows to return in total - */ - private int count; + /** Number of rows to return in total */ + private int count; - @SuppressWarnings("unchecked") - public Top(PlanNode next, int count) { - this.next = next; - this.count = count; - } + @SuppressWarnings("unchecked") + public Top(PlanNode next, int count) { + this.next = next; + this.count = count; + } - @Override - public PlanNode[] children() { - return new PlanNode[]{next}; - } + @Override + public PlanNode[] children() { + return new PlanNode[] {next}; + } - @SuppressWarnings("unchecked") - @Override - public boolean hasNext() { - return count > 0 && ((PhysicalOperator) next).hasNext(); - } + @SuppressWarnings("unchecked") + @Override + public boolean hasNext() { + return count > 0 && ((PhysicalOperator) next).hasNext(); + } - @SuppressWarnings("unchecked") - @Override - public Row next() { - count--; - return ((PhysicalOperator) next).next(); - } + @SuppressWarnings("unchecked") + @Override + public Row next() { + count--; + return ((PhysicalOperator) next).next(); + } - @Override - public PhysicalOperator[] toPhysical(Map> optimalOps) { - if (!(next instanceof LogicalOperator)) { - throw new IllegalStateException("Only logical operator can perform this toPhysical() operation"); - } - return new PhysicalOperator[]{new Top<>(optimalOps.get(next), count)}; + @Override + public PhysicalOperator[] toPhysical(Map> optimalOps) { + if (!(next instanceof LogicalOperator)) { + throw new IllegalStateException( + "Only logical operator can perform this toPhysical() operation"); } + return new PhysicalOperator[] {new Top<>(optimalOps.get(next), count)}; + } - @Override - public Cost estimate() { - return new Cost(); - } 
+ @Override + public Cost estimate() { + return new Cost(); + } - @Override - public String toString() { - return "Top [ " + "count=" + count + " ]"; - } + @Override + public String toString() { + return "Top [ " + "count=" + count + " ]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/rule/ProjectionPushDown.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/rule/ProjectionPushDown.java index f5a3e28fce..5195894a75 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/rule/ProjectionPushDown.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/rule/ProjectionPushDown.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical.rule; import static java.util.stream.Collectors.toList; @@ -18,68 +17,54 @@ import org.opensearch.sql.legacy.query.planner.logical.node.Join; import org.opensearch.sql.legacy.query.planner.logical.node.Project; - -/** - * Projection push down optimization. - */ +/** Projection push down optimization. */ public class ProjectionPushDown implements LogicalPlanVisitor { - /** - * Project used to collect column names in SELECT, ON, ORDER BY... - */ - private final Project project = new Project(null); + /** Project used to collect column names in SELECT, ON, ORDER BY... 
*/ + private final Project project = new Project(null); - @Override - public boolean visit(Project project) { - pushDown(project); - return true; - } + @Override + public boolean visit(Project project) { + pushDown(project); + return true; + } - @Override - public boolean visit(Join join) { - pushDown(join.conditions()); - return true; - } + @Override + public boolean visit(Join join) { + pushDown(join.conditions()); + return true; + } - @Override - public boolean visit(Group group) { - if (!project.isNoOp()) { - group.pushDown(project); - } - return false; // avoid iterating operators in virtual Group + @Override + public boolean visit(Group group) { + if (!project.isNoOp()) { + group.pushDown(project); } + return false; // avoid iterating operators in virtual Group + } - /** - * Note that raw type Project cause generic type of forEach be erased at compile time - */ - private void pushDown(Project project) { - project.forEach(this::project); - } - - private void pushDown(JoinCondition orCond) { - for (int i = 0; i < orCond.groupSize(); i++) { - project( - orCond.leftTableAlias(), - columnNamesToFields(orCond.leftColumnNames(i)) - ); - project( - orCond.rightTableAlias(), - columnNamesToFields(orCond.rightColumnNames(i)) - ); - } - } + /** Note that raw type Project cause generic type of forEach be erased at compile time */ + private void pushDown(Project project) { + project.forEach(this::project); + } - private void project(String tableAlias, Collection columns) { - project.project(tableAlias, columns); // Bug: Field doesn't implement hashCode() which leads to duplicate + private void pushDown(JoinCondition orCond) { + for (int i = 0; i < orCond.groupSize(); i++) { + project(orCond.leftTableAlias(), columnNamesToFields(orCond.leftColumnNames(i))); + project(orCond.rightTableAlias(), columnNamesToFields(orCond.rightColumnNames(i))); } + } - /** - * Convert column name string to Field object with empty alias - */ - private List columnNamesToFields(String[] 
colNames) { - return Arrays.stream(colNames). - map(name -> new Field(name, null)). // Alias is useless for pushed down project - collect(toList()); - } + private void project(String tableAlias, Collection columns) { + project.project( + tableAlias, columns); // Bug: Field doesn't implement hashCode() which leads to duplicate + } + /** Convert column name string to Field object with empty alias */ + private List columnNamesToFields(String[] colNames) { + return Arrays.stream(colNames) + .map(name -> new Field(name, null)) + . // Alias is useless for pushed down project + collect(toList()); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/rule/SelectionPushDown.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/rule/SelectionPushDown.java index 61578f91b7..deae266afc 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/rule/SelectionPushDown.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/logical/rule/SelectionPushDown.java @@ -3,36 +3,32 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.logical.rule; import org.opensearch.sql.legacy.query.planner.logical.LogicalPlanVisitor; import org.opensearch.sql.legacy.query.planner.logical.node.Filter; import org.opensearch.sql.legacy.query.planner.logical.node.Group; -/** - * Push down selection (filter) - */ +/** Push down selection (filter) */ public class SelectionPushDown implements LogicalPlanVisitor { - /** - * Store the filter found in visit and reused to push down. - * It's not necessary to create a new one because no need to collect filter condition elsewhere - */ - private Filter filter; - - @Override - public boolean visit(Filter filter) { - this.filter = filter; - return true; + /** + * Store the filter found in visit and reused to push down. 
It's not necessary to create a new one + * because no need to collect filter condition elsewhere + */ + private Filter filter; + + @Override + public boolean visit(Filter filter) { + this.filter = filter; + return true; + } + + @Override + public boolean visit(Group group) { + if (filter != null && !filter.isNoOp()) { + group.pushDown(filter); } - - @Override - public boolean visit(Group group) { - if (filter != null && !filter.isNoOp()) { - group.pushDown(filter); - } - return false; // avoid iterating operators in virtual Group - } - + return false; // avoid iterating operators in virtual Group + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/PhysicalOperator.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/PhysicalOperator.java index 9271bae0d7..897beee3e9 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/PhysicalOperator.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/PhysicalOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical; import java.util.Iterator; @@ -11,40 +10,36 @@ import org.opensearch.sql.legacy.query.planner.core.PlanNode; import org.opensearch.sql.legacy.query.planner.physical.estimation.Cost; -/** - * Physical operator - */ +/** Physical operator */ public interface PhysicalOperator extends PlanNode, Iterator>, AutoCloseable { - /** - * Estimate the cost of current physical operator - * - * @return cost - */ - Cost estimate(); - - - /** - * Initialize operator. - * - * @param params exuecution parameters needed - */ - default void open(ExecuteParams params) throws Exception { - for (PlanNode node : children()) { - ((PhysicalOperator) node).open(params); - } + /** + * Estimate the cost of current physical operator + * + * @return cost + */ + Cost estimate(); + + /** + * Initialize operator. 
+ * + * @param params exuecution parameters needed + */ + default void open(ExecuteParams params) throws Exception { + for (PlanNode node : children()) { + ((PhysicalOperator) node).open(params); } - - - /** - * Close resources related to the operator. - * - * @throws Exception potential exception raised - */ - @Override - default void close() { - for (PlanNode node : children()) { - ((PhysicalOperator) node).close(); - } + } + + /** + * Close resources related to the operator. + * + * @throws Exception potential exception raised + */ + @Override + default void close() { + for (PlanNode node : children()) { + ((PhysicalOperator) node).close(); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/PhysicalPlan.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/PhysicalPlan.java index eac4e855b0..5a79c63838 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/PhysicalPlan.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/PhysicalPlan.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical; import java.util.ArrayList; @@ -18,81 +17,69 @@ import org.opensearch.sql.legacy.query.planner.physical.estimation.Estimation; import org.opensearch.sql.legacy.query.planner.resource.ResourceManager; -/** - * Physical plan - */ +/** Physical plan */ public class PhysicalPlan implements Plan { - private static final Logger LOG = LogManager.getLogger(); + private static final Logger LOG = LogManager.getLogger(); - /** - * Optimized logical plan that being ready for physical planning - */ - private final LogicalPlan logicalPlan; + /** Optimized logical plan that being ready for physical planning */ + private final LogicalPlan logicalPlan; - /** - * Root of physical plan tree - */ - private PhysicalOperator root; + /** Root of physical plan tree */ + private PhysicalOperator root; - public 
PhysicalPlan(LogicalPlan logicalPlan) { - this.logicalPlan = logicalPlan; - } + public PhysicalPlan(LogicalPlan logicalPlan) { + this.logicalPlan = logicalPlan; + } - @Override - public void traverse(Visitor visitor) { - if (root != null) { - root.accept(visitor); - } + @Override + public void traverse(Visitor visitor) { + if (root != null) { + root.accept(visitor); } - - @Override - public void optimize() { - Estimation estimation = new Estimation<>(); - logicalPlan.traverse(estimation); - root = estimation.optimalPlan(); + } + + @Override + public void optimize() { + Estimation estimation = new Estimation<>(); + logicalPlan.traverse(estimation); + root = estimation.optimalPlan(); + } + + /** Execute physical plan after verifying if system is healthy at the moment */ + public List execute(ExecuteParams params) { + if (shouldReject(params)) { + throw new IllegalStateException("Query request rejected due to insufficient resource"); } - /** - * Execute physical plan after verifying if system is healthy at the moment - */ - public List execute(ExecuteParams params) { - if (shouldReject(params)) { - throw new IllegalStateException("Query request rejected due to insufficient resource"); - } - - try (PhysicalOperator op = root) { - return doExecutePlan(op, params); - } catch (Exception e) { - LOG.error("Error happened during execution", e); - // Runtime error or circuit break. Should we return partial result to customer? - throw new IllegalStateException("Error happened during execution", e); - } + try (PhysicalOperator op = root) { + return doExecutePlan(op, params); + } catch (Exception e) { + LOG.error("Error happened during execution", e); + // Runtime error or circuit break. Should we return partial result to customer? 
+ throw new IllegalStateException("Error happened during execution", e); } - - /** - * Reject physical plan execution of new query request if unhealthy - */ - private boolean shouldReject(ExecuteParams params) { - return !((ResourceManager) params.get(ExecuteParams.ExecuteParamType.RESOURCE_MANAGER)).isHealthy(); + } + + /** Reject physical plan execution of new query request if unhealthy */ + private boolean shouldReject(ExecuteParams params) { + return !((ResourceManager) params.get(ExecuteParams.ExecuteParamType.RESOURCE_MANAGER)) + .isHealthy(); + } + + /** Execute physical plan in order: open, fetch result, close */ + private List doExecutePlan(PhysicalOperator op, ExecuteParams params) + throws Exception { + List hits = new ArrayList<>(); + op.open(params); + + while (op.hasNext()) { + hits.add(op.next().data()); } - /** - * Execute physical plan in order: open, fetch result, close - */ - private List doExecutePlan(PhysicalOperator op, - ExecuteParams params) throws Exception { - List hits = new ArrayList<>(); - op.open(params); - - while (op.hasNext()) { - hits.add(op.next().data()); - } - - if (LOG.isTraceEnabled()) { - hits.forEach(hit -> LOG.trace("Final result row: {}", hit.getSourceAsMap())); - } - return hits; + if (LOG.isTraceEnabled()) { + hits.forEach(hit -> LOG.trace("Final result row: {}", hit.getSourceAsMap())); } - + return hits; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/Row.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/Row.java index 9e7d81a194..5ed074da6d 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/Row.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/Row.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical; import java.util.Arrays; @@ -17,106 +16,93 @@ */ public interface Row { - Row NULL = null; - - /** - * Generate key to 
represent identity of the row. - * - * @param colNames column names as keys - * @return row key - */ - RowKey key(String[] colNames); - - - /** - * Combine current row and another row together to generate a new combined row. - * - * @param otherRow another row - * @return combined row - */ - Row combine(Row otherRow); - - - /** - * Retain columns specified and rename to alias if any. - * - * @param colNameAlias column names to alias mapping - */ - void retain(Map colNameAlias); - + Row NULL = null; + + /** + * Generate key to represent identity of the row. + * + * @param colNames column names as keys + * @return row key + */ + RowKey key(String[] colNames); + + /** + * Combine current row and another row together to generate a new combined row. + * + * @param otherRow another row + * @return combined row + */ + Row combine(Row otherRow); + + /** + * Retain columns specified and rename to alias if any. + * + * @param colNameAlias column names to alias mapping + */ + void retain(Map colNameAlias); + + /** + * @return raw data of row wrapped inside + */ + T data(); + + /** Key that help Row be sorted or hashed. */ + class RowKey implements Comparable { + + /** Represent null key if any joined column value is NULL */ + public static final RowKey NULL = null; + + /** Values of row key */ + private final Object[] keys; + + /** Cached hash code since this class is intended to be used by hash table */ + private final int hashCode; + + public RowKey(Object... keys) { + this.keys = keys; + this.hashCode = Objects.hash(keys); + } - /** - * @return raw data of row wrapped inside - */ - T data(); + public Object[] keys() { + return keys; + } + @Override + public int hashCode() { + return hashCode; + } - /** - * Key that help Row be sorted or hashed. 
- */ - class RowKey implements Comparable { + @Override + public boolean equals(Object other) { + return other instanceof RowKey && Arrays.deepEquals(this.keys, ((RowKey) other).keys); + } - /** - * Represent null key if any joined column value is NULL - */ - public static final RowKey NULL = null; + @SuppressWarnings("unchecked") + @Override + public int compareTo(RowKey other) { + for (int i = 0; i < keys.length; i++) { - /** - * Values of row key + /* + * Only one is null, otherwise (both null or non-null) go ahead. + * Always consider NULL is smaller value which means NULL comes last in ASC and first in DESC */ - private final Object[] keys; - - /** - * Cached hash code since this class is intended to be used by hash table - */ - private final int hashCode; - - public RowKey(Object... keys) { - this.keys = keys; - this.hashCode = Objects.hash(keys); - } - - public Object[] keys() { - return keys; - } - - @Override - public int hashCode() { - return hashCode; + if (keys[i] == null ^ other.keys[i] == null) { + return keys[i] == null ? 1 : -1; } - @Override - public boolean equals(Object other) { - return other instanceof RowKey && Arrays.deepEquals(this.keys, ((RowKey) other).keys); - } - - @SuppressWarnings("unchecked") - @Override - public int compareTo(RowKey other) { - for (int i = 0; i < keys.length; i++) { - - /* - * Only one is null, otherwise (both null or non-null) go ahead. - * Always consider NULL is smaller value which means NULL comes last in ASC and first in DESC - */ - if (keys[i] == null ^ other.keys[i] == null) { - return keys[i] == null ? 1 : -1; - } - - if (keys[i] instanceof Comparable) { - int result = ((Comparable) keys[i]).compareTo(other.keys[i]); - if (result != 0) { - return result; - } - } // Ignore incomparable field silently? 
- } - return 0; - } - - @Override - public String toString() { - return "RowKey: " + Arrays.toString(keys); - } + if (keys[i] instanceof Comparable) { + int result = ((Comparable) keys[i]).compareTo(other.keys[i]); + if (result != 0) { + return result; + } + } // Ignore incomparable field silently? + } + return 0; + } + @Override + public String toString() { + return "RowKey: " + Arrays.toString(keys); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/estimation/Cost.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/estimation/Cost.java index efaf7057b6..86f155d626 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/estimation/Cost.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/estimation/Cost.java @@ -3,22 +3,20 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.estimation; public class Cost implements Comparable { - public static final Cost INFINITY = new Cost(); + public static final Cost INFINITY = new Cost(); - private long inputSize; + private long inputSize; - private long time; + private long time; - public Cost() { - } + public Cost() {} - @Override - public int compareTo(Cost o) { - return 0; - } + @Override + public int compareTo(Cost o) { + return 0; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/estimation/Estimation.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/estimation/Estimation.java index 1648cf854d..72ffbd4652 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/estimation/Estimation.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/estimation/Estimation.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.estimation; import static java.util.Comparator.comparing; @@ 
-18,39 +17,35 @@ import org.opensearch.sql.legacy.query.planner.physical.PhysicalOperator; /** - * Convert and estimate the cost of each operator and generate one optimal plan. - * Memorize cost of candidate physical operators in the bottom-up way to avoid duplicate computation. + * Convert and estimate the cost of each operator and generate one optimal plan. Memorize cost of + * candidate physical operators in the bottom-up way to avoid duplicate computation. */ public class Estimation implements LogicalPlanVisitor { - /** - * Optimal physical operator for logical operator based on completed estimation - */ - private Map> optimalOps = new IdentityHashMap<>(); - - /** - * Keep tracking of the operator that exit visit() - */ - private PhysicalOperator root; - - @Override - public boolean visit(Group group) { - return false; - } - - @SuppressWarnings("unchecked") - @Override - public void endVisit(PlanNode node) { - LogicalOperator op = (LogicalOperator) node; - PhysicalOperator optimal = Arrays.stream(op.toPhysical(optimalOps)). - min(comparing(PhysicalOperator::estimate)). 
- orElseThrow(() -> new IllegalStateException( - "No optimal operator found: " + op)); - optimalOps.put(op, optimal); - root = optimal; - } - - public PhysicalOperator optimalPlan() { - return root; - } + /** Optimal physical operator for logical operator based on completed estimation */ + private Map> optimalOps = new IdentityHashMap<>(); + + /** Keep tracking of the operator that exit visit() */ + private PhysicalOperator root; + + @Override + public boolean visit(Group group) { + return false; + } + + @SuppressWarnings("unchecked") + @Override + public void endVisit(PlanNode node) { + LogicalOperator op = (LogicalOperator) node; + PhysicalOperator optimal = + Arrays.stream(op.toPhysical(optimalOps)) + .min(comparing(PhysicalOperator::estimate)) + .orElseThrow(() -> new IllegalStateException("No optimal operator found: " + op)); + optimalOps.put(op, optimal); + root = optimal; + } + + public PhysicalOperator optimalPlan() { + return root; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/BatchPhysicalOperator.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/BatchPhysicalOperator.java index 3b4eb2b48e..127e542255 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/BatchPhysicalOperator.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/BatchPhysicalOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node; import static org.opensearch.sql.legacy.query.planner.core.ExecuteParams.ExecuteParamType.RESOURCE_MANAGER; @@ -19,78 +18,74 @@ import org.opensearch.sql.legacy.query.planner.resource.ResourceManager; /** - * Abstraction for physical operators that load large volume of data and generally prefetch for efficiency. + * Abstraction for physical operators that load large volume of data and generally prefetch for + * efficiency. 
* * @param */ public abstract class BatchPhysicalOperator implements PhysicalOperator { - protected static final Logger LOG = LogManager.getLogger(); + protected static final Logger LOG = LogManager.getLogger(); - /** - * Resource monitor to avoid consuming too much resource - */ - private ResourceManager resourceMgr; + /** Resource monitor to avoid consuming too much resource */ + private ResourceManager resourceMgr; - /** - * Current batch of data - */ - private Iterator> curBatch; + /** Current batch of data */ + private Iterator> curBatch; - @Override - public void open(ExecuteParams params) throws Exception { - //PhysicalOperator.super.open(params); // Child needs to call this super.open() and open its next node too - resourceMgr = params.get(RESOURCE_MANAGER); - } + @Override + public void open(ExecuteParams params) throws Exception { + // PhysicalOperator.super.open(params); // Child needs to call this super.open() and open its + // next node too + resourceMgr = params.get(RESOURCE_MANAGER); + } - @Override - public boolean hasNext() { - if (isNoMoreDataInCurrentBatch()) { - LOG.debug("{} No more data in current batch, pre-fetching next batch", this); - Collection> nextBatch = prefetchSafely(); + @Override + public boolean hasNext() { + if (isNoMoreDataInCurrentBatch()) { + LOG.debug("{} No more data in current batch, pre-fetching next batch", this); + Collection> nextBatch = prefetchSafely(); - LOG.debug("{} Pre-fetched {} rows", this, nextBatch.size()); - if (LOG.isTraceEnabled()) { - nextBatch.forEach(row -> LOG.trace("Row pre-fetched: {}", row)); - } + LOG.debug("{} Pre-fetched {} rows", this, nextBatch.size()); + if (LOG.isTraceEnabled()) { + nextBatch.forEach(row -> LOG.trace("Row pre-fetched: {}", row)); + } - curBatch = nextBatch.iterator(); - } - return curBatch.hasNext(); + curBatch = nextBatch.iterator(); } - - @Override - public Row next() { - return curBatch.next(); - } - - /** - * Prefetch next batch safely by checking resource monitor - */ - 
private Collection> prefetchSafely() { - Objects.requireNonNull(resourceMgr, "ResourceManager is not set so unable to do sanity check"); - - boolean isHealthy = resourceMgr.isHealthy(); - boolean isTimeout = resourceMgr.isTimeout(); - if (isHealthy && !isTimeout) { - try { - return prefetch(); - } catch (Exception e) { - throw new IllegalStateException("Failed to prefetch next batch", e); - } - } - throw new IllegalStateException("Exit due to " + (isHealthy ? "time out" : "insufficient resource")); + return curBatch.hasNext(); + } + + @Override + public Row next() { + return curBatch.next(); + } + + /** Prefetch next batch safely by checking resource monitor */ + private Collection> prefetchSafely() { + Objects.requireNonNull(resourceMgr, "ResourceManager is not set so unable to do sanity check"); + + boolean isHealthy = resourceMgr.isHealthy(); + boolean isTimeout = resourceMgr.isTimeout(); + if (isHealthy && !isTimeout) { + try { + return prefetch(); + } catch (Exception e) { + throw new IllegalStateException("Failed to prefetch next batch", e); + } } - - /** - * Prefetch next batch if current is exhausted. - * - * @return next batch - */ - protected abstract Collection> prefetch() throws Exception; - - private boolean isNoMoreDataInCurrentBatch() { - return curBatch == null || !curBatch.hasNext(); - } - + throw new IllegalStateException( + "Exit due to " + (isHealthy ? "time out" : "insufficient resource")); + } + + /** + * Prefetch next batch if current is exhausted. 
+ * + * @return next batch + */ + protected abstract Collection> prefetch() throws Exception; + + private boolean isNoMoreDataInCurrentBatch() { + return curBatch == null || !curBatch.hasNext(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/BlockHashJoin.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/BlockHashJoin.java index 19c0ae41d2..90bf9923d3 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/BlockHashJoin.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/BlockHashJoin.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.join; import static com.alibaba.druid.sql.ast.statement.SQLJoinTableSource.JoinType; @@ -25,91 +24,87 @@ import org.opensearch.sql.legacy.query.planner.physical.estimation.Cost; import org.opensearch.sql.legacy.query.planner.resource.blocksize.BlockSize; -/** - * Block-based Hash Join implementation - */ +/** Block-based Hash Join implementation */ public class BlockHashJoin extends JoinAlgorithm { - /** - * Use terms filter optimization or not - */ - private final boolean isUseTermsFilterOptimization; + /** Use terms filter optimization or not */ + private final boolean isUseTermsFilterOptimization; - public BlockHashJoin(PhysicalOperator left, - PhysicalOperator right, - JoinType type, - JoinCondition condition, - BlockSize blockSize, - boolean isUseTermsFilterOptimization) { - super(left, right, type, condition, blockSize); + public BlockHashJoin( + PhysicalOperator left, + PhysicalOperator right, + JoinType type, + JoinCondition condition, + BlockSize blockSize, + boolean isUseTermsFilterOptimization) { + super(left, right, type, condition, blockSize); - this.isUseTermsFilterOptimization = isUseTermsFilterOptimization; - } + this.isUseTermsFilterOptimization = isUseTermsFilterOptimization; + } - 
@Override - public Cost estimate() { - return new Cost(); - } + @Override + public Cost estimate() { + return new Cost(); + } - @Override - protected void reopenRight() throws Exception { - Objects.requireNonNull(params, "Execute params is not set so unable to add extra filter"); + @Override + protected void reopenRight() throws Exception { + Objects.requireNonNull(params, "Execute params is not set so unable to add extra filter"); - if (isUseTermsFilterOptimization) { - params.add(ExecuteParams.ExecuteParamType.EXTRA_QUERY_FILTER, queryForPushedDownOnConds()); - } - right.open(params); + if (isUseTermsFilterOptimization) { + params.add(ExecuteParams.ExecuteParamType.EXTRA_QUERY_FILTER, queryForPushedDownOnConds()); } - - @Override - protected List> probe() { - List> combinedRows = new ArrayList<>(); - int totalSize = 0; - - /* Return if already found enough matched rows to give ResourceMgr a chance to check resource usage */ - while (right.hasNext() && totalSize < hashTable.size()) { - Row rightRow = right.next(); - Collection> matchedLeftRows = hashTable.match(rightRow); - - if (!matchedLeftRows.isEmpty()) { - combinedRows.add(new CombinedRow<>(rightRow, matchedLeftRows)); - totalSize += matchedLeftRows.size(); - } - } - return combinedRows; + right.open(params); + } + + @Override + protected List> probe() { + List> combinedRows = new ArrayList<>(); + int totalSize = 0; + + /* Return if already found enough matched rows to give ResourceMgr a chance to check resource usage */ + while (right.hasNext() && totalSize < hashTable.size()) { + Row rightRow = right.next(); + Collection> matchedLeftRows = hashTable.match(rightRow); + + if (!matchedLeftRows.isEmpty()) { + combinedRows.add(new CombinedRow<>(rightRow, matchedLeftRows)); + totalSize += matchedLeftRows.size(); + } } - - /** - * Build query for pushed down conditions in ON - */ - private BoolQueryBuilder queryForPushedDownOnConds() { - BoolQueryBuilder orQuery = boolQuery(); - Map>[] rightNameToLeftValuesGroup = 
hashTable.rightFieldWithLeftValues(); - - for (Map> rightNameToLeftValues : rightNameToLeftValuesGroup) { - if (LOG.isTraceEnabled()) { - rightNameToLeftValues.forEach((rightName, leftValues) -> - LOG.trace("Right name to left values mapping: {} => {}", rightName, leftValues)); - } - - BoolQueryBuilder andQuery = boolQuery(); - rightNameToLeftValues.forEach( - (rightName, leftValues) -> andQuery.must(termsQuery(rightName, leftValues)) - ); - - if (LOG.isTraceEnabled()) { - LOG.trace("Terms filter optimization: {}", Strings.toString(XContentType.JSON, andQuery)); - } - orQuery.should(andQuery); - } - return orQuery; + return combinedRows; + } + + /** Build query for pushed down conditions in ON */ + private BoolQueryBuilder queryForPushedDownOnConds() { + BoolQueryBuilder orQuery = boolQuery(); + Map>[] rightNameToLeftValuesGroup = + hashTable.rightFieldWithLeftValues(); + + for (Map> rightNameToLeftValues : rightNameToLeftValuesGroup) { + if (LOG.isTraceEnabled()) { + rightNameToLeftValues.forEach( + (rightName, leftValues) -> + LOG.trace("Right name to left values mapping: {} => {}", rightName, leftValues)); + } + + BoolQueryBuilder andQuery = boolQuery(); + rightNameToLeftValues.forEach( + (rightName, leftValues) -> andQuery.must(termsQuery(rightName, leftValues))); + + if (LOG.isTraceEnabled()) { + LOG.trace("Terms filter optimization: {}", Strings.toString(XContentType.JSON, andQuery)); + } + orQuery.should(andQuery); } + return orQuery; + } - /********************************************* - * Getters for Explain - *********************************************/ + /********************************************* + * Getters for Explain + *********************************************/ - public boolean isUseTermsFilterOptimization() { - return isUseTermsFilterOptimization; - } + public boolean isUseTermsFilterOptimization() { + return isUseTermsFilterOptimization; + } } diff --git 
a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/CombinedRow.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/CombinedRow.java index e83bbb7d0e..b1fb43441e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/CombinedRow.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/CombinedRow.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.join; import java.util.ArrayList; @@ -19,28 +18,28 @@ */ public class CombinedRow { - private Row rightRow; - private Collection> leftRows; + private Row rightRow; + private Collection> leftRows; - public CombinedRow(Row rightRow, Collection> leftRows) { - this.rightRow = rightRow; - this.leftRows = leftRows; - } + public CombinedRow(Row rightRow, Collection> leftRows) { + this.rightRow = rightRow; + this.leftRows = leftRows; + } - public List> combine() { - List> combinedRows = new ArrayList<>(); - for (Row leftRow : leftRows) { - combinedRows.add(leftRow.combine(rightRow)); - } - return combinedRows; + public List> combine() { + List> combinedRows = new ArrayList<>(); + for (Row leftRow : leftRows) { + combinedRows.add(leftRow.combine(rightRow)); } + return combinedRows; + } - public Collection> leftMatchedRows() { - return Collections.unmodifiableCollection(leftRows); - } + public Collection> leftMatchedRows() { + return Collections.unmodifiableCollection(leftRows); + } - @Override - public String toString() { - return "CombinedRow{rightRow=" + rightRow + ", leftRows=" + leftRows + '}'; - } + @Override + public String toString() { + return "CombinedRow{rightRow=" + rightRow + ", leftRows=" + leftRows + '}'; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/DefaultHashTable.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/DefaultHashTable.java index 733d7a78ab..23e79d2c31 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/DefaultHashTable.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/DefaultHashTable.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.join; import static java.util.Collections.emptyList; @@ -22,102 +21,98 @@ import org.opensearch.sql.legacy.query.planner.physical.Row.RowKey; /** - * Hash table implementation. - * In the case of no join condition, hash table degrades to linked list with all rows in block paired to RowKey.NULL + * Hash table implementation. In the case of no join condition, hash table degrades to linked list + * with all rows in block paired to RowKey.NULL * * @param Row data type */ public class DefaultHashTable implements HashTable { - private static final Logger LOG = LogManager.getLogger(); - - /** - * Hash table implementation - */ - private final Multimap> table = ArrayListMultimap.create(); - - /** - * Left join conditions to generate key to build hash table by left rows from block - */ - private final String[] leftJoinFields; - - /** - * Right join conditions to generate key to probe hash table by right rows - */ - private final String[] rightJoinFields; - - - public DefaultHashTable(String[] leftJoinFields, String[] rightJoinFields) { - this.leftJoinFields = leftJoinFields; - this.rightJoinFields = rightJoinFields; + private static final Logger LOG = LogManager.getLogger(); + + /** Hash table implementation */ + private final Multimap> table = ArrayListMultimap.create(); + + /** Left join conditions to generate key to build hash table by left rows from block */ + private final String[] leftJoinFields; + + /** Right join conditions to generate key to probe hash table by right rows */ + private final 
String[] rightJoinFields; + + public DefaultHashTable(String[] leftJoinFields, String[] rightJoinFields) { + this.leftJoinFields = leftJoinFields; + this.rightJoinFields = rightJoinFields; + } + + /** + * Add row in block to hash table by left conditions in ON. For the duplicate key, append them to + * the list in value (MultiMap) + */ + @Override + public void add(Row row) { + RowKey key = row.key(leftJoinFields); + if (key == RowKey.NULL) { + LOG.debug( + "Skip rows with NULL column value during build: row={}, conditions={}", + row, + leftJoinFields); + } else { + table.put(key, row); } - - /** - * Add row in block to hash table by left conditions in ON. - * For the duplicate key, append them to the list in value (MultiMap) - */ - @Override - public void add(Row row) { - RowKey key = row.key(leftJoinFields); - if (key == RowKey.NULL) { - LOG.debug("Skip rows with NULL column value during build: row={}, conditions={}", row, leftJoinFields); - } else { - table.put(key, row); - } + } + + /** Probe hash table to match right rows by values of right conditions */ + @Override + public Collection> match(Row row) { + RowKey key = row.key(rightJoinFields); + if (key == RowKey.NULL) { + LOG.debug( + "Skip rows with NULL column value during probing: row={}, conditions={}", + row, + rightJoinFields); + return emptyList(); } - - /** - * Probe hash table to match right rows by values of right conditions - */ - @Override - public Collection> match(Row row) { - RowKey key = row.key(rightJoinFields); - if (key == RowKey.NULL) { - LOG.debug("Skip rows with NULL column value during probing: row={}, conditions={}", row, rightJoinFields); - return emptyList(); - } - return table.get(key); // Multimap returns empty list rather null. + return table.get(key); // Multimap returns empty list rather null. 
+ } + + /** Right joined field name with according column value list to push down */ + @SuppressWarnings("unchecked") + @Override + public Map>[] rightFieldWithLeftValues() { + Map> result = + new HashMap<>(); // Eliminate potential duplicate in values + for (RowKey key : table.keySet()) { + Object[] keys = key.keys(); + for (int i = 0; i < keys.length; i++) { + result + .computeIfAbsent(rightJoinFields[i], (k -> new HashSet<>())) + .add(lowercaseIfStr(keys[i])); // Terms stored in lower case in OpenSearch + } } - /** - * Right joined field name with according column value list to push down - */ - @SuppressWarnings("unchecked") - @Override - public Map>[] rightFieldWithLeftValues() { - Map> result = new HashMap<>(); // Eliminate potential duplicate in values - for (RowKey key : table.keySet()) { - Object[] keys = key.keys(); - for (int i = 0; i < keys.length; i++) { - result.computeIfAbsent(rightJoinFields[i], (k -> new HashSet<>())). - add(lowercaseIfStr(keys[i])); // Terms stored in lower case in OpenSearch - } - } - - // Convert value of Map from Guava's Set to JDK list which is expected by OpenSearch writer - for (Entry> entry : result.entrySet()) { - entry.setValue(new ArrayList<>(entry.getValue())); - } - return new Map[]{result}; + // Convert value of Map from Guava's Set to JDK list which is expected by OpenSearch writer + for (Entry> entry : result.entrySet()) { + entry.setValue(new ArrayList<>(entry.getValue())); } - - @Override - public int size() { - return table.size(); - } - - @Override - public boolean isEmpty() { - return table.isEmpty(); - } - - @Override - public void clear() { - table.clear(); - } - - private Object lowercaseIfStr(Object key) { - return key instanceof String ? 
((String) key).toLowerCase() : key; - } - + return new Map[] {result}; + } + + @Override + public int size() { + return table.size(); + } + + @Override + public boolean isEmpty() { + return table.isEmpty(); + } + + @Override + public void clear() { + table.clear(); + } + + private Object lowercaseIfStr(Object key) { + return key instanceof String ? ((String) key).toLowerCase() : key; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/HashTable.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/HashTable.java index 4a20b1833b..1811af5158 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/HashTable.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/HashTable.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.join; import java.util.Collection; @@ -17,50 +16,42 @@ */ public interface HashTable { - /** - * Add one row to the hash table - * - * @param row row - */ - void add(Row row); - - - /** - * Find all matched row(s) in the hash table. - * - * @param row row to be matched - * @return all matches - */ - Collection> match(Row row); - - - /** - * Mapping from right field to value(s) of left size - * - * @return - */ - Map>[] rightFieldWithLeftValues(); - - - /** - * Get size of hash table - * - * @return size of hash table - */ - int size(); - - - /** - * Is hash table empty? - * - * @return true for yes - */ - boolean isEmpty(); - - - /** - * Clear internal data structure - */ - void clear(); - + /** + * Add one row to the hash table + * + * @param row row + */ + void add(Row row); + + /** + * Find all matched row(s) in the hash table. 
+ * + * @param row row to be matched + * @return all matches + */ + Collection> match(Row row); + + /** + * Mapping from right field to value(s) of left side + * + * @return array of maps from right join field name to left side column values + */ + Map>[] rightFieldWithLeftValues(); + + /** + * Get size of hash table + * + * @return size of hash table + */ + int size(); + + /** + * Is hash table empty? + * + * @return true for yes + */ + boolean isEmpty(); + + /** Clear internal data structure */ + void clear(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/HashTableGroup.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/HashTableGroup.java index c22eb9dc19..ded83429d8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/HashTableGroup.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/HashTableGroup.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.join; import static org.opensearch.sql.legacy.query.planner.logical.node.Join.JoinCondition; @@ -15,75 +14,69 @@ import java.util.Set; import org.opensearch.sql.legacy.query.planner.physical.Row; -/** - * Hash table group with each hash table per AND join condition. - */ +/** Hash table group with each hash table per AND join condition. 
*/ public class HashTableGroup implements HashTable { - private final HashTable[] hashTables; + private final HashTable[] hashTables; - /** - * Number of rows stored in the hash table (in other words, = block size) - */ - private int numOfRows = 0; + /** Number of rows stored in the hash table (in other words, = block size) */ + private int numOfRows = 0; - @SuppressWarnings("unchecked") - public HashTableGroup(JoinCondition condition) { - int groupSize = condition.groupSize(); - if (groupSize == 0) { - // Create one hash table (degraded to list) for Cross Join - hashTables = new HashTable[]{new ListHashTable()}; - } else { - hashTables = new HashTable[groupSize]; - for (int i = 0; i < groupSize; i++) { - hashTables[i] = new DefaultHashTable<>( - condition.leftColumnNames(i), - condition.rightColumnNames(i) - ); - } - } + @SuppressWarnings("unchecked") + public HashTableGroup(JoinCondition condition) { + int groupSize = condition.groupSize(); + if (groupSize == 0) { + // Create one hash table (degraded to list) for Cross Join + hashTables = new HashTable[] {new ListHashTable()}; + } else { + hashTables = new HashTable[groupSize]; + for (int i = 0; i < groupSize; i++) { + hashTables[i] = + new DefaultHashTable<>(condition.leftColumnNames(i), condition.rightColumnNames(i)); + } } + } - @Override - public void add(Row row) { - for (HashTable hashTable : hashTables) { - hashTable.add(row); - } - numOfRows++; + @Override + public void add(Row row) { + for (HashTable hashTable : hashTables) { + hashTable.add(row); } + numOfRows++; + } - @Override - public Collection> match(Row row) { - Set> allMatched = Sets.newIdentityHashSet(); - for (HashTable hashTable : hashTables) { - allMatched.addAll(hashTable.match(row)); - } - return allMatched; + @Override + public Collection> match(Row row) { + Set> allMatched = Sets.newIdentityHashSet(); + for (HashTable hashTable : hashTables) { + allMatched.addAll(hashTable.match(row)); } + return allMatched; + } - 
@SuppressWarnings("unchecked") - public Map>[] rightFieldWithLeftValues() { - return Arrays.stream(hashTables). - map(hashTable -> hashTable.rightFieldWithLeftValues()[0]). // Make interface consistent - toArray(Map[]::new); - } + @SuppressWarnings("unchecked") + public Map>[] rightFieldWithLeftValues() { + return Arrays.stream(hashTables) + .map(hashTable -> hashTable.rightFieldWithLeftValues()[0]) + . // Make interface consistent + toArray(Map[]::new); + } - @Override - public boolean isEmpty() { - return numOfRows == 0; - } + @Override + public boolean isEmpty() { + return numOfRows == 0; + } - @Override - public int size() { - return numOfRows; - } + @Override + public int size() { + return numOfRows; + } - @Override - public void clear() { - for (HashTable hashTable : hashTables) { - hashTable.clear(); - } - numOfRows = 0; + @Override + public void clear() { + for (HashTable hashTable : hashTables) { + hashTable.clear(); } - + numOfRows = 0; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/JoinAlgorithm.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/JoinAlgorithm.java index 07f008bea4..cdda116970 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/JoinAlgorithm.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/JoinAlgorithm.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.join; import static java.util.Collections.emptyList; @@ -31,253 +30,232 @@ */ public abstract class JoinAlgorithm extends BatchPhysicalOperator { - protected static final Logger LOG = LogManager.getLogger(); - - /** - * Left child operator - */ - private final PhysicalOperator left; - - /** - * Right child operator handled by concrete join algorithm subclass - */ - protected final PhysicalOperator right; - - /** - * Join type ex. 
inner join, left join - */ - private final JoinType type; - - /** - * Joined columns in ON conditions - */ - private final JoinCondition condition; - - /** - * Block size calculator - */ - private final BlockSize blockSize; - - /** - * Bookkeeping unmatched rows in current block from left - */ - private final Set> leftMismatch; - - /** - * Hash table for right table probing - */ - protected HashTable hashTable; - - /** - * Execute params to reset right side for each left block - */ - protected ExecuteParams params; - - JoinAlgorithm(PhysicalOperator left, - PhysicalOperator right, - JoinType type, - JoinCondition condition, - BlockSize blockSize) { - this.left = left; - this.right = right; - this.type = type; - this.condition = condition; - this.blockSize = blockSize; - this.hashTable = new HashTableGroup<>(condition); - this.leftMismatch = Sets.newIdentityHashSet(); - } - - @Override - public PlanNode[] children() { - return new PlanNode[]{left, right}; - } + protected static final Logger LOG = LogManager.getLogger(); + + /** Left child operator */ + private final PhysicalOperator left; + + /** Right child operator handled by concrete join algorithm subclass */ + protected final PhysicalOperator right; + + /** Join type ex. 
inner join, left join */ + private final JoinType type; + + /** Joined columns in ON conditions */ + private final JoinCondition condition; + + /** Block size calculator */ + private final BlockSize blockSize; + + /** Bookkeeping unmatched rows in current block from left */ + private final Set> leftMismatch; + + /** Hash table for right table probing */ + protected HashTable hashTable; + + /** Execute params to reset right side for each left block */ + protected ExecuteParams params; + + JoinAlgorithm( + PhysicalOperator left, + PhysicalOperator right, + JoinType type, + JoinCondition condition, + BlockSize blockSize) { + this.left = left; + this.right = right; + this.type = type; + this.condition = condition; + this.blockSize = blockSize; + this.hashTable = new HashTableGroup<>(condition); + this.leftMismatch = Sets.newIdentityHashSet(); + } + + @Override + public PlanNode[] children() { + return new PlanNode[] {left, right}; + } + + @Override + public void open(ExecuteParams params) throws Exception { + super.open(params); + left.open(params); + this.params = params; + } + + @Override + public void close() { + super.close(); + hashTable.clear(); + leftMismatch.clear(); + LOG.debug("Cleared all resources used by join"); + } + + /** + * Build-probe left and right block by block to prefetch next matches (and mismatches if outer + * join). + * + *

1) Build hash table and open right side. 2) Keep probing right to find matched rows + * (meanwhile update mismatched set) 3) Check if any row in mismatched set to return in the case + * of outer join. 4) Nothing remained now, move on to next block of left. Go back to step 1. + * + *

This is a new run AND no block from left means algorithm should stop and return empty. + */ + @Override + protected Collection> prefetch() throws Exception { + while (!isNewRunButNoMoreBlockFromLeft()) { + + // 1.Build hash table and (re-)open right side for the new run + if (isNewRun()) { + buildHashTableByNextBlock(); + reopenRight(); + } + + // 2.Keep probing right by the hash table and bookkeeping mismatch + while (isAnyMoreDataFromRight()) { + Collection> matched = probeMatchAndBookkeepMismatch(); + if (!matched.isEmpty()) { + return matched; + } + } - @Override - public void open(ExecuteParams params) throws Exception { - super.open(params); - left.open(params); - this.params = params; - } + // 3.You know it's a mismatch only after this run finished (left block + all right). + if (isAnyMismatchForOuterJoin()) { + return returnAndClearMismatch(); + } - @Override - public void close() { - super.close(); - hashTable.clear(); - leftMismatch.clear(); - LOG.debug("Cleared all resources used by join"); + // 4.Clean up and close right + cleanUpAndCloseRight(); } + return emptyList(); + } - /** - * Build-probe left and right block by block to prefetch next matches (and mismatches if outer join). - *

- * 1) Build hash table and open right side. - * 2) Keep probing right to find matched rows (meanwhile update mismatched set) - * 3) Check if any row in mismatched set to return in the case of outer join. - * 4) Nothing remained now, move on to next block of left. Go back to step 1. - *

- * This is a new run AND no block from left means algorithm should stop and return empty. - */ - @Override - protected Collection> prefetch() throws Exception { - while (!isNewRunButNoMoreBlockFromLeft()) { - - // 1.Build hash table and (re-)open right side for the new run - if (isNewRun()) { - buildHashTableByNextBlock(); - reopenRight(); - } - - // 2.Keep probing right by the hash table and bookkeeping mismatch - while (isAnyMoreDataFromRight()) { - Collection> matched = probeMatchAndBookkeepMismatch(); - if (!matched.isEmpty()) { - return matched; - } - } - - // 3.You know it's a mismatch only after this run finished (left block + all right). - if (isAnyMismatchForOuterJoin()) { - return returnAndClearMismatch(); - } - - // 4.Clean up and close right - cleanUpAndCloseRight(); - } - return emptyList(); + /** Probe right by hash table built from left. Handle matched and mismatched rows. */ + private Collection> probeMatchAndBookkeepMismatch() { + if (hashTable.isEmpty()) { + throw new IllegalStateException("Hash table is NOT supposed to be empty"); } - /** - * Probe right by hash table built from left. Handle matched and mismatched rows. 
- */ - private Collection> probeMatchAndBookkeepMismatch() { - if (hashTable.isEmpty()) { - throw new IllegalStateException("Hash table is NOT supposed to be empty"); - } + List> combinedRows = probe(); - List> combinedRows = probe(); + List> matchRows = new ArrayList<>(); + if (combinedRows.isEmpty()) { + LOG.debug("No matched row found"); + } else { + if (LOG.isTraceEnabled()) { + combinedRows.forEach(row -> LOG.trace("Matched row before combined: {}", row)); + } - List> matchRows = new ArrayList<>(); - if (combinedRows.isEmpty()) { - LOG.debug("No matched row found"); - } else { - if (LOG.isTraceEnabled()) { - combinedRows.forEach(row -> LOG.trace("Matched row before combined: {}", row)); - } + for (CombinedRow row : combinedRows) { + matchRows.addAll(row.combine()); + } - for (CombinedRow row : combinedRows) { - matchRows.addAll(row.combine()); - } + if (LOG.isTraceEnabled()) { + matchRows.forEach(row -> LOG.trace("Matched row after combined: {}", row)); + } - if (LOG.isTraceEnabled()) { - matchRows.forEach(row -> LOG.trace("Matched row after combined: {}", row)); - } - - bookkeepMismatchedRows(combinedRows); - } - return matchRows; + bookkeepMismatchedRows(combinedRows); } - - private boolean isNewRunButNoMoreBlockFromLeft() { - return isNewRun() && !isAnyMoreBlockFromLeft(); + return matchRows; + } + + private boolean isNewRunButNoMoreBlockFromLeft() { + return isNewRun() && !isAnyMoreBlockFromLeft(); + } + + private boolean isNewRun() { + return hashTable.isEmpty(); + } + + private boolean isAnyMoreBlockFromLeft() { + return left.hasNext(); + } + + private boolean isAnyMoreDataFromRight() { + return right.hasNext(); + } + + private boolean isAnyMismatchForOuterJoin() { + return !leftMismatch.isEmpty(); + } + + /** Clone mismatch list and clear it so that we won't return it forever */ + @SuppressWarnings("unchecked") + private Collection> returnAndClearMismatch() { + if (LOG.isTraceEnabled()) { + leftMismatch.forEach(row -> LOG.trace("Mismatched rows before 
combined: {}", row)); } - private boolean isNewRun() { - return hashTable.isEmpty(); + List> result = new ArrayList<>(); + for (Row row : leftMismatch) { + result.add(row.combine(Row.NULL)); } - private boolean isAnyMoreBlockFromLeft() { - return left.hasNext(); + if (LOG.isTraceEnabled()) { + result.forEach(row -> LOG.trace("Mismatched rows after combined: {}", row)); } - - private boolean isAnyMoreDataFromRight() { - return right.hasNext(); + leftMismatch.clear(); + return result; + } + + /** Building phase: Build hash table from data block. */ + private void buildHashTableByNextBlock() { + List> block = loadNextBlockFromLeft(blockSize.size()); + if (LOG.isTraceEnabled()) { + LOG.trace("Build hash table on conditions with block: {}, {}", condition, block); } - private boolean isAnyMismatchForOuterJoin() { - return !leftMismatch.isEmpty(); + for (Row data : block) { + hashTable.add(data); } - /** - * Clone mismatch list and clear it so that we won't return it forever - */ - @SuppressWarnings("unchecked") - private Collection> returnAndClearMismatch() { - if (LOG.isTraceEnabled()) { - leftMismatch.forEach(row -> LOG.trace("Mismatched rows before combined: {}", row)); - } - - List> result = new ArrayList<>(); - for (Row row : leftMismatch) { - result.add(row.combine(Row.NULL)); - } - - if (LOG.isTraceEnabled()) { - result.forEach(row -> LOG.trace("Mismatched rows after combined: {}", row)); - } - leftMismatch.clear(); - return result; + if (type == JoinType.LEFT_OUTER_JOIN) { + leftMismatch.addAll(block); } - - /** - * Building phase: - * Build hash table from data block. 
- */ - private void buildHashTableByNextBlock() { - List> block = loadNextBlockFromLeft(blockSize.size()); - if (LOG.isTraceEnabled()) { - LOG.trace("Build hash table on conditions with block: {}, {}", condition, block); - } - - for (Row data : block) { - hashTable.add(data); - } - - if (type == JoinType.LEFT_OUTER_JOIN) { - leftMismatch.addAll(block); - } - } - - private void cleanUpAndCloseRight() { - LOG.debug("No more data from right. Clean up and close right."); - hashTable.clear(); - leftMismatch.clear(); - right.close(); - } - - private List> loadNextBlockFromLeft(int blockSize) { - List> block = new ArrayList<>(); - for (int i = 0; i < blockSize && left.hasNext(); i++) { - block.add(left.next()); - } - return block; + } + + private void cleanUpAndCloseRight() { + LOG.debug("No more data from right. Clean up and close right."); + hashTable.clear(); + leftMismatch.clear(); + right.close(); + } + + private List> loadNextBlockFromLeft(int blockSize) { + List> block = new ArrayList<>(); + for (int i = 0; i < blockSize && left.hasNext(); i++) { + block.add(left.next()); } - - private void bookkeepMismatchedRows(List> combinedRows) { - if (type == JoinType.LEFT_OUTER_JOIN) { - for (CombinedRow row : combinedRows) { - leftMismatch.removeAll(row.leftMatchedRows()); - } - } + return block; + } + + private void bookkeepMismatchedRows(List> combinedRows) { + if (type == JoinType.LEFT_OUTER_JOIN) { + for (CombinedRow row : combinedRows) { + leftMismatch.removeAll(row.leftMatchedRows()); + } } - - /** - * (Re-)open right side by params. - */ - protected abstract void reopenRight() throws Exception; - - - /** - * Probing phase - * - * @return matched rows from left and right in - */ - protected abstract List> probe(); - - - @Override - public String toString() { - return getClass().getSimpleName() + "[ conditions=" + condition - + ", type=" + type + ", blockSize=[" + blockSize + "] ]"; - } - + } + + /** (Re-)open right side by params. 
*/ + protected abstract void reopenRight() throws Exception; + + /** + * Probing phase + * + * @return matched rows from left and right in + */ + protected abstract List> probe(); + + @Override + public String toString() { + return getClass().getSimpleName() + + "[ conditions=" + + condition + + ", type=" + + type + + ", blockSize=[" + + blockSize + + "] ]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/ListHashTable.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/ListHashTable.java index 5d39529632..baf0af8c86 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/ListHashTable.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/join/ListHashTable.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.join; import java.util.ArrayList; @@ -13,41 +12,39 @@ import java.util.Map; import org.opensearch.sql.legacy.query.planner.physical.Row; -/** - * List implementation to avoid normal hash table degrading into linked list. - */ +/** List implementation to avoid normal hash table degrading into linked list. 
*/ public class ListHashTable implements HashTable { - private List> rows = new ArrayList<>(); - - @Override - public void add(Row row) { - rows.add(row); - } - - @Override - public Collection> match(Row row) { - return rows; - } - - @SuppressWarnings("unchecked") - @Override - public Map>[] rightFieldWithLeftValues() { - return new Map[]{new HashMap()}; - } - - @Override - public int size() { - return rows.size(); - } - - @Override - public boolean isEmpty() { - return rows.isEmpty(); - } - - @Override - public void clear() { - rows.clear(); - } + private List> rows = new ArrayList<>(); + + @Override + public void add(Row row) { + rows.add(row); + } + + @Override + public Collection> match(Row row) { + return rows; + } + + @SuppressWarnings("unchecked") + @Override + public Map>[] rightFieldWithLeftValues() { + return new Map[] {new HashMap()}; + } + + @Override + public int size() { + return rows.size(); + } + + @Override + public boolean isEmpty() { + return rows.isEmpty(); + } + + @Override + public void clear() { + rows.clear(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/project/PhysicalProject.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/project/PhysicalProject.java index 9c4bdc5c9e..e09ef5c3fe 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/project/PhysicalProject.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/project/PhysicalProject.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.project; import java.util.List; @@ -16,34 +15,34 @@ import org.opensearch.sql.legacy.query.planner.physical.estimation.Cost; import org.opensearch.sql.legacy.query.planner.physical.node.scroll.BindingTupleRow; -/** - * The definition of Project Operator. - */ +/** The definition of Project Operator. 
*/ @RequiredArgsConstructor public class PhysicalProject implements PhysicalOperator { - private final PhysicalOperator next; - private final List fields; - - @Override - public Cost estimate() { - return null; - } - - @Override - public PlanNode[] children() { - return new PlanNode[]{next}; - } - - @Override - public boolean hasNext() { - return next.hasNext(); - } - - @Override - public Row next() { - BindingTuple input = next.next().data(); - BindingTuple.BindingTupleBuilder outputBindingTupleBuilder = BindingTuple.builder(); - fields.forEach(field -> outputBindingTupleBuilder.binding(field.getName(), field.getExpr().valueOf(input))); - return new BindingTupleRow(outputBindingTupleBuilder.build()); - } + private final PhysicalOperator next; + private final List fields; + + @Override + public Cost estimate() { + return null; + } + + @Override + public PlanNode[] children() { + return new PlanNode[] {next}; + } + + @Override + public boolean hasNext() { + return next.hasNext(); + } + + @Override + public Row next() { + BindingTuple input = next.next().data(); + BindingTuple.BindingTupleBuilder outputBindingTupleBuilder = BindingTuple.builder(); + fields.forEach( + field -> + outputBindingTupleBuilder.binding(field.getName(), field.getExpr().valueOf(input))); + return new BindingTupleRow(outputBindingTupleBuilder.build()); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/BindingTupleRow.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/BindingTupleRow.java index 9e3a190e30..41f500fed1 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/BindingTupleRow.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/BindingTupleRow.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.scroll; import java.util.Map; @@ -13,25 +12,25 @@ 
@RequiredArgsConstructor public class BindingTupleRow implements Row { - private final BindingTuple bindingTuple; - - @Override - public RowKey key(String[] colNames) { - return null; - } - - @Override - public Row combine(Row otherRow) { - throw new RuntimeException("unsupported operation"); - } - - @Override - public void retain(Map colNameAlias) { - // do nothing - } - - @Override - public BindingTuple data() { - return bindingTuple; - } + private final BindingTuple bindingTuple; + + @Override + public RowKey key(String[] colNames) { + return null; + } + + @Override + public Row combine(Row otherRow) { + throw new RuntimeException("unsupported operation"); + } + + @Override + public void retain(Map colNameAlias) { + // do nothing + } + + @Override + public BindingTuple data() { + return bindingTuple; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/PhysicalScroll.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/PhysicalScroll.java index 8866420218..16ad327a87 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/PhysicalScroll.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/PhysicalScroll.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.scroll; import java.util.Iterator; @@ -21,54 +20,53 @@ import org.opensearch.sql.legacy.query.planner.physical.Row; import org.opensearch.sql.legacy.query.planner.physical.estimation.Cost; -/** - * The definition of Scroll Operator. - */ +/** The definition of Scroll Operator. 
*/ @RequiredArgsConstructor public class PhysicalScroll implements PhysicalOperator { - private final QueryAction queryAction; + private final QueryAction queryAction; - private Iterator rowIterator; + private Iterator rowIterator; - @Override - public Cost estimate() { - return null; - } + @Override + public Cost estimate() { + return null; + } - @Override - public PlanNode[] children() { - return new PlanNode[0]; - } + @Override + public PlanNode[] children() { + return new PlanNode[0]; + } - @Override - public boolean hasNext() { - return rowIterator.hasNext(); - } + @Override + public boolean hasNext() { + return rowIterator.hasNext(); + } - @Override - public Row next() { - return rowIterator.next(); - } + @Override + public Row next() { + return rowIterator.next(); + } - @Override - public void open(ExecuteParams params) { - try { - ActionResponse response = queryAction.explain().get(); - if (queryAction instanceof AggregationQueryAction) { - rowIterator = SearchAggregationResponseHelper - .populateSearchAggregationResponse(((SearchResponse) response).getAggregations()) - .iterator(); - } else { - throw new IllegalStateException("Not support QueryAction type: " + queryAction.getClass()); - } - } catch (SqlParseException e) { - throw new RuntimeException(e); - } + @Override + public void open(ExecuteParams params) { + try { + ActionResponse response = queryAction.explain().get(); + if (queryAction instanceof AggregationQueryAction) { + rowIterator = + SearchAggregationResponseHelper.populateSearchAggregationResponse( + ((SearchResponse) response).getAggregations()) + .iterator(); + } else { + throw new IllegalStateException("Not support QueryAction type: " + queryAction.getClass()); + } + } catch (SqlParseException e) { + throw new RuntimeException(e); } + } - @SneakyThrows - @Override - public String toString() { - return queryAction.explain().toString(); - } + @SneakyThrows + @Override + public String toString() { + return queryAction.explain().toString(); + 
} } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/Scroll.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/Scroll.java index 05d538d320..40e9860886 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/Scroll.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/Scroll.java @@ -3,13 +3,11 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.scroll; import java.util.Arrays; import java.util.Collection; import java.util.Objects; - import org.opensearch.action.search.ClearScrollResponse; import org.opensearch.action.search.SearchResponse; import org.opensearch.client.Client; @@ -32,170 +30,160 @@ import org.opensearch.sql.legacy.query.planner.physical.node.BatchPhysicalOperator; import org.opensearch.sql.legacy.query.planner.resource.ResourceManager; -/** - * OpenSearch Scroll API as physical implementation of TableScan - */ +/** OpenSearch Scroll API as physical implementation of TableScan */ public class Scroll extends BatchPhysicalOperator { - /** - * Request to submit to OpenSearch to scroll over - */ - private final TableInJoinRequestBuilder request; - - /** - * Page size to scroll over index - */ - private final int pageSize; - - /** - * Client connection to ElasticSearch - */ - private Client client; - - /** - * Currently undergoing Scroll - */ - private SearchResponse scrollResponse; - - /** - * Time out - */ - private Integer timeout; - - /** - * Resource monitor manager - */ - private ResourceManager resourceMgr; - - - public Scroll(TableInJoinRequestBuilder request, int pageSize) { - this.request = request; - this.pageSize = pageSize; - } - - @Override - public PlanNode[] children() { - return new PlanNode[0]; - } - - @Override - public Cost estimate() { - return new Cost(); - } - - @Override - public void open(ExecuteParams params) 
throws Exception { - super.open(params); - client = params.get(ExecuteParams.ExecuteParamType.CLIENT); - timeout = params.get(ExecuteParams.ExecuteParamType.TIMEOUT); - resourceMgr = params.get(ExecuteParams.ExecuteParamType.RESOURCE_MANAGER); - - Object filter = params.get(ExecuteParams.ExecuteParamType.EXTRA_QUERY_FILTER); - if (filter instanceof BoolQueryBuilder) { - request.getRequestBuilder().setQuery( - generateNewQueryWithExtraFilter((BoolQueryBuilder) filter)); - - if (LOG.isDebugEnabled()) { - LOG.debug("Received extra query filter, re-build query: {}", Strings.toString(XContentType.JSON, - request.getRequestBuilder().request().source(), true, true - )); - } - } + /** Request to submit to OpenSearch to scroll over */ + private final TableInJoinRequestBuilder request; + + /** Page size to scroll over index */ + private final int pageSize; + + /** Client connection to ElasticSearch */ + private Client client; + + /** Currently undergoing Scroll */ + private SearchResponse scrollResponse; + + /** Time out */ + private Integer timeout; + + /** Resource monitor manager */ + private ResourceManager resourceMgr; + + public Scroll(TableInJoinRequestBuilder request, int pageSize) { + this.request = request; + this.pageSize = pageSize; + } + + @Override + public PlanNode[] children() { + return new PlanNode[0]; + } + + @Override + public Cost estimate() { + return new Cost(); + } + + @Override + public void open(ExecuteParams params) throws Exception { + super.open(params); + client = params.get(ExecuteParams.ExecuteParamType.CLIENT); + timeout = params.get(ExecuteParams.ExecuteParamType.TIMEOUT); + resourceMgr = params.get(ExecuteParams.ExecuteParamType.RESOURCE_MANAGER); + + Object filter = params.get(ExecuteParams.ExecuteParamType.EXTRA_QUERY_FILTER); + if (filter instanceof BoolQueryBuilder) { + request + .getRequestBuilder() + .setQuery(generateNewQueryWithExtraFilter((BoolQueryBuilder) filter)); + + if (LOG.isDebugEnabled()) { + LOG.debug( + "Received extra 
query filter, re-build query: {}", + Strings.toString( + XContentType.JSON, request.getRequestBuilder().request().source(), true, true)); + } } - - @Override - public void close() { - if (scrollResponse != null) { - LOG.debug("Closing all scroll resources"); - ClearScrollResponse clearScrollResponse = client.prepareClearScroll(). - addScrollId(scrollResponse.getScrollId()). - get(); - if (!clearScrollResponse.isSucceeded()) { - LOG.warn("Failed to close scroll: {}", clearScrollResponse.status()); - } - scrollResponse = null; - } else { - LOG.debug("Scroll already be closed"); - } + } + + @Override + public void close() { + if (scrollResponse != null) { + LOG.debug("Closing all scroll resources"); + ClearScrollResponse clearScrollResponse = + client.prepareClearScroll().addScrollId(scrollResponse.getScrollId()).get(); + if (!clearScrollResponse.isSucceeded()) { + LOG.warn("Failed to close scroll: {}", clearScrollResponse.status()); + } + scrollResponse = null; + } else { + LOG.debug("Scroll already be closed"); } - - @Override - protected Collection> prefetch() { - Objects.requireNonNull(client, "Client connection is not ready"); - Objects.requireNonNull(resourceMgr, "ResourceManager is not set"); - Objects.requireNonNull(timeout, "Time out is not set"); - - if (scrollResponse == null) { - loadFirstBatch(); - updateMetaResult(); - } else { - loadNextBatchByScrollId(); - } - return wrapRowForCurrentBatch(); + } + + @Override + protected Collection> prefetch() { + Objects.requireNonNull(client, "Client connection is not ready"); + Objects.requireNonNull(resourceMgr, "ResourceManager is not set"); + Objects.requireNonNull(timeout, "Time out is not set"); + + if (scrollResponse == null) { + loadFirstBatch(); + updateMetaResult(); + } else { + loadNextBatchByScrollId(); } - - /** - * Extra filter pushed down from upstream. Re-parse WHERE clause with extra filter - * because OpenSearch RequestBuilder doesn't allow QueryBuilder inside be changed after added. 
- */ - private QueryBuilder generateNewQueryWithExtraFilter(BoolQueryBuilder filter) throws SqlParseException { - Where where = request.getOriginalSelect().getWhere(); - BoolQueryBuilder newQuery; - if (where != null) { - newQuery = QueryMaker.explain(where, false); - newQuery.must(filter); - } else { - newQuery = filter; - } - return newQuery; + return wrapRowForCurrentBatch(); + } + + /** + * Extra filter pushed down from upstream. Re-parse WHERE clause with extra filter because + * OpenSearch RequestBuilder doesn't allow QueryBuilder inside be changed after added. + */ + private QueryBuilder generateNewQueryWithExtraFilter(BoolQueryBuilder filter) + throws SqlParseException { + Where where = request.getOriginalSelect().getWhere(); + BoolQueryBuilder newQuery; + if (where != null) { + newQuery = QueryMaker.explain(where, false); + newQuery.must(filter); + } else { + newQuery = filter; } - - private void loadFirstBatch() { - scrollResponse = request.getRequestBuilder(). - addSort(FieldSortBuilder.DOC_FIELD_NAME, SortOrder.ASC). - setSize(pageSize). - setScroll(TimeValue.timeValueSeconds(timeout)). 
- get(); + return newQuery; + } + + private void loadFirstBatch() { + scrollResponse = + request + .getRequestBuilder() + .addSort(FieldSortBuilder.DOC_FIELD_NAME, SortOrder.ASC) + .setSize(pageSize) + .setScroll(TimeValue.timeValueSeconds(timeout)) + .get(); + } + + private void updateMetaResult() { + resourceMgr.getMetaResult().addTotalNumOfShards(scrollResponse.getTotalShards()); + resourceMgr.getMetaResult().addSuccessfulShards(scrollResponse.getSuccessfulShards()); + resourceMgr.getMetaResult().addFailedShards(scrollResponse.getFailedShards()); + resourceMgr.getMetaResult().updateTimeOut(scrollResponse.isTimedOut()); + } + + private void loadNextBatchByScrollId() { + scrollResponse = + client + .prepareSearchScroll(scrollResponse.getScrollId()) + .setScroll(TimeValue.timeValueSeconds(timeout)) + .get(); + } + + @SuppressWarnings("unchecked") + private Collection> wrapRowForCurrentBatch() { + SearchHit[] hits = scrollResponse.getHits().getHits(); + Row[] rows = new Row[hits.length]; + for (int i = 0; i < hits.length; i++) { + rows[i] = new SearchHitRow(hits[i], request.getAlias()); } + return Arrays.asList(rows); + } - private void updateMetaResult() { - resourceMgr.getMetaResult().addTotalNumOfShards(scrollResponse.getTotalShards()); - resourceMgr.getMetaResult().addSuccessfulShards(scrollResponse.getSuccessfulShards()); - resourceMgr.getMetaResult().addFailedShards(scrollResponse.getFailedShards()); - resourceMgr.getMetaResult().updateTimeOut(scrollResponse.isTimedOut()); - } + @Override + public String toString() { + return "Scroll [ " + describeTable() + ", pageSize=" + pageSize + " ]"; + } - private void loadNextBatchByScrollId() { - scrollResponse = client.prepareSearchScroll(scrollResponse.getScrollId()). - setScroll(TimeValue.timeValueSeconds(timeout)). 
- get(); - } + private String describeTable() { + return request.getOriginalSelect().getFrom().get(0).getIndex() + " as " + request.getAlias(); + } - @SuppressWarnings("unchecked") - private Collection> wrapRowForCurrentBatch() { - SearchHit[] hits = scrollResponse.getHits().getHits(); - Row[] rows = new Row[hits.length]; - for (int i = 0; i < hits.length; i++) { - rows[i] = new SearchHitRow(hits[i], request.getAlias()); - } - return Arrays.asList(rows); - } + /********************************************* + * Getters for Explain + *********************************************/ - @Override - public String toString() { - return "Scroll [ " + describeTable() + ", pageSize=" + pageSize + " ]"; - } - - private String describeTable() { - return request.getOriginalSelect().getFrom().get(0).getIndex() + " as " + request.getAlias(); - } - - - /********************************************* - * Getters for Explain - *********************************************/ - - public String getRequest() { - return Strings.toString(XContentType.JSON, request.getRequestBuilder().request().source()); - } + public String getRequest() { + return Strings.toString(XContentType.JSON, request.getRequestBuilder().request().source()); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/SearchAggregationResponseHelper.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/SearchAggregationResponseHelper.java index 5e0ce1f2b4..ed0e0f2423 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/SearchAggregationResponseHelper.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/SearchAggregationResponseHelper.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.scroll; import com.google.common.annotations.VisibleForTesting; @@ -22,70 +21,82 @@ import 
org.opensearch.search.aggregations.metrics.Percentiles; import org.opensearch.sql.legacy.expression.domain.BindingTuple; -/** - * The definition of Search {@link Aggregations} parser helper class. - */ +/** The definition of Search {@link Aggregations} parser helper class. */ public class SearchAggregationResponseHelper { - public static List populateSearchAggregationResponse(Aggregations aggs) { - List> flatten = flatten(aggs); - List bindingTupleList = flatten.stream() - .map(BindingTuple::from) - .map(bindingTuple -> new BindingTupleRow(bindingTuple)) - .collect(Collectors.toList()); - return bindingTupleList; - } + public static List populateSearchAggregationResponse(Aggregations aggs) { + List> flatten = flatten(aggs); + List bindingTupleList = + flatten.stream() + .map(BindingTuple::from) + .map(bindingTuple -> new BindingTupleRow(bindingTuple)) + .collect(Collectors.toList()); + return bindingTupleList; + } - @VisibleForTesting - public static List> flatten(Aggregations aggregations) { - List aggregationList = aggregations.asList(); - List> resultList = new ArrayList<>(); - Map resultMap = new HashMap<>(); - for (Aggregation aggregation : aggregationList) { - if (aggregation instanceof Terms) { - for (Terms.Bucket bucket : ((Terms) aggregation).getBuckets()) { - List> internalBucketList = flatten(bucket.getAggregations()); - fillResultListWithInternalBucket(resultList, internalBucketList, aggregation.getName(), - bucket.getKey()); - } - } else if (aggregation instanceof NumericMetricsAggregation.SingleValue) { - resultMap.put(aggregation.getName(), ((NumericMetricsAggregation.SingleValue) aggregation).value()); - } else if (aggregation instanceof Percentiles) { - Percentiles percentiles = (Percentiles) aggregation; - resultMap.putAll((Map) StreamSupport.stream(percentiles.spliterator(), false) - .collect(Collectors.toMap( - (percentile) -> String.format("%s_%s", percentiles.getName(), percentile.getPercent()), - Percentile::getValue, (v1, v2) -> { - throw 
new IllegalArgumentException( - String.format("Duplicate key for values %s and %s", v1, v2)); - }, HashMap::new))); - } else if (aggregation instanceof Histogram) { - for (Histogram.Bucket bucket : ((Histogram) aggregation).getBuckets()) { - List> internalBucketList = flatten(bucket.getAggregations()); - fillResultListWithInternalBucket(resultList, internalBucketList, aggregation.getName(), - bucket.getKeyAsString()); - } - } else { - throw new RuntimeException("unsupported aggregation type " + aggregation.getType()); - } + @VisibleForTesting + public static List> flatten(Aggregations aggregations) { + List aggregationList = aggregations.asList(); + List> resultList = new ArrayList<>(); + Map resultMap = new HashMap<>(); + for (Aggregation aggregation : aggregationList) { + if (aggregation instanceof Terms) { + for (Terms.Bucket bucket : ((Terms) aggregation).getBuckets()) { + List> internalBucketList = flatten(bucket.getAggregations()); + fillResultListWithInternalBucket( + resultList, internalBucketList, aggregation.getName(), bucket.getKey()); } - if (!resultMap.isEmpty()) { - resultList.add(resultMap); + } else if (aggregation instanceof NumericMetricsAggregation.SingleValue) { + resultMap.put( + aggregation.getName(), ((NumericMetricsAggregation.SingleValue) aggregation).value()); + } else if (aggregation instanceof Percentiles) { + Percentiles percentiles = (Percentiles) aggregation; + resultMap.putAll( + (Map) + StreamSupport.stream(percentiles.spliterator(), false) + .collect( + Collectors.toMap( + (percentile) -> + String.format( + "%s_%s", percentiles.getName(), percentile.getPercent()), + Percentile::getValue, + (v1, v2) -> { + throw new IllegalArgumentException( + String.format("Duplicate key for values %s and %s", v1, v2)); + }, + HashMap::new))); + } else if (aggregation instanceof Histogram) { + for (Histogram.Bucket bucket : ((Histogram) aggregation).getBuckets()) { + List> internalBucketList = flatten(bucket.getAggregations()); + 
fillResultListWithInternalBucket( + resultList, internalBucketList, aggregation.getName(), bucket.getKeyAsString()); } - return resultList; + } else { + throw new RuntimeException("unsupported aggregation type " + aggregation.getType()); + } + } + if (!resultMap.isEmpty()) { + resultList.add(resultMap); } + return resultList; + } - private static void fillResultListWithInternalBucket(List> resultList, - List> internalBucketList, - String aggregationName, Object bucketKey) { - if (internalBucketList.isEmpty()) { - resultList.add(new HashMap() {{ - put(aggregationName, bucketKey); - }}); - } else { - for (Map map : internalBucketList) { - map.put(aggregationName, bucketKey); + private static void fillResultListWithInternalBucket( + List> resultList, + List> internalBucketList, + String aggregationName, + Object bucketKey) { + if (internalBucketList.isEmpty()) { + resultList.add( + new HashMap() { + { + put(aggregationName, bucketKey); } - resultList.addAll(internalBucketList); - } + }); + } else { + for (Map map : internalBucketList) { + map.put(aggregationName, bucketKey); + } + resultList.addAll(internalBucketList); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/SearchHitRow.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/SearchHitRow.java index 27e3072bab..a859a7c88c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/SearchHitRow.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/SearchHitRow.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.scroll; import com.google.common.base.Strings; @@ -15,181 +14,172 @@ import org.opensearch.sql.legacy.query.planner.physical.Row; /** - * Search hit row that implements basic accessor for SearchHit. 
- * Encapsulate all OpenSearch specific knowledge: how to parse source including nested path. - *

- * State transition: - * for example, SELECT e.name.first AS firstName, e.age AS age FROM E e JOIN D d ON ... ORDER BY ... - *

- * Stage | hit.source | tableAlias | Passed in args + * Search hit row that implements basic accessor for SearchHit. Encapsulate all OpenSearch specific + * knowledge: how to parse source including nested path. + * + *

State transition: for example, SELECT e.name.first AS firstName, e.age AS age FROM E e JOIN D + * d ON ... ORDER BY ... + * + *

Stage | hit.source | tableAlias | Passed in args * ---------------------------------------------------------------------------------------------------------------------- - * new in Scroll | {"name":{"first": "Allen", "last": "Hank"}, "age": 30} | "e"| new(SearchHit, "e") + * new in Scroll | {"name":{"first": "Allen", "last": "Hank"}, "age": 30} | "e"| new(SearchHit, "e") * ---------------------------------------------------------------------------------------------------------------------- - * key()/combine() | | | key("name.first", "age") - * in JoinAlgorithm | {"e.name": {...}, "e.age": 30, "d..." } (after combined) | "" | combine(row of D) + * key()/combine() | | | key("name.first", "age") in JoinAlgorithm | {"e.name": {...}, "e.age": 30, + * "d..." } (after combined) | "" | combine(row of D) * ---------------------------------------------------------------------------------------------------------------------- - * key() in XXSort | same | "" | key("e.name.first", "e.age") + * key() in XXSort | same | "" | key("e.name.first", "e.age") * ---------------------------------------------------------------------------------------------------------------------- - * retain() in Project | {"firstName": "Allen", "age": 30 } | "" | retain("e.name.first", "e.age") + * retain() in Project | {"firstName": "Allen", "age": 30 } | "" | retain("e.name.first", "e.age") * ---------------------------------------------------------------------------------------------------------------------- */ class SearchHitRow implements Row { - /** - * Native OpenSearch data object for each row - */ - private final SearchHit hit; - - /** - * Column and value pairs - */ - private final Map source; - - /** - * Table alias owned the row. 
Empty if this row comes from combination of two other rows - */ - private final String tableAlias; + /** Native OpenSearch data object for each row */ + private final SearchHit hit; - SearchHitRow(SearchHit hit, String tableAlias) { - this.hit = hit; - this.source = hit.getSourceAsMap(); - this.tableAlias = tableAlias; - } + /** Column and value pairs */ + private final Map source; - @Override - public RowKey key(String[] colNames) { - if (colNames.length == 0) { - return RowKey.NULL; - } - - Object[] keys = new Object[colNames.length]; - for (int i = 0; i < colNames.length; i++) { - keys[i] = getValueOfPath(colNames[i]); - - if (keys[i] == null) { - return RowKey.NULL; - } - } - return new RowKey(keys); - } + /** Table alias owned the row. Empty if this row comes from combination of two other rows */ + private final String tableAlias; - /** - * Replace column name by full name to avoid naming conflicts. - * For efficiency, this only happens here when matched rows found. - * Create a new one to avoid mutating the original ones in hash table which impact subsequent match. 
- */ - @Override - public Row combine(Row other) { - SearchHit combined = cloneHit(other); - - collectFullName(combined.getSourceAsMap(), this); - if (other != NULL) { - collectFullName(combined.getSourceAsMap(), (SearchHitRow) other); - } - return new SearchHitRow(combined, ""); - } + SearchHitRow(SearchHit hit, String tableAlias) { + this.hit = hit; + this.source = hit.getSourceAsMap(); + this.tableAlias = tableAlias; + } - @Override - public void retain(Map colNameAlias) { - Map aliasSource = new HashMap<>(); - colNameAlias.forEach((colName, alias) -> { - if (colName.endsWith(".*")) { - String tableAlias = colName.substring(0, colName.length() - 2) + "."; - retainAllFieldsFromTable(aliasSource, tableAlias); - } else { - retainOneField(aliasSource, colName, alias); - } - }); - resetSource(aliasSource); + @Override + public RowKey key(String[] colNames) { + if (colNames.length == 0) { + return RowKey.NULL; } - @Override - public SearchHit data() { - return hit; - } + Object[] keys = new Object[colNames.length]; + for (int i = 0; i < colNames.length; i++) { + keys[i] = getValueOfPath(colNames[i]); - @Override - public String toString() { - return "SearchHitRow{" + "hit=" + source + '}'; + if (keys[i] == null) { + return RowKey.NULL; + } } - - private Object getValueOfPath(String path) { - /* - * If table alias is missing which means the row was generated by combine(). - * In this case, table alias is present and the first dot should be ignored, ex. "e.name.first" - */ - return getValueOfPath(source, path, Strings.isNullOrEmpty(tableAlias)); + return new RowKey(keys); + } + + /** + * Replace column name by full name to avoid naming conflicts. For efficiency, this only happens + * here when matched rows found. Create a new one to avoid mutating the original ones in hash + * table which impact subsequent match. 
+ */ + @Override + public Row combine(Row other) { + SearchHit combined = cloneHit(other); + + collectFullName(combined.getSourceAsMap(), this); + if (other != NULL) { + collectFullName(combined.getSourceAsMap(), (SearchHitRow) other); } - - /** - * Recursively get value for field name path, such as object field a.b.c + return new SearchHitRow(combined, ""); + } + + @Override + public void retain(Map colNameAlias) { + Map aliasSource = new HashMap<>(); + colNameAlias.forEach( + (colName, alias) -> { + if (colName.endsWith(".*")) { + String tableAlias = colName.substring(0, colName.length() - 2) + "."; + retainAllFieldsFromTable(aliasSource, tableAlias); + } else { + retainOneField(aliasSource, colName, alias); + } + }); + resetSource(aliasSource); + } + + @Override + public SearchHit data() { + return hit; + } + + @Override + public String toString() { + return "SearchHitRow{" + "hit=" + source + '}'; + } + + private Object getValueOfPath(String path) { + /* + * If table alias is missing which means the row was generated by combine(). + * In this case, table alias is present and the first dot should be ignored, ex. "e.name.first" */ - private Object getValueOfPath(Object source, String path, boolean isIgnoreFirstDot) { - if (!(source instanceof Map) || path.isEmpty()) { - return source; - } - - int dot = path.indexOf('.', (isIgnoreFirstDot ? path.indexOf('.') + 1 : 0)); - if (dot == -1) { - return ((Map) source).get(path); - } - - // Object field name maybe unexpanded without recursive object structure - // ex. 
{"a.b.c": value} instead of {"a": {"b": {"c": value}}}} - if (((Map) source).containsKey(path)) { - return ((Map) source).get(path); - } - - return getValueOfPath( - ((Map) source).get(path.substring(0, dot)), - path.substring(dot + 1), - false - ); - } + return getValueOfPath(source, path, Strings.isNullOrEmpty(tableAlias)); + } - private SearchHit cloneHit(Row other) { - Map documentFields = new HashMap<>(); - Map metaFields = new HashMap<>(); - hit.getFields().forEach((fieldName, docField) -> - (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) ? metaFields : documentFields).put(fieldName, docField)); - SearchHit combined = new SearchHit( - hit.docId(), - hit.getId() + "|" + (other == NULL ? "0" : ((SearchHitRow) other).hit.getId()), - documentFields, - metaFields - ); - combined.sourceRef(hit.getSourceRef()); - combined.getSourceAsMap().clear(); - return combined; + /** Recursively get value for field name path, such as object field a.b.c */ + private Object getValueOfPath(Object source, String path, boolean isIgnoreFirstDot) { + if (!(source instanceof Map) || path.isEmpty()) { + return source; } - private void collectFullName(Map newSource, SearchHitRow row) { - row.source.forEach((colName, value) -> newSource.put(row.tableAlias + "." + colName, value)); + int dot = path.indexOf('.', (isIgnoreFirstDot ? path.indexOf('.') + 1 : 0)); + if (dot == -1) { + return ((Map) source).get(path); } - private void retainAllFieldsFromTable(Map aliasSource, String tableAlias) { - source.entrySet(). - stream(). - filter(e -> e.getKey().startsWith(tableAlias)). - forEach(e -> aliasSource.put(e.getKey(), e.getValue())); + // Object field name maybe unexpanded without recursive object structure + // ex. {"a.b.c": value} instead of {"a": {"b": {"c": value}}}} + if (((Map) source).containsKey(path)) { + return ((Map) source).get(path); } - /** - * Note that column here is already prefixed by table alias after combine(). - *

- * Meanwhile check if column name with table alias prefix, ex. a.name, is property, namely a.name.lastname. - * In this case, split by first second dot and continue searching for the final value in nested map - * by getValueOfPath(source.get("a.name"), "lastname") - */ - private void retainOneField(Map aliasSource, String colName, String alias) { - aliasSource.put( - Strings.isNullOrEmpty(alias) ? colName : alias, - getValueOfPath(colName) - ); - } - - private void resetSource(Map newSource) { - source.clear(); - source.putAll(newSource); - } + return getValueOfPath( + ((Map) source).get(path.substring(0, dot)), path.substring(dot + 1), false); + } + + private SearchHit cloneHit(Row other) { + Map documentFields = new HashMap<>(); + Map metaFields = new HashMap<>(); + hit.getFields() + .forEach( + (fieldName, docField) -> + (MapperService.META_FIELDS_BEFORE_7DOT8.contains(fieldName) + ? metaFields + : documentFields) + .put(fieldName, docField)); + SearchHit combined = + new SearchHit( + hit.docId(), + hit.getId() + "|" + (other == NULL ? "0" : ((SearchHitRow) other).hit.getId()), + documentFields, + metaFields); + combined.sourceRef(hit.getSourceRef()); + combined.getSourceAsMap().clear(); + return combined; + } + + private void collectFullName(Map newSource, SearchHitRow row) { + row.source.forEach((colName, value) -> newSource.put(row.tableAlias + "." + colName, value)); + } + + private void retainAllFieldsFromTable(Map aliasSource, String tableAlias) { + source.entrySet().stream() + .filter(e -> e.getKey().startsWith(tableAlias)) + .forEach(e -> aliasSource.put(e.getKey(), e.getValue())); + } + + /** + * Note that column here is already prefixed by table alias after combine(). + * + *

Meanwhile check if column name with table alias prefix, ex. a.name, is property, namely + * a.name.lastname. In this case, split by first second dot and continue searching for the final + * value in nested map by getValueOfPath(source.get("a.name"), "lastname") + */ + private void retainOneField(Map aliasSource, String colName, String alias) { + aliasSource.put(Strings.isNullOrEmpty(alias) ? colName : alias, getValueOfPath(colName)); + } + + private void resetSource(Map newSource) { + source.clear(); + source.putAll(newSource); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/sort/QuickSort.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/sort/QuickSort.java index 90ae595d56..abfcf273ad 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/sort/QuickSort.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/physical/node/sort/QuickSort.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.physical.node.sort; import static java.util.Collections.emptyList; @@ -23,83 +22,80 @@ import org.opensearch.sql.legacy.query.planner.physical.node.BatchPhysicalOperator; /** - * Physical operator to sort by quick sort implementation in JDK. - * Note that this is all in-memory operator which may be a problem for large index. + * Physical operator to sort by quick sort implementation in JDK. Note that this is all in-memory + * operator which may be a problem for large index. 
* * @param actual data type, ex.SearchHit */ public class QuickSort extends BatchPhysicalOperator { - private static final Logger LOG = LogManager.getLogger(); + private static final Logger LOG = LogManager.getLogger(); - private final PhysicalOperator next; + private final PhysicalOperator next; - /** - * Column name list in ORDER BY - */ - private final String[] orderByColNames; + /** Column name list in ORDER BY */ + private final String[] orderByColNames; - /** - * Order by type, ex. ASC, DESC - */ - private final String orderByType; + /** Order by type, ex. ASC, DESC */ + private final String orderByType; - private boolean isDone = false; + private boolean isDone = false; - public QuickSort(PhysicalOperator next, List orderByColNames, String orderByType) { - this.next = next; - this.orderByColNames = orderByColNames.toArray(new String[0]); - this.orderByType = orderByType; - } + public QuickSort(PhysicalOperator next, List orderByColNames, String orderByType) { + this.next = next; + this.orderByColNames = orderByColNames.toArray(new String[0]); + this.orderByType = orderByType; + } - @Override - public PlanNode[] children() { - return new PlanNode[]{next}; - } + @Override + public PlanNode[] children() { + return new PlanNode[] {next}; + } - @Override - public Cost estimate() { - return new Cost(); - } + @Override + public Cost estimate() { + return new Cost(); + } - @Override - public void open(ExecuteParams params) throws Exception { - super.open(params); - next.open(params); - } + @Override + public void open(ExecuteParams params) throws Exception { + super.open(params); + next.open(params); + } - /** - * Only load all data once and return one batch - */ - @Override - protected Collection> prefetch() { - if (isDone) { - return emptyList(); - } - - List> allRowsSorted = new ArrayList<>(); - next.forEachRemaining(allRowsSorted::add); - allRowsSorted.sort(createRowComparator()); - - if (LOG.isTraceEnabled()) { - LOG.trace("All rows being sorted in RB-Tree: 
{}", allRowsSorted); - } - - isDone = true; - return allRowsSorted; + /** Only load all data once and return one batch */ + @Override + protected Collection> prefetch() { + if (isDone) { + return emptyList(); } - private Comparator> createRowComparator() { - Comparator> comparator = Comparator.comparing(o -> o.key(orderByColNames)); - if ("DESC".equals(orderByType)) { - comparator = comparator.reversed(); - } - return comparator; - } + List> allRowsSorted = new ArrayList<>(); + next.forEachRemaining(allRowsSorted::add); + allRowsSorted.sort(createRowComparator()); - @Override - public String toString() { - return "QuickSort [ columns=" + Arrays.toString(orderByColNames) + ", order=" + orderByType + " ]"; + if (LOG.isTraceEnabled()) { + LOG.trace("All rows being sorted in RB-Tree: {}", allRowsSorted); } + isDone = true; + return allRowsSorted; + } + + private Comparator> createRowComparator() { + Comparator> comparator = Comparator.comparing(o -> o.key(orderByColNames)); + if ("DESC".equals(orderByType)) { + comparator = comparator.reversed(); + } + return comparator; + } + + @Override + public String toString() { + return "QuickSort [ columns=" + + Arrays.toString(orderByColNames) + + ", order=" + + orderByType + + " ]"; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/ResourceManager.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/ResourceManager.java index 32cc7f45e3..4818d0a3ee 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/ResourceManager.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/ResourceManager.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.resource; import java.time.Duration; @@ -18,55 +17,48 @@ import org.opensearch.sql.legacy.query.planner.resource.monitor.Monitor; import org.opensearch.sql.legacy.query.planner.resource.monitor.TotalMemoryMonitor; -/** 
- * Aggregated resource monitor - */ +/** Aggregated resource monitor */ public class ResourceManager { - private static final Logger LOG = LogManager.getLogger(); + private static final Logger LOG = LogManager.getLogger(); + + /** Actual resource monitor list */ + private final List monitors = new ArrayList<>(); - /** - * Actual resource monitor list - */ - private final List monitors = new ArrayList<>(); + /** Time out for the execution */ + private final int timeout; - /** - * Time out for the execution - */ - private final int timeout; - private final Instant startTime; + private final Instant startTime; - /** - * Meta result of the execution - */ - private final MetaSearchResult metaResult; + /** Meta result of the execution */ + private final MetaSearchResult metaResult; - public ResourceManager(Stats stats, Config config) { - this.monitors.add(new TotalMemoryMonitor(stats, config)); - this.timeout = config.timeout(); - this.startTime = Instant.now(); - this.metaResult = new MetaSearchResult(); - } + public ResourceManager(Stats stats, Config config) { + this.monitors.add(new TotalMemoryMonitor(stats, config)); + this.timeout = config.timeout(); + this.startTime = Instant.now(); + this.metaResult = new MetaSearchResult(); + } - /** - * Is all resource monitor healthy with strategy. - * - * @return true for yes - */ - public boolean isHealthy() { - return BackOffRetryStrategy.isHealthy(); - } + /** + * Is all resource monitor healthy with strategy. + * + * @return true for yes + */ + public boolean isHealthy() { + return BackOffRetryStrategy.isHealthy(); + } - /** - * Is current execution time out? - * - * @return true for yes - */ - public boolean isTimeout() { - return Duration.between(startTime, Instant.now()).getSeconds() >= timeout; - } + /** + * Is current execution time out? 
+ * + * @return true for yes + */ + public boolean isTimeout() { + return Duration.between(startTime, Instant.now()).getSeconds() >= timeout; + } - public MetaSearchResult getMetaResult() { - return metaResult; - } + public MetaSearchResult getMetaResult() { + return metaResult; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/Stats.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/Stats.java index ec03eeaccb..8cf827d58e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/Stats.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/Stats.java @@ -3,67 +3,57 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.resource; import org.opensearch.client.Client; /** * Statistics collector collects from OpenSearch stats, JVM etc for other components: - *

- * 1) Resource monitor - * 2) Cost estimation - * 3) Block size calculation + * + *

1) Resource monitor 2) Cost estimation 3) Block size calculation */ public class Stats { - /** - * Client connection to OpenSearch cluster (unused now) - */ - private Client client; - - public Stats(Client client) { - this.client = client; - } - - public MemStats collectMemStats() { - return new MemStats( - Runtime.getRuntime().freeMemory(), - Runtime.getRuntime().totalMemory() - ); - } + /** Client connection to OpenSearch cluster (unused now) */ + private Client client; - /** - * Statistics data class for memory usage - */ - public static class MemStats { - private long free; - private long total; + public Stats(Client client) { + this.client = client; + } - public MemStats(long free, long total) { - this.free = free; - this.total = total; - } + public MemStats collectMemStats() { + return new MemStats(Runtime.getRuntime().freeMemory(), Runtime.getRuntime().totalMemory()); + } - public long getFree() { - return free; - } + /** Statistics data class for memory usage */ + public static class MemStats { + private long free; + private long total; - public long getTotal() { - return total; - } + public MemStats(long free, long total) { + this.free = free; + this.total = total; } - /* - public class IndexStats { - private long size; - private long docNum; + public long getFree() { + return free; + } - public IndexStats(long size, long docNum) { - this.size = size; - this.docNum = docNum; - } + public long getTotal() { + return total; } - */ + } + + /* + public class IndexStats { + private long size; + private long docNum; + + public IndexStats(long size, long docNum) { + this.size = size; + this.docNum = docNum; + } + } + */ } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/blocksize/AdaptiveBlockSize.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/blocksize/AdaptiveBlockSize.java index 7990b8c8d4..339e326cc3 100644 --- 
a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/blocksize/AdaptiveBlockSize.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/blocksize/AdaptiveBlockSize.java @@ -3,28 +3,25 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.resource.blocksize; -/** - * Adaptive block size calculator based on resource usage dynamically. - */ +/** Adaptive block size calculator based on resource usage dynamically. */ public class AdaptiveBlockSize implements BlockSize { - private int upperLimit; + private int upperLimit; - public AdaptiveBlockSize(int upperLimit) { - this.upperLimit = upperLimit; - } + public AdaptiveBlockSize(int upperLimit) { + this.upperLimit = upperLimit; + } - @Override - public int size() { - //TODO: calculate dynamically on each call - return upperLimit; - } + @Override + public int size() { + // TODO: calculate dynamically on each call + return upperLimit; + } - @Override - public String toString() { - return "AdaptiveBlockSize with " + "upperLimit=" + upperLimit; - } + @Override + public String toString() { + return "AdaptiveBlockSize with " + "upperLimit=" + upperLimit; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/blocksize/BlockSize.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/blocksize/BlockSize.java index d68b16b8bb..d8db7a0fad 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/blocksize/BlockSize.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/blocksize/BlockSize.java @@ -3,42 +3,35 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.resource.blocksize; -/** - * Block size calculating logic. - */ +/** Block size calculating logic. */ public interface BlockSize { - /** - * Get block size configured or dynamically. Integer should be sufficient for single block size. 
- * - * @return block size. - */ - int size(); - + /** + * Get block size configured or dynamically. Integer should be sufficient for single block size. + * + * @return block size. + */ + int size(); - /** - * Default implementation with fixed block size - */ - class FixedBlockSize implements BlockSize { + /** Default implementation with fixed block size */ + class FixedBlockSize implements BlockSize { - private int blockSize; + private int blockSize; - public FixedBlockSize(int blockSize) { - this.blockSize = blockSize; - } - - @Override - public int size() { - return blockSize; - } + public FixedBlockSize(int blockSize) { + this.blockSize = blockSize; + } - @Override - public String toString() { - return "FixedBlockSize with " + "size=" + blockSize; - } + @Override + public int size() { + return blockSize; } + @Override + public String toString() { + return "FixedBlockSize with " + "size=" + blockSize; + } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/monitor/Monitor.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/monitor/Monitor.java index 10b36f2483..52bc42587f 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/monitor/Monitor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/monitor/Monitor.java @@ -3,19 +3,15 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.resource.monitor; -/** - * Interface for different monitor component - */ +/** Interface for different monitor component */ public interface Monitor { - /** - * Is resource being monitored exhausted. - * - * @return true if yes - */ - boolean isHealthy(); - + /** + * Is resource being monitored exhausted. 
+ * + * @return true if yes + */ + boolean isHealthy(); } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/monitor/TotalMemoryMonitor.java b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/monitor/TotalMemoryMonitor.java index 961729867d..76a8c5902c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/monitor/TotalMemoryMonitor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/query/planner/resource/monitor/TotalMemoryMonitor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.query.planner.resource.monitor; import org.apache.logging.log4j.LogManager; @@ -12,46 +11,39 @@ import org.opensearch.sql.legacy.query.planner.resource.Stats; import org.opensearch.sql.legacy.query.planner.resource.Stats.MemStats; -/** - * Circuit breaker for total memory usage in JVM on current OpenSearch node. - */ +/** Circuit breaker for total memory usage in JVM on current OpenSearch node. 
*/ public class TotalMemoryMonitor implements Monitor { - private static final Logger LOG = LogManager.getLogger(); - - /** - * Statistic collector - */ - private final Stats stats; + private static final Logger LOG = LogManager.getLogger(); - /** - * Upper limit for memory usage percentage - */ - private final int limit; + /** Statistic collector */ + private final Stats stats; - public TotalMemoryMonitor(Stats stats, Config config) { - this.stats = stats; - this.limit = config.circuitBreakLimit(); - } + /** Upper limit for memory usage percentage */ + private final int limit; - @Override - public boolean isHealthy() { - MemStats memStats = stats.collectMemStats(); - int usage = percentage(memUsage(memStats)); + public TotalMemoryMonitor(Stats stats, Config config) { + this.stats = stats; + this.limit = config.circuitBreakLimit(); + } - if (LOG.isDebugEnabled()) { - LOG.debug("Memory usage and limit: {}%, {}%", usage, limit); - } + @Override + public boolean isHealthy() { + MemStats memStats = stats.collectMemStats(); + int usage = percentage(memUsage(memStats)); - return usage < limit; + if (LOG.isDebugEnabled()) { + LOG.debug("Memory usage and limit: {}%, {}%", usage, limit); } - private int percentage(double usage) { - return (int) Math.round(usage * 100); - } + return usage < limit; + } - private double memUsage(MemStats memStats) { - return (1.0 * (memStats.getTotal() - memStats.getFree())) / memStats.getTotal(); - } + private int percentage(double usage) { + return (int) Math.round(usage * 100); + } + private double memUsage(MemStats memStats) { + return (1.0 * (memStats.getTotal() - memStats.getFree())) / memStats.getTotal(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/request/PreparedStatementRequest.java b/legacy/src/main/java/org/opensearch/sql/legacy/request/PreparedStatementRequest.java index deff4e2393..c32e529157 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/request/PreparedStatementRequest.java +++ 
b/legacy/src/main/java/org/opensearch/sql/legacy/request/PreparedStatementRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.request; import java.util.List; @@ -11,174 +10,181 @@ public class PreparedStatementRequest extends SqlRequest { - private List parameters; - private String sqlTemplate; - - public PreparedStatementRequest(String sql, JSONObject payloadJson, List parameters) { - super(null, payloadJson); - this.sqlTemplate = sql; - this.parameters = parameters; - this.sql = this.substituteParameters(); - } - - public PreparedStatementRequest(String sql, final Integer fetchSize, - JSONObject payloadJson, List parameters) { - this(sql, payloadJson, parameters); - this.fetchSize = fetchSize; + private List parameters; + private String sqlTemplate; + + public PreparedStatementRequest( + String sql, JSONObject payloadJson, List parameters) { + super(null, payloadJson); + this.sqlTemplate = sql; + this.parameters = parameters; + this.sql = this.substituteParameters(); + } + + public PreparedStatementRequest( + String sql, + final Integer fetchSize, + JSONObject payloadJson, + List parameters) { + this(sql, payloadJson, parameters); + this.fetchSize = fetchSize; + } + + public List getParameters() { + return this.parameters; + } + + @Override + public String getSql() { + return this.sql; + } + + public String getPreparedStatement() { + return this.sqlTemplate; + } + + private String substituteParameters() { + if (this.sqlTemplate == null) { + return null; } - public List getParameters() { - return this.parameters; - } - - @Override - public String getSql() { - return this.sql; - } - - public String getPreparedStatement() { - return this.sqlTemplate; - } - - private String substituteParameters() { - if (this.sqlTemplate == null) { - return null; - } - - StringBuilder sb = new StringBuilder(); - int paramIndex = 0; - int i = 0; + StringBuilder sb = new StringBuilder(); + int paramIndex = 0; + int i = 0; + while (i 
< this.sqlTemplate.length()) { + char c = this.sqlTemplate.charAt(i); + if (c == '\'') { + // found string starting quote character, skip the string + sb.append(c); + i++; while (i < this.sqlTemplate.length()) { - char c = this.sqlTemplate.charAt(i); - if (c == '\'') { - // found string starting quote character, skip the string - sb.append(c); - i++; - while (i < this.sqlTemplate.length()) { - char s = this.sqlTemplate.charAt(i); - sb.append(s); - if (s == '\'') { - if (this.sqlTemplate.charAt(i - 1) == '\\') { - // this is an escaped single quote (\') still in the string - i++; - } else if ((i + 1) < this.sqlTemplate.length() && this.sqlTemplate.charAt(i + 1) == '\'') { - // found 2 single quote {''} in a string, which is escaped single quote {'} - // move to next character - sb.append('\''); - i += 2; - } else { - // found the string ending single quote char - break; - } - } else { - // not single quote character, move on - i++; - } - } - } else if (c == '?') { - // question mark "?" not in a string - if (paramIndex >= this.parameters.size()) { - throw new IllegalStateException("Placeholder count is greater than parameter number " - + parameters.size() + " . Cannot convert PreparedStatement to sql query"); - } - sb.append(this.parameters.get(paramIndex).getSqlSubstitutionValue()); - paramIndex++; + char s = this.sqlTemplate.charAt(i); + sb.append(s); + if (s == '\'') { + if (this.sqlTemplate.charAt(i - 1) == '\\') { + // this is an escaped single quote (\') still in the string + i++; + } else if ((i + 1) < this.sqlTemplate.length() + && this.sqlTemplate.charAt(i + 1) == '\'') { + // found 2 single quote {''} in a string, which is escaped single quote {'} + // move to next character + sb.append('\''); + i += 2; } else { - // other character, simply append - sb.append(c); + // found the string ending single quote char + break; } + } else { + // not single quote character, move on i++; + } } - - return sb.toString(); + } else if (c == '?') { + // question mark "?" 
not in a string + if (paramIndex >= this.parameters.size()) { + throw new IllegalStateException( + "Placeholder count is greater than parameter number " + + parameters.size() + + " . Cannot convert PreparedStatement to sql query"); + } + sb.append(this.parameters.get(paramIndex).getSqlSubstitutionValue()); + paramIndex++; + } else { + // other character, simply append + sb.append(c); + } + i++; } - ////////////////////////////////////////////////// - // Parameter related types below - ////////////////////////////////////////////////// - public enum ParameterType { - BYTE, - SHORT, - INTEGER, - LONG, - FLOAT, - DOUBLE, - BOOLEAN, - STRING, - KEYWORD, - DATE, - NULL + return sb.toString(); + } + + ////////////////////////////////////////////////// + // Parameter related types below + ////////////////////////////////////////////////// + public enum ParameterType { + BYTE, + SHORT, + INTEGER, + LONG, + FLOAT, + DOUBLE, + BOOLEAN, + STRING, + KEYWORD, + DATE, + NULL + } + + public static class PreparedStatementParameter { + protected T value; + + public PreparedStatementParameter(T value) { + this.value = value; } - public static class PreparedStatementParameter { - protected T value; - - public PreparedStatementParameter(T value) { - this.value = value; - } - - public String getSqlSubstitutionValue() { - return String.valueOf(this.value); - } + public String getSqlSubstitutionValue() { + return String.valueOf(this.value); + } - public T getValue() { - return this.value; - } + public T getValue() { + return this.value; } + } - public static class StringParameter extends PreparedStatementParameter { + public static class StringParameter extends PreparedStatementParameter { - public StringParameter(String value) { - super(value); - } + public StringParameter(String value) { + super(value); + } - @Override - public String getSqlSubstitutionValue() { - // TODO: investigate other injection prevention - if (this.value == null) { - return "null"; - } - StringBuilder sb = new 
StringBuilder(); - sb.append('\''); // starting quote - for (int i = 0; i < this.value.length(); i++) { - char c = this.value.charAt(i); - switch (c) { - case 0: - sb.append('\\').append(0); - break; - case '\n': - sb.append('\\').append('n'); - break; - case '\r': - sb.append('\\').append('r'); - break; - case '\\': - sb.append('\\').append('\\'); - break; - case '\'': - sb.append('\\').append('\''); - break; - case '\"': - sb.append('\\').append('\"'); - break; - default: - sb.append(c); - } - } - sb.append('\''); // ending quote - return sb.toString(); + @Override + public String getSqlSubstitutionValue() { + // TODO: investigate other injection prevention + if (this.value == null) { + return "null"; + } + StringBuilder sb = new StringBuilder(); + sb.append('\''); // starting quote + for (int i = 0; i < this.value.length(); i++) { + char c = this.value.charAt(i); + switch (c) { + case 0: + sb.append('\\').append(0); + break; + case '\n': + sb.append('\\').append('n'); + break; + case '\r': + sb.append('\\').append('r'); + break; + case '\\': + sb.append('\\').append('\\'); + break; + case '\'': + sb.append('\\').append('\''); + break; + case '\"': + sb.append('\\').append('\"'); + break; + default: + sb.append(c); } + } + sb.append('\''); // ending quote + return sb.toString(); } + } - public static class NullParameter extends PreparedStatementParameter { + public static class NullParameter extends PreparedStatementParameter { - public NullParameter() { - super(null); - } + public NullParameter() { + super(null); + } - @Override - public String getSqlSubstitutionValue() { - return "null"; - } + @Override + public String getSqlSubstitutionValue() { + return "null"; } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/request/SqlRequest.java b/legacy/src/main/java/org/opensearch/sql/legacy/request/SqlRequest.java index 8ac66e4b70..bffdd36688 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/request/SqlRequest.java +++ 
b/legacy/src/main/java/org/opensearch/sql/legacy/request/SqlRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.request; import com.fasterxml.jackson.core.JsonFactory; @@ -22,95 +21,96 @@ public class SqlRequest { - public static final SqlRequest NULL = new SqlRequest("", null); - - String sql; - JSONObject jsonContent; - String cursor; - Integer fetchSize; - - public SqlRequest(final String sql, final JSONObject jsonContent) { - this.sql = sql; - this.jsonContent = jsonContent; - } - - public SqlRequest(final String cursor) { - this.cursor = cursor; - } - - public SqlRequest(final String sql, final Integer fetchSize, final JSONObject jsonContent) { - this.sql = sql; - this.fetchSize = fetchSize; - this.jsonContent = jsonContent; - } - - private static boolean isValidJson(String json) { - try { - new JSONObject(json); - } catch (JSONException e) { - return false; - } - return true; + public static final SqlRequest NULL = new SqlRequest("", null); + + String sql; + JSONObject jsonContent; + String cursor; + Integer fetchSize; + + public SqlRequest(final String sql, final JSONObject jsonContent) { + this.sql = sql; + this.jsonContent = jsonContent; + } + + public SqlRequest(final String cursor) { + this.cursor = cursor; + } + + public SqlRequest(final String sql, final Integer fetchSize, final JSONObject jsonContent) { + this.sql = sql; + this.fetchSize = fetchSize; + this.jsonContent = jsonContent; + } + + private static boolean isValidJson(String json) { + try { + new JSONObject(json); + } catch (JSONException e) { + return false; } - - public String getSql() { - return this.sql; - } - - public String cursor() { - return this.cursor; - } - - public Integer fetchSize() { - return this.fetchSize; - } - - public JSONObject getJsonContent() { - return this.jsonContent; - } - - /** - * JSONObject's getJSONObject method will return just the value, this helper method is to extract the key and - * value of 'filter' and 
return the JSON as a string. - */ - private String getFilterObjectAsString(JSONObject jsonContent) { - String filterVal = jsonContent.getJSONObject("filter").toString(); - return "{\"filter\":" + filterVal + "}"; - } - - private boolean hasFilterInRequest() { - return jsonContent != null && jsonContent.has("filter"); - } - - /** - * Takes 'filter' parameter from JSON request if JSON request and 'filter' were given and creates a QueryBuilder - * object out of it to add to the filterClauses of the BoolQueryBuilder. - */ - private void addFilterFromJson(BoolQueryBuilder boolQuery) throws SqlParseException { - try { - String filter = getFilterObjectAsString(jsonContent); - SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList()); - XContentParser parser = new JsonXContentParser( - new NamedXContentRegistry(searchModule.getNamedXContents()), - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(filter)); - - // nextToken is called before passing the parser to fromXContent since the fieldName will be null if the - // first token it parses is START_OBJECT resulting in an exception - parser.nextToken(); - boolQuery.filter(BoolQueryBuilder.fromXContent(parser)); - } catch (IOException e) { - throw new SqlParseException("Unable to parse 'filter' in JSON request: " + e.getMessage()); - } - + return true; + } + + public String getSql() { + return this.sql; + } + + public String cursor() { + return this.cursor; + } + + public Integer fetchSize() { + return this.fetchSize; + } + + public JSONObject getJsonContent() { + return this.jsonContent; + } + + /** + * JSONObject's getJSONObject method will return just the value, this helper method is to extract + * the key and value of 'filter' and return the JSON as a string. 
+ */ + private String getFilterObjectAsString(JSONObject jsonContent) { + String filterVal = jsonContent.getJSONObject("filter").toString(); + return "{\"filter\":" + filterVal + "}"; + } + + private boolean hasFilterInRequest() { + return jsonContent != null && jsonContent.has("filter"); + } + + /** + * Takes 'filter' parameter from JSON request if JSON request and 'filter' were given and creates + * a QueryBuilder object out of it to add to the filterClauses of the BoolQueryBuilder. + */ + private void addFilterFromJson(BoolQueryBuilder boolQuery) throws SqlParseException { + try { + String filter = getFilterObjectAsString(jsonContent); + SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList()); + XContentParser parser = + new JsonXContentParser( + new NamedXContentRegistry(searchModule.getNamedXContents()), + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(filter)); + + // nextToken is called before passing the parser to fromXContent since the fieldName will be + // null if the + // first token it parses is START_OBJECT resulting in an exception + parser.nextToken(); + boolQuery.filter(BoolQueryBuilder.fromXContent(parser)); + } catch (IOException e) { + throw new SqlParseException("Unable to parse 'filter' in JSON request: " + e.getMessage()); } + } - public BoolQueryBuilder checkAndAddFilter(BoolQueryBuilder boolQuery) throws SqlParseException { - if (hasFilterInRequest()) { - // if WHERE was not given, create a new BoolQuery to add "filter" to - boolQuery = boolQuery == null ? new BoolQueryBuilder() : boolQuery; - addFilterFromJson(boolQuery); - } - return boolQuery; + public BoolQueryBuilder checkAndAddFilter(BoolQueryBuilder boolQuery) throws SqlParseException { + if (hasFilterInRequest()) { + // if WHERE was not given, create a new BoolQuery to add "filter" to + boolQuery = boolQuery == null ? 
new BoolQueryBuilder() : boolQuery; + addFilterFromJson(boolQuery); } + return boolQuery; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/request/SqlRequestFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/request/SqlRequestFactory.java index 4c5d207be8..0fee6cff86 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/request/SqlRequestFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/request/SqlRequestFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.request; import java.util.ArrayList; @@ -16,128 +15,134 @@ public class SqlRequestFactory { - private static final String SQL_URL_PARAM_KEY = "sql"; - private static final String SQL_FIELD_NAME = "query"; - private static final String PARAM_FIELD_NAME = "parameters"; - private static final String PARAM_TYPE_FIELD_NAME = "type"; - private static final String PARAM_VALUE_FIELD_NAME = "value"; + private static final String SQL_URL_PARAM_KEY = "sql"; + private static final String SQL_FIELD_NAME = "query"; + private static final String PARAM_FIELD_NAME = "parameters"; + private static final String PARAM_TYPE_FIELD_NAME = "type"; + private static final String PARAM_VALUE_FIELD_NAME = "value"; - public static final String SQL_CURSOR_FIELD_NAME = "cursor"; - public static final String SQL_FETCH_FIELD_NAME = "fetch_size"; + public static final String SQL_CURSOR_FIELD_NAME = "cursor"; + public static final String SQL_FETCH_FIELD_NAME = "fetch_size"; - public static SqlRequest getSqlRequest(RestRequest request) { - switch (request.method()) { - case POST: - return parseSqlRequestFromPayload(request); - default: - throw new IllegalArgumentException("OpenSearch SQL doesn't supported HTTP " + request.method().name()); - } + public static SqlRequest getSqlRequest(RestRequest request) { + switch (request.method()) { + case POST: + return parseSqlRequestFromPayload(request); + default: + throw new IllegalArgumentException( + 
"OpenSearch SQL doesn't supported HTTP " + request.method().name()); } + } - private static SqlRequest parseSqlRequestFromUrl(RestRequest restRequest) { - String sql; + private static SqlRequest parseSqlRequestFromUrl(RestRequest restRequest) { + String sql; - sql = restRequest.param(SQL_URL_PARAM_KEY); - if (sql == null) { - throw new IllegalArgumentException("Cannot find sql parameter from the URL"); - } - return new SqlRequest(sql, null); + sql = restRequest.param(SQL_URL_PARAM_KEY); + if (sql == null) { + throw new IllegalArgumentException("Cannot find sql parameter from the URL"); } + return new SqlRequest(sql, null); + } - private static SqlRequest parseSqlRequestFromPayload(RestRequest restRequest) { - String content = restRequest.content().utf8ToString(); + private static SqlRequest parseSqlRequestFromPayload(RestRequest restRequest) { + String content = restRequest.content().utf8ToString(); - JSONObject jsonContent; - try { - jsonContent = new JSONObject(content); - if (jsonContent.has(SQL_CURSOR_FIELD_NAME)) { - return new SqlRequest(jsonContent.getString(SQL_CURSOR_FIELD_NAME)); - } - } catch (JSONException e) { - throw new IllegalArgumentException("Failed to parse request payload", e); - } - String sql = jsonContent.getString(SQL_FIELD_NAME); - - if (jsonContent.has(PARAM_FIELD_NAME)) { // is a PreparedStatement - JSONArray paramArray = jsonContent.getJSONArray(PARAM_FIELD_NAME); - List parameters = parseParameters(paramArray); - return new PreparedStatementRequest(sql, validateAndGetFetchSize(jsonContent), jsonContent, parameters); - } - return new SqlRequest(sql, validateAndGetFetchSize(jsonContent), jsonContent); + JSONObject jsonContent; + try { + jsonContent = new JSONObject(content); + if (jsonContent.has(SQL_CURSOR_FIELD_NAME)) { + return new SqlRequest(jsonContent.getString(SQL_CURSOR_FIELD_NAME)); + } + } catch (JSONException e) { + throw new IllegalArgumentException("Failed to parse request payload", e); } + String sql = 
jsonContent.getString(SQL_FIELD_NAME); + if (jsonContent.has(PARAM_FIELD_NAME)) { // is a PreparedStatement + JSONArray paramArray = jsonContent.getJSONArray(PARAM_FIELD_NAME); + List parameters = + parseParameters(paramArray); + return new PreparedStatementRequest( + sql, validateAndGetFetchSize(jsonContent), jsonContent, parameters); + } + return new SqlRequest(sql, validateAndGetFetchSize(jsonContent), jsonContent); + } - private static Integer validateAndGetFetchSize(JSONObject jsonContent) { - Optional fetchSize = Optional.empty(); - try { - if (jsonContent.has(SQL_FETCH_FIELD_NAME)) { - fetchSize = Optional.of(jsonContent.getInt(SQL_FETCH_FIELD_NAME)); - if (fetchSize.get() < 0) { - throw new IllegalArgumentException("Fetch_size must be greater or equal to 0"); - } - } - } catch (JSONException e) { - throw new IllegalArgumentException("Failed to parse field [" + SQL_FETCH_FIELD_NAME +"]", e); + private static Integer validateAndGetFetchSize(JSONObject jsonContent) { + Optional fetchSize = Optional.empty(); + try { + if (jsonContent.has(SQL_FETCH_FIELD_NAME)) { + fetchSize = Optional.of(jsonContent.getInt(SQL_FETCH_FIELD_NAME)); + if (fetchSize.get() < 0) { + throw new IllegalArgumentException("Fetch_size must be greater or equal to 0"); } - return fetchSize.orElse(0); + } + } catch (JSONException e) { + throw new IllegalArgumentException("Failed to parse field [" + SQL_FETCH_FIELD_NAME + "]", e); } + return fetchSize.orElse(0); + } - private static List parseParameters( - JSONArray paramsJsonArray) { - List parameters = new ArrayList<>(); - for (int i = 0; i < paramsJsonArray.length(); i++) { - JSONObject paramJson = paramsJsonArray.getJSONObject(i); - String typeString = paramJson.getString(PARAM_TYPE_FIELD_NAME); - if (typeString == null) { - throw new IllegalArgumentException("Parameter type cannot be null. 
parameter json: " - + paramJson.toString()); - } - PreparedStatementRequest.ParameterType type; - try { - type = PreparedStatementRequest.ParameterType.valueOf(typeString.toUpperCase()); - } catch (IllegalArgumentException e) { - throw new IllegalArgumentException("Unsupported parameter type " + typeString, e); - } - try { - PreparedStatementRequest.PreparedStatementParameter parameter; - switch (type) { - case BOOLEAN: - parameter = new PreparedStatementRequest.PreparedStatementParameter<>( - paramJson.getBoolean(PARAM_VALUE_FIELD_NAME)); - parameters.add(parameter); - break; - case KEYWORD: - case STRING: - case DATE: - parameter = new PreparedStatementRequest.StringParameter( - paramJson.getString(PARAM_VALUE_FIELD_NAME)); - parameters.add(parameter); - break; - case BYTE: - case SHORT: - case INTEGER: - case LONG: - parameter = new PreparedStatementRequest.PreparedStatementParameter<>( - paramJson.getLong(PARAM_VALUE_FIELD_NAME)); - parameters.add(parameter); - break; - case FLOAT: - case DOUBLE: - parameter = new PreparedStatementRequest.PreparedStatementParameter<>( - paramJson.getDouble(PARAM_VALUE_FIELD_NAME)); - parameters.add(parameter); - break; - case NULL: - parameter = new PreparedStatementRequest.NullParameter(); - parameters.add(parameter); - break; - default: - throw new IllegalArgumentException("Failed to handle parameter type " + type.name()); - } - } catch (JSONException e) { - throw new IllegalArgumentException("Failed to parse PreparedStatement parameters", e); - } + private static List parseParameters( + JSONArray paramsJsonArray) { + List parameters = new ArrayList<>(); + for (int i = 0; i < paramsJsonArray.length(); i++) { + JSONObject paramJson = paramsJsonArray.getJSONObject(i); + String typeString = paramJson.getString(PARAM_TYPE_FIELD_NAME); + if (typeString == null) { + throw new IllegalArgumentException( + "Parameter type cannot be null. 
parameter json: " + paramJson.toString()); + } + PreparedStatementRequest.ParameterType type; + try { + type = PreparedStatementRequest.ParameterType.valueOf(typeString.toUpperCase()); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException("Unsupported parameter type " + typeString, e); + } + try { + PreparedStatementRequest.PreparedStatementParameter parameter; + switch (type) { + case BOOLEAN: + parameter = + new PreparedStatementRequest.PreparedStatementParameter<>( + paramJson.getBoolean(PARAM_VALUE_FIELD_NAME)); + parameters.add(parameter); + break; + case KEYWORD: + case STRING: + case DATE: + parameter = + new PreparedStatementRequest.StringParameter( + paramJson.getString(PARAM_VALUE_FIELD_NAME)); + parameters.add(parameter); + break; + case BYTE: + case SHORT: + case INTEGER: + case LONG: + parameter = + new PreparedStatementRequest.PreparedStatementParameter<>( + paramJson.getLong(PARAM_VALUE_FIELD_NAME)); + parameters.add(parameter); + break; + case FLOAT: + case DOUBLE: + parameter = + new PreparedStatementRequest.PreparedStatementParameter<>( + paramJson.getDouble(PARAM_VALUE_FIELD_NAME)); + parameters.add(parameter); + break; + case NULL: + parameter = new PreparedStatementRequest.NullParameter(); + parameters.add(parameter); + break; + default: + throw new IllegalArgumentException("Failed to handle parameter type " + type.name()); } - return parameters; + } catch (JSONException e) { + throw new IllegalArgumentException("Failed to parse PreparedStatement parameters", e); + } } + return parameters; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/request/SqlRequestParam.java b/legacy/src/main/java/org/opensearch/sql/legacy/request/SqlRequestParam.java index c9d3abb320..b151fabde6 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/request/SqlRequestParam.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/request/SqlRequestParam.java @@ -3,57 +3,56 @@ * SPDX-License-Identifier: Apache-2.0 */ - 
package org.opensearch.sql.legacy.request; import java.util.Map; import java.util.Optional; import org.opensearch.sql.legacy.executor.Format; -/** - * Utils class for parse the request params. - */ +/** Utils class for parse the request params. */ public class SqlRequestParam { - public static final String QUERY_PARAMS_FORMAT = "format"; - public static final String QUERY_PARAMS_PRETTY = "pretty"; - public static final String QUERY_PARAMS_ESCAPE = "escape"; - - private static final String DEFAULT_RESPONSE_FORMAT = "jdbc"; - - /** - * Parse the pretty params to decide whether the response should be pretty formatted. - * @param requestParams request params. - * @return return true if the response required pretty format, otherwise return false. - */ - public static boolean isPrettyFormat(Map requestParams) { - return requestParams.containsKey(QUERY_PARAMS_PRETTY) - && ("".equals(requestParams.get(QUERY_PARAMS_PRETTY)) - || "true".equals(requestParams.get(QUERY_PARAMS_PRETTY))); - } - - /** - * Parse the request params and return the {@link Format} of the response - * @param requestParams request params - * @return The response Format. - */ - public static Format getFormat(Map requestParams) { - String formatName = - requestParams.containsKey(QUERY_PARAMS_FORMAT) - ? requestParams.get(QUERY_PARAMS_FORMAT).toLowerCase() - : DEFAULT_RESPONSE_FORMAT; - Optional optionalFormat = Format.of(formatName); - if (optionalFormat.isPresent()) { - return optionalFormat.get(); - } else { - throw new IllegalArgumentException("Failed to create executor due to unknown response format: " - + formatName); - } + public static final String QUERY_PARAMS_FORMAT = "format"; + public static final String QUERY_PARAMS_PRETTY = "pretty"; + public static final String QUERY_PARAMS_ESCAPE = "escape"; + + private static final String DEFAULT_RESPONSE_FORMAT = "jdbc"; + + /** + * Parse the pretty params to decide whether the response should be pretty formatted. 
+ * + * @param requestParams request params. + * @return return true if the response required pretty format, otherwise return false. + */ + public static boolean isPrettyFormat(Map requestParams) { + return requestParams.containsKey(QUERY_PARAMS_PRETTY) + && ("".equals(requestParams.get(QUERY_PARAMS_PRETTY)) + || "true".equals(requestParams.get(QUERY_PARAMS_PRETTY))); + } + + /** + * Parse the request params and return the {@link Format} of the response + * + * @param requestParams request params + * @return The response Format. + */ + public static Format getFormat(Map requestParams) { + String formatName = + requestParams.containsKey(QUERY_PARAMS_FORMAT) + ? requestParams.get(QUERY_PARAMS_FORMAT).toLowerCase() + : DEFAULT_RESPONSE_FORMAT; + Optional optionalFormat = Format.of(formatName); + if (optionalFormat.isPresent()) { + return optionalFormat.get(); + } else { + throw new IllegalArgumentException( + "Failed to create executor due to unknown response format: " + formatName); } + } - public static boolean getEscapeOption(Map requestParams) { - if (requestParams.containsKey(QUERY_PARAMS_ESCAPE)) { - return Boolean.parseBoolean(requestParams.get(QUERY_PARAMS_ESCAPE)); - } - return false; + public static boolean getEscapeOption(Map requestParams) { + if (requestParams.containsKey(QUERY_PARAMS_ESCAPE)) { + return Boolean.parseBoolean(requestParams.get(QUERY_PARAMS_ESCAPE)); } + return false; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/RewriteRule.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/RewriteRule.java index 6744bfa3e5..cd6400ed88 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/RewriteRule.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/RewriteRule.java @@ -3,29 +3,26 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter; import com.alibaba.druid.sql.ast.expr.SQLQueryExpr; import java.sql.SQLFeatureNotSupportedException; -/** - * Query 
Optimize Rule - */ +/** Query Optimize Rule */ public interface RewriteRule { - /** - * Checking whether the rule match the query? - * - * @return true if the rule match to the query. - * @throws SQLFeatureNotSupportedException - */ - boolean match(T expr) throws SQLFeatureNotSupportedException; + /** + * Checking whether the rule match the query? + * + * @return true if the rule match to the query. + * @throws SQLFeatureNotSupportedException + */ + boolean match(T expr) throws SQLFeatureNotSupportedException; - /** - * Optimize the query. - * - * @throws SQLFeatureNotSupportedException - */ - void rewrite(T expr) throws SQLFeatureNotSupportedException; + /** + * Optimize the query. + * + * @throws SQLFeatureNotSupportedException + */ + void rewrite(T expr) throws SQLFeatureNotSupportedException; } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/RewriteRuleExecutor.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/RewriteRuleExecutor.java index 86aa3d0b20..20fd018ae8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/RewriteRuleExecutor.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/RewriteRuleExecutor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter; import com.alibaba.druid.sql.ast.expr.SQLQueryExpr; @@ -11,50 +10,42 @@ import java.util.ArrayList; import java.util.List; -/** - * Query RewriteRuleExecutor which will execute the {@link RewriteRule} with registered order. - */ +/** Query RewriteRuleExecutor which will execute the {@link RewriteRule} with registered order. */ public class RewriteRuleExecutor { - private final List> rewriteRules; - - public RewriteRuleExecutor(List> rewriteRules) { - this.rewriteRules = rewriteRules; + private final List> rewriteRules; + + public RewriteRuleExecutor(List> rewriteRules) { + this.rewriteRules = rewriteRules; + } + + /** Execute the registered {@link RewriteRule} in order on the Query. 
*/ + public void executeOn(T expr) throws SQLFeatureNotSupportedException { + for (RewriteRule rule : rewriteRules) { + if (rule.match(expr)) { + rule.rewrite(expr); + } } - - /** - * Execute the registered {@link RewriteRule} in order on the Query. - */ - public void executeOn(T expr) throws SQLFeatureNotSupportedException { - for (RewriteRule rule : rewriteRules) { - if (rule.match(expr)) { - rule.rewrite(expr); - } - } - } - - /** - * Build {@link RewriteRuleExecutor} - */ - public static BuilderOptimizer builder() { - return new BuilderOptimizer(); + } + + /** Build {@link RewriteRuleExecutor} */ + public static BuilderOptimizer builder() { + return new BuilderOptimizer(); + } + + /** Builder of {@link RewriteRuleExecutor} */ + public static class BuilderOptimizer { + private List> rewriteRules; + + public BuilderOptimizer withRule(RewriteRule rule) { + if (rewriteRules == null) { + rewriteRules = new ArrayList<>(); + } + rewriteRules.add(rule); + return this; } - /** - * Builder of {@link RewriteRuleExecutor} - */ - public static class BuilderOptimizer { - private List> rewriteRules; - - public BuilderOptimizer withRule(RewriteRule rule) { - if (rewriteRules == null) { - rewriteRules = new ArrayList<>(); - } - rewriteRules.add(rule); - return this; - } - - public RewriteRuleExecutor build() { - return new RewriteRuleExecutor(rewriteRules); - } + public RewriteRuleExecutor build() { + return new RewriteRuleExecutor(rewriteRules); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/alias/Identifier.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/alias/Identifier.java index 6c708b91b0..9863862af9 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/alias/Identifier.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/alias/Identifier.java @@ -3,42 +3,39 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.alias; import 
com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr; -/** - * Util class for identifier expression parsing - */ +/** Util class for identifier expression parsing */ class Identifier { - private final SQLIdentifierExpr idExpr; + private final SQLIdentifierExpr idExpr; - Identifier(SQLIdentifierExpr idExpr) { - this.idExpr = idExpr; - } + Identifier(SQLIdentifierExpr idExpr) { + this.idExpr = idExpr; + } - String name() { - return idExpr.getName(); - } + String name() { + return idExpr.getName(); + } - boolean hasPrefix() { - return firstDotIndex() != -1; - } + boolean hasPrefix() { + return firstDotIndex() != -1; + } - /** Assumption: identifier has prefix */ - String prefix() { - return name().substring(0, firstDotIndex()); - } + /** Assumption: identifier has prefix */ + String prefix() { + return name().substring(0, firstDotIndex()); + } - /** Assumption: identifier has prefix */ - void removePrefix() { - String nameWithoutPrefix = name().substring(prefix().length() + 1); - idExpr.setName(nameWithoutPrefix); - } + /** Assumption: identifier has prefix */ + void removePrefix() { + String nameWithoutPrefix = name().substring(prefix().length() + 1); + idExpr.setName(nameWithoutPrefix); + } - private int firstDotIndex() { - return name().indexOf('.', 1); - } + private int firstDotIndex() { + return name().indexOf('.', 1); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/alias/Table.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/alias/Table.java index 63c33d4721..015d8d8858 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/alias/Table.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/alias/Table.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.alias; import static com.alibaba.druid.sql.ast.expr.SQLBinaryOperator.Divide; @@ -14,44 +13,42 @@ import com.alibaba.druid.sql.ast.statement.SQLExprTableSource; import 
com.google.common.base.Strings; -/** - * Util class for table expression parsing - */ +/** Util class for table expression parsing */ class Table { - private final SQLExprTableSource tableExpr; - - Table(SQLExprTableSource tableExpr) { - this.tableExpr = tableExpr; - } + private final SQLExprTableSource tableExpr; - boolean hasAlias() { - return !alias().isEmpty(); - } + Table(SQLExprTableSource tableExpr) { + this.tableExpr = tableExpr; + } - String alias() { - return Strings.nullToEmpty(tableExpr.getAlias()); - } + boolean hasAlias() { + return !alias().isEmpty(); + } - void removeAlias() { - tableExpr.setAlias(null); - } + String alias() { + return Strings.nullToEmpty(tableExpr.getAlias()); + } - /** Extract table name in table expression */ - String name() { - SQLExpr expr = tableExpr.getExpr(); - if (expr instanceof SQLIdentifierExpr) { - return ((SQLIdentifierExpr) expr).getName(); - } else if (isTableWithType(expr)) { - return ((SQLIdentifierExpr) ((SQLBinaryOpExpr) expr).getLeft()).getName(); - } - return expr.toString(); - } + void removeAlias() { + tableExpr.setAlias(null); + } - /** Return true for table name along with type name, for example 'accounts/_doc' */ - private boolean isTableWithType(SQLExpr expr) { - return expr instanceof SQLBinaryOpExpr - && ((SQLBinaryOpExpr) expr).getLeft() instanceof SQLIdentifierExpr - && ((SQLBinaryOpExpr) expr).getOperator() == Divide; + /** Extract table name in table expression */ + String name() { + SQLExpr expr = tableExpr.getExpr(); + if (expr instanceof SQLIdentifierExpr) { + return ((SQLIdentifierExpr) expr).getName(); + } else if (isTableWithType(expr)) { + return ((SQLIdentifierExpr) ((SQLBinaryOpExpr) expr).getLeft()).getName(); } + return expr.toString(); + } + + /** Return true for table name along with type name, for example 'accounts/_doc' */ + private boolean isTableWithType(SQLExpr expr) { + return expr instanceof SQLBinaryOpExpr + && ((SQLBinaryOpExpr) expr).getLeft() instanceof SQLIdentifierExpr + && 
((SQLBinaryOpExpr) expr).getOperator() == Divide; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/alias/TableAliasPrefixRemoveRule.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/alias/TableAliasPrefixRemoveRule.java index b8500454cd..80190a5889 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/alias/TableAliasPrefixRemoveRule.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/alias/TableAliasPrefixRemoveRule.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.alias; import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr; @@ -17,86 +16,87 @@ import org.opensearch.sql.legacy.rewriter.RewriteRule; import org.opensearch.sql.legacy.rewriter.subquery.utils.FindSubQuery; -/** - * Rewrite rule for removing table alias or table name prefix in field name. - */ +/** Rewrite rule for removing table alias or table name prefix in field name. */ public class TableAliasPrefixRemoveRule implements RewriteRule { - /** Table aliases in FROM clause. Store table name for those without alias. */ - private final Set tableAliasesToRemove = new HashSet<>(); + /** Table aliases in FROM clause. Store table name for those without alias. 
*/ + private final Set tableAliasesToRemove = new HashSet<>(); - @Override - public boolean match(SQLQueryExpr root) { - if (hasSubQuery(root)) { - return false; - } - collectTableAliasesThatCanBeRemoved(root); - return !tableAliasesToRemove.isEmpty(); + @Override + public boolean match(SQLQueryExpr root) { + if (hasSubQuery(root)) { + return false; } + collectTableAliasesThatCanBeRemoved(root); + return !tableAliasesToRemove.isEmpty(); + } - @Override - public void rewrite(SQLQueryExpr root) { - removeTableAliasPrefixInColumnName(root); - } + @Override + public void rewrite(SQLQueryExpr root) { + removeTableAliasPrefixInColumnName(root); + } - private boolean hasSubQuery(SQLQueryExpr root) { - FindSubQuery visitor = new FindSubQuery(); - root.accept(visitor); - return visitor.hasSubQuery(); - } + private boolean hasSubQuery(SQLQueryExpr root) { + FindSubQuery visitor = new FindSubQuery(); + root.accept(visitor); + return visitor.hasSubQuery(); + } - private void collectTableAliasesThatCanBeRemoved(SQLQueryExpr root) { - visitNonJoinedTable(root, tableExpr -> { - Table table = new Table(tableExpr); - if (table.hasAlias()) { - tableAliasesToRemove.add(table.alias()); - table.removeAlias(); - } else { - tableAliasesToRemove.add(table.name()); - } + private void collectTableAliasesThatCanBeRemoved(SQLQueryExpr root) { + visitNonJoinedTable( + root, + tableExpr -> { + Table table = new Table(tableExpr); + if (table.hasAlias()) { + tableAliasesToRemove.add(table.alias()); + table.removeAlias(); + } else { + tableAliasesToRemove.add(table.name()); + } }); - } + } - private void removeTableAliasPrefixInColumnName(SQLQueryExpr root) { - visitColumnName(root, idExpr -> { - Identifier field = new Identifier(idExpr); - if (field.hasPrefix() && tableAliasesToRemove.contains(field.prefix())) { - field.removePrefix(); - } + private void removeTableAliasPrefixInColumnName(SQLQueryExpr root) { + visitColumnName( + root, + idExpr -> { + Identifier field = new Identifier(idExpr); + 
if (field.hasPrefix() && tableAliasesToRemove.contains(field.prefix())) { + field.removePrefix(); + } }); - } + } - private void visitNonJoinedTable(SQLQueryExpr root, - Consumer visit) { - root.accept(new MySqlASTVisitorAdapter() { - @Override - public boolean visit(SQLJoinTableSource x) { - // Avoid visiting table name in any JOIN including comma/inner/left join - // between 2 indices or between index and nested field. - // For the latter case, alias is taken care of in {@link NestedFieldRewriter}. - return false; - } + private void visitNonJoinedTable(SQLQueryExpr root, Consumer visit) { + root.accept( + new MySqlASTVisitorAdapter() { + @Override + public boolean visit(SQLJoinTableSource x) { + // Avoid visiting table name in any JOIN including comma/inner/left join + // between 2 indices or between index and nested field. + // For the latter case, alias is taken care of in {@link NestedFieldRewriter}. + return false; + } - @Override - public void endVisit(SQLExprTableSource tableExpr) { - visit.accept(tableExpr); - } + @Override + public void endVisit(SQLExprTableSource tableExpr) { + visit.accept(tableExpr); + } }); - } + } - private void visitColumnName(SQLQueryExpr expr, - Consumer visit) { - expr.accept(new MySqlASTVisitorAdapter() { - @Override - public boolean visit(SQLExprTableSource x) { - return false; // Avoid rewriting identifier in table name - } + private void visitColumnName(SQLQueryExpr expr, Consumer visit) { + expr.accept( + new MySqlASTVisitorAdapter() { + @Override + public boolean visit(SQLExprTableSource x) { + return false; // Avoid rewriting identifier in table name + } - @Override - public void endVisit(SQLIdentifierExpr idExpr) { - visit.accept(idExpr); - } + @Override + public void endVisit(SQLIdentifierExpr idExpr) { + visit.accept(idExpr); + } }); - } - + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/identifier/AnonymizeSensitiveDataRule.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/identifier/AnonymizeSensitiveDataRule.java index 2768b269bf..c4f3ee5a10 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/identifier/AnonymizeSensitiveDataRule.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/identifier/AnonymizeSensitiveDataRule.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.identifier; import com.alibaba.druid.sql.ast.expr.SQLBooleanExpr; @@ -17,53 +16,53 @@ import org.opensearch.sql.legacy.rewriter.RewriteRule; /** - * Rewrite rule to anonymize sensitive data in logging queries. - * This rule replace the content of specific nodes (that might involve index data) in AST - * to anonymous content. + * Rewrite rule to anonymize sensitive data in logging queries. This rule replace the content of + * specific nodes (that might involve index data) in AST to anonymous content. */ -public class AnonymizeSensitiveDataRule extends MySqlASTVisitorAdapter implements RewriteRule { +public class AnonymizeSensitiveDataRule extends MySqlASTVisitorAdapter + implements RewriteRule { - @Override - public boolean visit(SQLIdentifierExpr identifierExpr) { - if (identifierExpr.getParent() instanceof SQLExprTableSource) { - identifierExpr.setName("table"); - } else { - identifierExpr.setName("identifier"); - } - return true; + @Override + public boolean visit(SQLIdentifierExpr identifierExpr) { + if (identifierExpr.getParent() instanceof SQLExprTableSource) { + identifierExpr.setName("table"); + } else { + identifierExpr.setName("identifier"); } + return true; + } - @Override - public boolean visit(SQLIntegerExpr integerExpr) { - integerExpr.setNumber(0); - return true; - } + @Override + public boolean visit(SQLIntegerExpr integerExpr) { + integerExpr.setNumber(0); + return true; + } - @Override - public boolean visit(SQLNumberExpr numberExpr) { - numberExpr.setNumber(0); - return true; - } + @Override + public 
boolean visit(SQLNumberExpr numberExpr) { + numberExpr.setNumber(0); + return true; + } - @Override - public boolean visit(SQLCharExpr charExpr) { - charExpr.setText("string_literal"); - return true; - } + @Override + public boolean visit(SQLCharExpr charExpr) { + charExpr.setText("string_literal"); + return true; + } - @Override - public boolean visit(SQLBooleanExpr booleanExpr) { - booleanExpr.setValue(false); - return true; - } + @Override + public boolean visit(SQLBooleanExpr booleanExpr) { + booleanExpr.setValue(false); + return true; + } - @Override - public boolean match(SQLQueryExpr expr) { - return true; - } + @Override + public boolean match(SQLQueryExpr expr) { + return true; + } - @Override - public void rewrite(SQLQueryExpr expr) { - expr.accept(this); - } + @Override + public void rewrite(SQLQueryExpr expr) { + expr.accept(this); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/identifier/UnquoteIdentifierRule.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/identifier/UnquoteIdentifierRule.java index 31fc732879..b0258420eb 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/identifier/UnquoteIdentifierRule.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/identifier/UnquoteIdentifierRule.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.identifier; import static org.opensearch.sql.legacy.utils.StringUtils.unquoteFullColumn; @@ -16,53 +15,52 @@ import com.alibaba.druid.sql.dialect.mysql.visitor.MySqlASTVisitorAdapter; import org.opensearch.sql.legacy.rewriter.RewriteRule; -/** - * Quoted Identifiers Rewriter Rule - */ -public class UnquoteIdentifierRule extends MySqlASTVisitorAdapter implements RewriteRule { +/** Quoted Identifiers Rewriter Rule */ +public class UnquoteIdentifierRule extends MySqlASTVisitorAdapter + implements RewriteRule { - /** - * - * This method is to adjust the AST in the cases where the field is 
quoted, - * and the full name in the SELECT field is in the format of indexAlias.fieldName - * (e.g. SELECT b.`lastname` FROM bank AS b). - * - * In this case, the druid parser constructs a SQLSelectItem for the field "b.`lastname`", with SQLIdentifierExpr of - * "b." and alias of "`lastname`". - * - * This method corrects the SQLSelectItem object to have SQLIdentifier of "b.lastname" and alias of null. - */ - @Override - public boolean visit(SQLSelectItem selectItem) { - if (selectItem.getExpr() instanceof SQLIdentifierExpr) { - String identifier = ((SQLIdentifierExpr) selectItem.getExpr()).getName(); - if (identifier.endsWith(".")) { - String correctedIdentifier = identifier + unquoteSingleField(selectItem.getAlias(), "`"); - selectItem.setExpr(new SQLIdentifierExpr(correctedIdentifier)); - selectItem.setAlias(null); - } - } - selectItem.setAlias(unquoteSingleField(selectItem.getAlias(), "`")); - return true; + /** + * This method is to adjust the AST in the cases where the field is quoted, and the full name in + * the SELECT field is in the format of indexAlias.fieldName (e.g. SELECT b.`lastname` FROM bank + * AS b). + * + *

In this case, the druid parser constructs a SQLSelectItem for the field "b.`lastname`", with + * SQLIdentifierExpr of "b." and alias of "`lastname`". + * + *

This method corrects the SQLSelectItem object to have SQLIdentifier of "b.lastname" and + * alias of null. + */ + @Override + public boolean visit(SQLSelectItem selectItem) { + if (selectItem.getExpr() instanceof SQLIdentifierExpr) { + String identifier = ((SQLIdentifierExpr) selectItem.getExpr()).getName(); + if (identifier.endsWith(".")) { + String correctedIdentifier = identifier + unquoteSingleField(selectItem.getAlias(), "`"); + selectItem.setExpr(new SQLIdentifierExpr(correctedIdentifier)); + selectItem.setAlias(null); + } } + selectItem.setAlias(unquoteSingleField(selectItem.getAlias(), "`")); + return true; + } - @Override - public void endVisit(SQLIdentifierExpr identifierExpr) { - identifierExpr.setName(unquoteFullColumn(identifierExpr.getName())); - } + @Override + public void endVisit(SQLIdentifierExpr identifierExpr) { + identifierExpr.setName(unquoteFullColumn(identifierExpr.getName())); + } - @Override - public void endVisit(SQLExprTableSource tableSource) { - tableSource.setAlias(unquoteSingleField(tableSource.getAlias())); - } + @Override + public void endVisit(SQLExprTableSource tableSource) { + tableSource.setAlias(unquoteSingleField(tableSource.getAlias())); + } - @Override - public boolean match(SQLQueryExpr root) { - return true; - } + @Override + public boolean match(SQLQueryExpr root) { + return true; + } - @Override - public void rewrite(SQLQueryExpr root) { - root.accept(new UnquoteIdentifierRule()); - } + @Override + public void rewrite(SQLQueryExpr root) { + root.accept(new UnquoteIdentifierRule()); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/join/JoinRewriteRule.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/join/JoinRewriteRule.java index b32803561e..5a2d107a0a 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/join/JoinRewriteRule.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/join/JoinRewriteRule.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: 
Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.join; import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr; @@ -28,22 +27,20 @@ import org.opensearch.sql.legacy.utils.StringUtils; /** - * Rewrite rule to add table alias to columnNames for JOIN queries without table alias. - *

- * We use a map from columnName to tableName. This is required to remove any ambiguity - * while mapping fields to right table. If there is no explicit alias we create one and use that - * to prefix columnName. + * Rewrite rule to add table alias to columnNames for JOIN queries without table alias. + * + *

We use a map from columnName to tableName. This is required to remove any ambiguity while + * mapping fields to right table. If there is no explicit alias we create one and use that to prefix + * columnName. + * + *

Different tableName on either side of join: Case a: If columnName(without alias) present in + * both tables, throw error. Case b: If columnName already has some alias, and that alias is a table + * name, change it to explicit alias of that table. Case c: If columnName is unique to a table * - * Different tableName on either side of join: - * Case a: If columnName(without alias) present in both tables, throw error. - * Case b: If columnName already has some alias, and that alias is a table name, - * change it to explicit alias of that table. - * Case c: If columnName is unique to a table + *

Same tableName on either side of join: Case a: If neither has explicit alias, throw error. + * Case b: If any one table has explicit alias, use explicit alias of other table for columnNames + * with tableName as prefix. (See below example) * - * Same tableName on either side of join: - * Case a: If neither has explicit alias, throw error. - * Case b: If any one table has explicit alias, - * use explicit alias of other table for columnNames with tableName as prefix. (See below example) *

  *       ex: SELECT table.field_a , a.field_b  | SELECT table.field_a , a.field_b
  *            FROM table a                     |  FROM table
@@ -54,164 +51,172 @@
  *                            FROM table a
  *                             JOIN table table_0
  *                              ON table_0.field_c = a.field_d
- *
- *

+ * */ public class JoinRewriteRule implements RewriteRule { - private static final String DOT = "."; - private int aliasSuffix = 0; - private final LocalClusterState clusterState; + private static final String DOT = "."; + private int aliasSuffix = 0; + private final LocalClusterState clusterState; - public JoinRewriteRule(LocalClusterState clusterState) { - this.clusterState = clusterState; - } + public JoinRewriteRule(LocalClusterState clusterState) { + this.clusterState = clusterState; + } - @Override - public boolean match(SQLQueryExpr root) { - return isJoin(root); - } + @Override + public boolean match(SQLQueryExpr root) { + return isJoin(root); + } - private boolean isJoin(SQLQueryExpr sqlExpr) { - SQLSelectQuery sqlSelectQuery = sqlExpr.getSubQuery().getQuery(); + private boolean isJoin(SQLQueryExpr sqlExpr) { + SQLSelectQuery sqlSelectQuery = sqlExpr.getSubQuery().getQuery(); - if (!(sqlSelectQuery instanceof MySqlSelectQueryBlock)) { - return false; - } - - MySqlSelectQueryBlock query = (MySqlSelectQueryBlock) sqlSelectQuery; - return query.getFrom() instanceof SQLJoinTableSource - && ((SQLJoinTableSource) query.getFrom()).getJoinType() != SQLJoinTableSource.JoinType.COMMA; + if (!(sqlSelectQuery instanceof MySqlSelectQueryBlock)) { + return false; } - @Override - public void rewrite(SQLQueryExpr root) { + MySqlSelectQueryBlock query = (MySqlSelectQueryBlock) sqlSelectQuery; + return query.getFrom() instanceof SQLJoinTableSource + && ((SQLJoinTableSource) query.getFrom()).getJoinType() + != SQLJoinTableSource.JoinType.COMMA; + } - final Multimap tableByFieldName = ArrayListMultimap.create(); - final Map tableNameToAlias = new HashMap<>(); + @Override + public void rewrite(SQLQueryExpr root) { - // Used to handle case of same tableNames in JOIN - final Set explicitAliases = new HashSet<>(); + final Multimap tableByFieldName = ArrayListMultimap.create(); + final Map tableNameToAlias = new HashMap<>(); - visitTable(root, tableExpr -> { - // Copied from 
SubqueryAliasRewriter ; Removes index type name if any - String tableName = tableExpr.getExpr().toString().replaceAll(" ", "").split("/")[0]; + // Used to handle case of same tableNames in JOIN + final Set explicitAliases = new HashSet<>(); - if (tableExpr.getAlias() == null) { - String alias = createAlias(tableName); - tableExpr.setAlias(alias); - explicitAliases.add(alias); - } + visitTable( + root, + tableExpr -> { + // Copied from SubqueryAliasRewriter ; Removes index type name if any + String tableName = tableExpr.getExpr().toString().replaceAll(" ", "").split("/")[0]; - Table table = new Table(tableName, tableExpr.getAlias()); + if (tableExpr.getAlias() == null) { + String alias = createAlias(tableName); + tableExpr.setAlias(alias); + explicitAliases.add(alias); + } - tableNameToAlias.put(table.getName(), table.getAlias()); + Table table = new Table(tableName, tableExpr.getAlias()); - FieldMappings fieldMappings = clusterState. getFieldMappings( - new String[]{tableName}).firstMapping(); - fieldMappings.flat((fieldName, type) -> tableByFieldName.put(fieldName, table)); - }); + tableNameToAlias.put(table.getName(), table.getAlias()); - //Handling cases for same tableName on either side of JOIN - if (tableNameToAlias.size() == 1) { - String tableName = tableNameToAlias.keySet().iterator().next(); - if (explicitAliases.size() == 2) { - // Neither table has explicit alias - throw new VerificationException(StringUtils.format("Not unique table/alias: [%s]", tableName)); - } else if (explicitAliases.size() == 1) { - // One table has explicit alias; use created alias for other table as alias to override fields - // starting with actual tableName as alias to explicit alias - tableNameToAlias.put(tableName, explicitAliases.iterator().next()); - } - } - - visitColumnName(root, idExpr -> { - String columnName = idExpr.getName(); - Collection

tables = tableByFieldName.get(columnName); - if (tables.size() > 1) { - // columnName without alias present in both tables - throw new VerificationException(StringUtils.format("Field name [%s] is ambiguous", columnName)); - } else if (tables.isEmpty()) { - // size() == 0? - // 1. Either the columnName does not exist (handled by SemanticAnalyzer [SemanticAnalysisException]) - // 2. Or column starts with tableName as alias or explicit alias - // If starts with tableName as alias change to explicit alias - tableNameToAlias.keySet().stream().forEach(tableName -> { - if (columnName.startsWith(tableName + DOT)) { - idExpr.setName(columnName.replace(tableName + DOT, tableNameToAlias.get(tableName) + DOT)); - } - }); - } else { - // columnName with any alias and unique to one table - Table table = tables.iterator().next(); - idExpr.setName(String.join(DOT, table.getAlias(), columnName)); - } + FieldMappings fieldMappings = + clusterState.getFieldMappings(new String[] {tableName}).firstMapping(); + fieldMappings.flat((fieldName, type) -> tableByFieldName.put(fieldName, table)); }); + + // Handling cases for same tableName on either side of JOIN + if (tableNameToAlias.size() == 1) { + String tableName = tableNameToAlias.keySet().iterator().next(); + if (explicitAliases.size() == 2) { + // Neither table has explicit alias + throw new VerificationException( + StringUtils.format("Not unique table/alias: [%s]", tableName)); + } else if (explicitAliases.size() == 1) { + // One table has explicit alias; use created alias for other table as alias to override + // fields + // starting with actual tableName as alias to explicit alias + tableNameToAlias.put(tableName, explicitAliases.iterator().next()); + } } - private void visitTable(SQLQueryExpr root, - Consumer visit) { - root.accept(new MySqlASTVisitorAdapter() { - @Override - public void endVisit(SQLExprTableSource tableExpr) { - visit.accept(tableExpr); - } + visitColumnName( + root, + idExpr -> { + String columnName = 
idExpr.getName(); + Collection
tables = tableByFieldName.get(columnName); + if (tables.size() > 1) { + // columnName without alias present in both tables + throw new VerificationException( + StringUtils.format("Field name [%s] is ambiguous", columnName)); + } else if (tables.isEmpty()) { + // size() == 0? + // 1. Either the columnName does not exist (handled by SemanticAnalyzer + // [SemanticAnalysisException]) + // 2. Or column starts with tableName as alias or explicit alias + // If starts with tableName as alias change to explicit alias + tableNameToAlias.keySet().stream() + .forEach( + tableName -> { + if (columnName.startsWith(tableName + DOT)) { + idExpr.setName( + columnName.replace( + tableName + DOT, tableNameToAlias.get(tableName) + DOT)); + } + }); + } else { + // columnName with any alias and unique to one table + Table table = tables.iterator().next(); + idExpr.setName(String.join(DOT, table.getAlias(), columnName)); + } }); - } + } + + private void visitTable(SQLQueryExpr root, Consumer visit) { + root.accept( + new MySqlASTVisitorAdapter() { + @Override + public void endVisit(SQLExprTableSource tableExpr) { + visit.accept(tableExpr); + } + }); + } + + private void visitColumnName(SQLQueryExpr expr, Consumer visit) { + expr.accept( + new MySqlASTVisitorAdapter() { + @Override + public boolean visit(SQLExprTableSource x) { + // Avoid rewriting identifier in table name + return false; + } - private void visitColumnName(SQLQueryExpr expr, - Consumer visit) { - expr.accept(new MySqlASTVisitorAdapter() { - @Override - public boolean visit(SQLExprTableSource x) { - // Avoid rewriting identifier in table name - return false; - } - - @Override - public void endVisit(SQLIdentifierExpr idExpr) { - visit.accept(idExpr); - } + @Override + public void endVisit(SQLIdentifierExpr idExpr) { + visit.accept(idExpr); + } }); - } + } - private String createAlias(String alias) { - return String.format("%s_%d", alias, next()); - } + private String createAlias(String alias) { + return 
String.format("%s_%d", alias, next()); + } - private Integer next() { - return aliasSuffix++; - } + private Integer next() { + return aliasSuffix++; + } - private static class Table { + private static class Table { - public String getName() { - return name; - } + public String getName() { + return name; + } - public String getAlias() { - return alias; - } + public String getAlias() { + return alias; + } - /** - * Table Name. - */ - private String name; + /** Table Name. */ + private String name; - /** - * Table Alias. - */ - private String alias; + /** Table Alias. */ + private String alias; - Table(String name, String alias) { - this.name = name; - this.alias = alias; - } + Table(String name, String alias) { + this.name = name; + this.alias = alias; + } - // Added for debugging - @Override - public String toString() { - return this.name + "-->" + this.alias; - } + // Added for debugging + @Override + public String toString() { + return this.name + "-->" + this.alias; } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/matchtoterm/TermFieldRewriter.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/matchtoterm/TermFieldRewriter.java index 5890befbca..312e783c6c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/matchtoterm/TermFieldRewriter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/matchtoterm/TermFieldRewriter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.matchtoterm; import com.alibaba.druid.sql.ast.SQLExpr; @@ -35,233 +34,228 @@ /** * Visitor to rewrite AST (abstract syntax tree) for supporting term_query in WHERE and IN condition - * Simple changing the matchQuery() to termQuery() will not work when mapping is both text and keyword - * The approach is to implement SQLIdentifier.visit() based on the correct field mapping. 
+ * Simple changing the matchQuery() to termQuery() will not work when mapping is both text and + * keyword The approach is to implement SQLIdentifier.visit() based on the correct field mapping. */ - public class TermFieldRewriter extends MySqlASTVisitorAdapter { - private Deque environment = new ArrayDeque<>(); - private TermRewriterFilter filterType; + private Deque environment = new ArrayDeque<>(); + private TermRewriterFilter filterType; - public TermFieldRewriter() { - this.filterType = TermRewriterFilter.COMMA; - } - - public TermFieldRewriter(TermRewriterFilter filterType) { - this.filterType = filterType; - } - - @Override - public boolean visit(MySqlSelectQueryBlock query) { - environment.push(new TermFieldScope()); - if (query.getFrom() == null) { - return false; - } - - Map indexToType = new HashMap<>(); - collect(query.getFrom(), indexToType, curScope().getAliases()); - if (indexToType.isEmpty()) { - // no index found for current scope, continue. - return true; - } - curScope().setMapper(getMappings(indexToType)); + public TermFieldRewriter() { + this.filterType = TermRewriterFilter.COMMA; + } - if (this.filterType == TermRewriterFilter.COMMA || this.filterType == TermRewriterFilter.MULTI_QUERY) { - checkMappingCompatibility(curScope(), indexToType); - } + public TermFieldRewriter(TermRewriterFilter filterType) { + this.filterType = filterType; + } - return true; + @Override + public boolean visit(MySqlSelectQueryBlock query) { + environment.push(new TermFieldScope()); + if (query.getFrom() == null) { + return false; } - @Override - public void endVisit(MySqlSelectQueryBlock query) { - environment.pop(); + Map indexToType = new HashMap<>(); + collect(query.getFrom(), indexToType, curScope().getAliases()); + if (indexToType.isEmpty()) { + // no index found for current scope, continue. 
+ return true; } + curScope().setMapper(getMappings(indexToType)); - @Override - public boolean visit(SQLSelectItem sqlSelectItem) { - return false; + if (this.filterType == TermRewriterFilter.COMMA + || this.filterType == TermRewriterFilter.MULTI_QUERY) { + checkMappingCompatibility(curScope(), indexToType); } - @Override - public boolean visit(SQLJoinTableSource tableSource) { - return false; - } + return true; + } + + @Override + public void endVisit(MySqlSelectQueryBlock query) { + environment.pop(); + } + + @Override + public boolean visit(SQLSelectItem sqlSelectItem) { + return false; + } + + @Override + public boolean visit(SQLJoinTableSource tableSource) { + return false; + } + + @Override + public boolean visit(SQLExprTableSource tableSource) { + return false; + } + + /** Fix null parent problem which is required when visiting SQLIdentifier */ + public boolean visit(SQLInListExpr inListExpr) { + inListExpr.getExpr().setParent(inListExpr); + return true; + } + + @SuppressWarnings("unchecked") + @Override + public boolean visit(SQLIdentifierExpr expr) { + if (isValidIdentifierForTerm(expr)) { + Map source = null; + if (this.filterType == TermRewriterFilter.COMMA + || this.filterType == TermRewriterFilter.MULTI_QUERY) { + Optional> optionalMap = curScope().resolveFieldMapping(expr.getName()); + if (optionalMap.isPresent()) { + source = optionalMap.get(); + } else { + return true; + } - @Override - public boolean visit(SQLExprTableSource tableSource) { - return false; - } + } else if (this.filterType == TermRewriterFilter.JOIN) { + String[] arr = expr.getName().split("\\.", 2); + if (arr.length < 2) { + throw new VerificationException("table alias or field name missing"); + } + String alias = arr[0]; + String fullFieldName = arr[1]; + + String index = curScope().getAliases().get(alias); + FieldMappings fieldMappings = curScope().getMapper().mapping(index); + if (fieldMappings.has(fullFieldName)) { + source = fieldMappings.mapping(fullFieldName); + } else { + 
return true; + } + } - /** - * Fix null parent problem which is required when visiting SQLIdentifier - */ - public boolean visit(SQLInListExpr inListExpr) { - inListExpr.getExpr().setParent(inListExpr); - return true; + String keywordAlias = isBothTextAndKeyword(source); + if (keywordAlias != null) { + expr.setName(expr.getName() + "." + keywordAlias); + } } - - - @SuppressWarnings("unchecked") - @Override - public boolean visit(SQLIdentifierExpr expr) { - if (isValidIdentifierForTerm(expr)) { - Map source = null; - if (this.filterType == TermRewriterFilter.COMMA || this.filterType == TermRewriterFilter.MULTI_QUERY) { - Optional> optionalMap = curScope().resolveFieldMapping(expr.getName()); - if (optionalMap.isPresent()) { - source = optionalMap.get(); - } else { - return true; - } - - } else if (this.filterType == TermRewriterFilter.JOIN) { - String[] arr = expr.getName().split("\\.", 2); - if (arr.length < 2) { - throw new VerificationException("table alias or field name missing"); - } - String alias = arr[0]; - String fullFieldName = arr[1]; - - String index = curScope().getAliases().get(alias); - FieldMappings fieldMappings = curScope().getMapper().mapping(index); - if (fieldMappings.has(fullFieldName)) { - source = fieldMappings.mapping(fullFieldName); - } else { - return true; - } - } - - String keywordAlias = isBothTextAndKeyword(source); - if (keywordAlias != null) { - expr.setName(expr.getName() + "." 
+ keywordAlias); - } + return true; + } + + public void collect( + SQLTableSource tableSource, Map indexToType, Map aliases) { + if (tableSource instanceof SQLExprTableSource) { + + String tableName = null; + SQLExprTableSource sqlExprTableSource = (SQLExprTableSource) tableSource; + + if (sqlExprTableSource.getExpr() instanceof SQLIdentifierExpr) { + SQLIdentifierExpr sqlIdentifier = (SQLIdentifierExpr) sqlExprTableSource.getExpr(); + tableName = sqlIdentifier.getName(); + indexToType.put(tableName, null); + } else if (sqlExprTableSource.getExpr() instanceof SQLBinaryOpExpr) { + SQLBinaryOpExpr sqlBinaryOpExpr = (SQLBinaryOpExpr) sqlExprTableSource.getExpr(); + tableName = ((SQLIdentifierExpr) sqlBinaryOpExpr.getLeft()).getName(); + SQLExpr rightSideOfExpression = sqlBinaryOpExpr.getRight(); + + // This assumes that right side of the expression is different name in query + if (rightSideOfExpression instanceof SQLIdentifierExpr) { + SQLIdentifierExpr right = (SQLIdentifierExpr) rightSideOfExpression; + indexToType.put(tableName, right.getName()); + } else { + throw new ParserException( + "Right side of the expression [" + + rightSideOfExpression.toString() + + "] is expected to be an identifier"); } - return true; + } + if (tableSource.getAlias() != null) { + aliases.put(tableSource.getAlias(), tableName); + } else { + aliases.put(tableName, tableName); + } + + } else if (tableSource instanceof SQLJoinTableSource) { + collect(((SQLJoinTableSource) tableSource).getLeft(), indexToType, aliases); + collect(((SQLJoinTableSource) tableSource).getRight(), indexToType, aliases); } - - public void collect(SQLTableSource tableSource, Map indexToType, Map aliases) { - if (tableSource instanceof SQLExprTableSource) { - - String tableName = null; - SQLExprTableSource sqlExprTableSource = (SQLExprTableSource) tableSource; - - if (sqlExprTableSource.getExpr() instanceof SQLIdentifierExpr) { - SQLIdentifierExpr sqlIdentifier = (SQLIdentifierExpr) sqlExprTableSource.getExpr(); - 
tableName = sqlIdentifier.getName(); - indexToType.put(tableName, null); - } else if (sqlExprTableSource.getExpr() instanceof SQLBinaryOpExpr) { - SQLBinaryOpExpr sqlBinaryOpExpr = (SQLBinaryOpExpr) sqlExprTableSource.getExpr(); - tableName = ((SQLIdentifierExpr) sqlBinaryOpExpr.getLeft()).getName(); - SQLExpr rightSideOfExpression = sqlBinaryOpExpr.getRight(); - - // This assumes that right side of the expression is different name in query - if (rightSideOfExpression instanceof SQLIdentifierExpr) { - SQLIdentifierExpr right = (SQLIdentifierExpr) rightSideOfExpression; - indexToType.put(tableName, right.getName()); - } else { - throw new ParserException("Right side of the expression [" + rightSideOfExpression.toString() - + "] is expected to be an identifier"); - } - } - if (tableSource.getAlias() != null) { - aliases.put(tableSource.getAlias(), tableName); - } else { - aliases.put(tableName, tableName); - } - - } else if (tableSource instanceof SQLJoinTableSource) { - collect(((SQLJoinTableSource) tableSource).getLeft(), indexToType, aliases); - collect(((SQLJoinTableSource) tableSource).getRight(), indexToType, aliases); + } + + /** Current scope which is top of the stack */ + private TermFieldScope curScope() { + return environment.peek(); + } + + public String isBothTextAndKeyword(Map source) { + if (source.containsKey("fields")) { + for (Object key : ((Map) source.get("fields")).keySet()) { + if (key instanceof String + && ((Map) ((Map) source.get("fields")).get(key)).get("type").equals("keyword")) { + return (String) key; } + } } + return null; + } + public boolean isValidIdentifierForTerm(SQLIdentifierExpr expr) { /** - * Current scope which is top of the stack + * Only for following conditions Identifier will be modified Where: WHERE identifier = + * 'something' IN list: IN ('Tom', 'Dick', 'Harry') IN subquery: IN (SELECT firstname from + * accounts/account where firstname = 'John') Group by: GROUP BY state , employer , ... 
Order + * by: ORDER BY firstname, lastname , ... + * + *

NOTE: Does not impact fields on ON condition clause in JOIN as we skip visiting + * SQLJoinTableSource */ - private TermFieldScope curScope() { - return environment.peek(); + return !expr.getName().startsWith("_") + && (isValidIdentifier(expr) || checkIfNestedIdentifier(expr)); + } + + private boolean checkIfNestedIdentifier(SQLIdentifierExpr expr) { + return expr.getParent() instanceof SQLMethodInvokeExpr + && ((SQLMethodInvokeExpr) expr.getParent()).getMethodName().equals("nested") + && isValidIdentifier(expr.getParent()); + } + + private boolean isValidIdentifier(SQLObject expr) { + SQLObject parent = expr.getParent(); + return isBinaryExprWithValidOperators(parent) + || parent instanceof SQLInListExpr + || parent instanceof SQLInSubQueryExpr + || parent instanceof SQLSelectOrderByItem + || parent instanceof MySqlSelectGroupByExpr; + } + + private boolean isBinaryExprWithValidOperators(SQLObject expr) { + if (!(expr instanceof SQLBinaryOpExpr)) { + return false; } + return Stream.of(SQLBinaryOperator.Equality, SQLBinaryOperator.Is, SQLBinaryOperator.IsNot) + .anyMatch(operator -> operator == ((SQLBinaryOpExpr) expr).getOperator()); + } - public String isBothTextAndKeyword(Map source) { - if (source.containsKey("fields")) { - for (Object key : ((Map) source.get("fields")).keySet()) { - if (key instanceof String - && ((Map) ((Map) source.get("fields")).get(key)).get("type").equals("keyword")) { - return (String) key; - } - } - } - return null; + private void checkMappingCompatibility(TermFieldScope scope, Map indexToType) { + if (scope.getMapper().isEmpty()) { + throw new VerificationException("Unknown index " + indexToType.keySet()); } + } - public boolean isValidIdentifierForTerm(SQLIdentifierExpr expr) { - /** - * Only for following conditions Identifier will be modified - * Where: WHERE identifier = 'something' - * IN list: IN ('Tom', 'Dick', 'Harry') - * IN subquery: IN (SELECT firstname from accounts/account where firstname = 'John') - * Group by: GROUP BY 
state , employer , ... - * Order by: ORDER BY firstname, lastname , ... - * - * NOTE: Does not impact fields on ON condition clause in JOIN as we skip visiting SQLJoinTableSource - */ - return !expr.getName().startsWith("_") && (isValidIdentifier(expr) || checkIfNestedIdentifier(expr)); - } + public IndexMappings getMappings(Map indexToType) { + String[] allIndexes = indexToType.keySet().stream().toArray(String[]::new); + // GetFieldMappingsRequest takes care of wildcard index expansion + return LocalClusterState.state().getFieldMappings(allIndexes); + } - private boolean checkIfNestedIdentifier(SQLIdentifierExpr expr) { - return - expr.getParent() instanceof SQLMethodInvokeExpr - && ((SQLMethodInvokeExpr) expr.getParent()).getMethodName().equals("nested") - && isValidIdentifier(expr.getParent()); - } + public enum TermRewriterFilter { + COMMA(","), // No joins, multiple tables in SELECT + JOIN("JOIN"), // All JOINs + MULTI_QUERY("MULTI_QUERY"); // MINUS and UNION - private boolean isValidIdentifier(SQLObject expr) { - SQLObject parent = expr.getParent(); - return isBinaryExprWithValidOperators(parent) - || parent instanceof SQLInListExpr - || parent instanceof SQLInSubQueryExpr - || parent instanceof SQLSelectOrderByItem - || parent instanceof MySqlSelectGroupByExpr; - } + public final String name; - private boolean isBinaryExprWithValidOperators(SQLObject expr) { - if (!(expr instanceof SQLBinaryOpExpr)) { - return false; - } - return Stream.of( - SQLBinaryOperator.Equality, - SQLBinaryOperator.Is, - SQLBinaryOperator.IsNot - ).anyMatch(operator -> operator == ((SQLBinaryOpExpr) expr).getOperator()); + TermRewriterFilter(String name) { + this.name = name; } - private void checkMappingCompatibility(TermFieldScope scope, Map indexToType) { - if (scope.getMapper().isEmpty()) { - throw new VerificationException("Unknown index " + indexToType.keySet()); - } - } - - public IndexMappings getMappings(Map indexToType) { - String[] allIndexes = 
indexToType.keySet().stream().toArray(String[]::new); - // GetFieldMappingsRequest takes care of wildcard index expansion - return LocalClusterState.state().getFieldMappings(allIndexes); - } - - public enum TermRewriterFilter { - COMMA(","), // No joins, multiple tables in SELECT - JOIN("JOIN"), // All JOINs - MULTI_QUERY("MULTI_QUERY"); // MINUS and UNION - - public final String name; - - TermRewriterFilter(String name) { - this.name = name; - } - - public static String toString(TermRewriterFilter filter) { - return filter.name; - } + public static String toString(TermRewriterFilter filter) { + return filter.name; } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/matchtoterm/TermFieldScope.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/matchtoterm/TermFieldScope.java index fd6380d9d1..29f8ed82b8 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/matchtoterm/TermFieldScope.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/matchtoterm/TermFieldScope.java @@ -3,66 +3,69 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.matchtoterm; import java.util.*; -import java.util.stream.Collectors; import org.json.JSONObject; import org.opensearch.sql.legacy.esdomain.mapping.FieldMappings; import org.opensearch.sql.legacy.esdomain.mapping.IndexMappings; -/** - * Index Mapping information in current query being visited. - */ +/** Index Mapping information in current query being visited. 
*/ public class TermFieldScope { - // mapper => index, type, field_name, FieldMappingMetaData - private IndexMappings mapper; - private FieldMappings finalMapping; - private Map aliases; + // mapper => index, type, field_name, FieldMappingMetaData + private IndexMappings mapper; + private FieldMappings finalMapping; + private Map aliases; - public TermFieldScope() { - this.mapper = IndexMappings.EMPTY; - this.aliases = new HashMap<>(); - } + public TermFieldScope() { + this.mapper = IndexMappings.EMPTY; + this.aliases = new HashMap<>(); + } - public Map getAliases() { - return aliases; - } + public Map getAliases() { + return aliases; + } - public void setAliases(Map aliases) { - this.aliases = aliases; - } + public void setAliases(Map aliases) { + this.aliases = aliases; + } - public IndexMappings getMapper() { - return this.mapper; - } + public IndexMappings getMapper() { + return this.mapper; + } - public void setMapper(IndexMappings mapper) { - this.mapper = mapper; - } + public void setMapper(IndexMappings mapper) { + this.mapper = mapper; + } - public Optional> resolveFieldMapping(String fieldName) { - Set indexMappings = new HashSet<>(mapper.allMappings()); - Optional> resolvedMapping = - indexMappings.stream() - .filter(mapping -> mapping.has(fieldName)) - .map(mapping -> mapping.mapping(fieldName)).reduce((map1, map2) -> { - if (!map1.equals(map2)) { - // TODO: Merge mappings if they are compatible, for text and text/keyword to text/keyword. 
- String exceptionReason = String.format(Locale.ROOT, "Different mappings are not allowed " - + "for the same field[%s]: found [%s] and [%s] ", - fieldName, pretty(map1), pretty(map2)); - throw new VerificationException(exceptionReason); - } - return map1; + public Optional> resolveFieldMapping(String fieldName) { + Set indexMappings = new HashSet<>(mapper.allMappings()); + Optional> resolvedMapping = + indexMappings.stream() + .filter(mapping -> mapping.has(fieldName)) + .map(mapping -> mapping.mapping(fieldName)) + .reduce( + (map1, map2) -> { + if (!map1.equals(map2)) { + // TODO: Merge mappings if they are compatible, for text and text/keyword to + // text/keyword. + String exceptionReason = + String.format( + Locale.ROOT, + "Different mappings are not allowed " + + "for the same field[%s]: found [%s] and [%s] ", + fieldName, + pretty(map1), + pretty(map2)); + throw new VerificationException(exceptionReason); + } + return map1; }); - return resolvedMapping; - } - - private static String pretty(Map mapping) { - return new JSONObject(mapping).toString().replaceAll("\"", ""); - } + return resolvedMapping; + } + private static String pretty(Map mapping) { + return new JSONObject(mapping).toString().replaceAll("\"", ""); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/matchtoterm/VerificationException.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/matchtoterm/VerificationException.java index 51b936bdc3..f8ec8ad215 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/matchtoterm/VerificationException.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/matchtoterm/VerificationException.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.matchtoterm; import org.opensearch.OpenSearchException; @@ -11,12 +10,12 @@ public class VerificationException extends OpenSearchException { - public VerificationException(String message) { - super(message); 
- } + public VerificationException(String message) { + super(message); + } - @Override - public RestStatus status() { - return RestStatus.BAD_REQUEST; - } + @Override + public RestStatus status() { + return RestStatus.BAD_REQUEST; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/From.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/From.java index 609d26f4a1..893a305096 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/From.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/From.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.nestedfield; import static com.alibaba.druid.sql.ast.statement.SQLJoinTableSource.JoinType.COMMA; @@ -14,119 +13,109 @@ import com.alibaba.druid.sql.ast.statement.SQLTableSource; import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlSelectQueryBlock; -/** - * Table (OpenSearch Index) expression in FROM statement. - */ +/** Table (OpenSearch Index) expression in FROM statement. */ class From extends SQLClause { - From(SQLTableSource expr) { - super(expr); - } - - /** - * Collect nested field(s) information and then remove them from FROM statement. - * Assumption: only 1 regular table in FROM (which is the first one) and nested field(s) has alias. - */ - @Override - void rewrite(Scope scope) { - if (!isJoin()) { - return; - } - - // At this point, FROM expr is SQLJoinTableSource. 
- if (!isCommaJoin()) { - scope.setActualJoinType(((SQLJoinTableSource) expr).getJoinType()); - ((SQLJoinTableSource) expr).setJoinType(COMMA); - } - - if (parentAlias(scope).isEmpty()) { - // Could also be empty now since normal JOIN tables may not have alias - if (scope.getActualJoinType() != null) { - ((SQLJoinTableSource) expr).setJoinType(scope.getActualJoinType()); - } - return; - } - - collectNestedFields(scope); - if (scope.isAnyNestedField()) { - eraseParentAlias(); - keepParentTableOnly(); - } else if (scope.getActualJoinType() != null){ - // set back the JoinType to original value if non COMMA JOIN on regular tables - ((SQLJoinTableSource) expr).setJoinType(scope.getActualJoinType()); - } - } - - private String parentAlias(Scope scope) { - scope.setParentAlias(((SQLJoinTableSource) expr).getLeft().getAlias()); - return emptyIfNull(scope.getParentAlias()); + From(SQLTableSource expr) { + super(expr); + } + + /** + * Collect nested field(s) information and then remove them from FROM statement. Assumption: only + * 1 regular table in FROM (which is the first one) and nested field(s) has alias. + */ + @Override + void rewrite(Scope scope) { + if (!isJoin()) { + return; } - /** - * Erase alias otherwise NLPchina has problem parsing nested field like 't.employees.name' - */ - private void eraseParentAlias() { - left().expr.setAlias(null); + // At this point, FROM expr is SQLJoinTableSource. 
+ if (!isCommaJoin()) { + scope.setActualJoinType(((SQLJoinTableSource) expr).getJoinType()); + ((SQLJoinTableSource) expr).setJoinType(COMMA); } - private void keepParentTableOnly() { - MySqlSelectQueryBlock query = (MySqlSelectQueryBlock) expr.getParent(); - query.setFrom(left().expr); - left().expr.setParent(query); + if (parentAlias(scope).isEmpty()) { + // Could also be empty now since normal JOIN tables may not have alias + if (scope.getActualJoinType() != null) { + ((SQLJoinTableSource) expr).setJoinType(scope.getActualJoinType()); + } + return; } - /** - * Collect path alias and full path mapping of nested field in FROM clause. - * Sample: - * FROM team t, t.employees e ... - *

- * Join - * / \ - * team t Join - * / \ - * t.employees e ... - *

- * t.employees is nested because path "t" == parentAlias "t" - * Save path alias to full path name mapping {"e": "employees"} to Scope - */ - private void collectNestedFields(Scope scope) { - From clause = this; - for (; clause.isCommaJoin(); clause = clause.right()) { - clause.left().addIfNestedField(scope); - } - clause.addIfNestedField(scope); + collectNestedFields(scope); + if (scope.isAnyNestedField()) { + eraseParentAlias(); + keepParentTableOnly(); + } else if (scope.getActualJoinType() != null) { + // set back the JoinType to original value if non COMMA JOIN on regular tables + ((SQLJoinTableSource) expr).setJoinType(scope.getActualJoinType()); } - - private boolean isCommaJoin() { - return expr instanceof SQLJoinTableSource && ((SQLJoinTableSource) expr).getJoinType() == COMMA; + } + + private String parentAlias(Scope scope) { + scope.setParentAlias(((SQLJoinTableSource) expr).getLeft().getAlias()); + return emptyIfNull(scope.getParentAlias()); + } + + /** Erase alias otherwise NLPchina has problem parsing nested field like 't.employees.name' */ + private void eraseParentAlias() { + left().expr.setAlias(null); + } + + private void keepParentTableOnly() { + MySqlSelectQueryBlock query = (MySqlSelectQueryBlock) expr.getParent(); + query.setFrom(left().expr); + left().expr.setParent(query); + } + + /** + * Collect path alias and full path mapping of nested field in FROM clause. Sample: FROM team t, + * t.employees e ... + * + *

Join / \ team t Join / \ t.employees e ... + * + *

t.employees is nested because path "t" == parentAlias "t" Save path alias to full path name + * mapping {"e": "employees"} to Scope + */ + private void collectNestedFields(Scope scope) { + From clause = this; + for (; clause.isCommaJoin(); clause = clause.right()) { + clause.left().addIfNestedField(scope); } + clause.addIfNestedField(scope); + } - private boolean isJoin() { - return expr instanceof SQLJoinTableSource; - } + private boolean isCommaJoin() { + return expr instanceof SQLJoinTableSource && ((SQLJoinTableSource) expr).getJoinType() == COMMA; + } - private From left() { - return new From(((SQLJoinTableSource) expr).getLeft()); - } + private boolean isJoin() { + return expr instanceof SQLJoinTableSource; + } - private From right() { - return new From(((SQLJoinTableSource) expr).getRight()); - } + private From left() { + return new From(((SQLJoinTableSource) expr).getLeft()); + } - private void addIfNestedField(Scope scope) { - if (!(expr instanceof SQLExprTableSource - && ((SQLExprTableSource) expr).getExpr() instanceof SQLIdentifierExpr)) { - return; - } + private From right() { + return new From(((SQLJoinTableSource) expr).getRight()); + } - Identifier table = new Identifier((SQLIdentifierExpr) ((SQLExprTableSource) expr).getExpr()); - if (table.path().equals(scope.getParentAlias())) { - scope.addAliasFullPath(emptyIfNull(expr.getAlias()), table.name()); - } + private void addIfNestedField(Scope scope) { + if (!(expr instanceof SQLExprTableSource + && ((SQLExprTableSource) expr).getExpr() instanceof SQLIdentifierExpr)) { + return; } - private String emptyIfNull(String str) { - return str == null ? "" : str; + Identifier table = new Identifier((SQLIdentifierExpr) ((SQLExprTableSource) expr).getExpr()); + if (table.path().equals(scope.getParentAlias())) { + scope.addAliasFullPath(emptyIfNull(expr.getAlias()), table.name()); } + } + private String emptyIfNull(String str) { + return str == null ? 
"" : str; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Identifier.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Identifier.java index 635cc63671..64938c71b1 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Identifier.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Identifier.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.nestedfield; import com.alibaba.druid.sql.ast.expr.SQLBinaryOpExpr; @@ -11,87 +10,85 @@ /** * Identifier expression in SELECT, FROM, WHERE, GROUP BY, ORDER BY etc. - *

- * Ex. To make concepts clear, for "e.firstname AND t.region" in "FROM team t, t.employees e": - * parent alias (to erase): 't' - * path: 'e' (full path saved in Scope is 'employees') - * name: 'firstname' + * + *

Ex. To make concepts clear, for "e.firstname AND t.region" in "FROM team t, t.employees e": + * parent alias (to erase): 't' path: 'e' (full path saved in Scope is 'employees') name: + * 'firstname' */ class Identifier extends SQLClause { - private static final String SEPARATOR = "."; - - Identifier(SQLIdentifierExpr expr) { - super(expr); - } - - /** - * Erase parent alias for all identifiers but only rewrite those (nested field identifier) NOT in WHERE. - * For identifier in conditions in WHERE, use full path as tag and delay the rewrite in Where.rewrite(). - */ - @Override - void rewrite(Scope scope) { - eraseParentAlias(scope); - if (isNestedField(scope)) { - renameByFullPath(scope); - if (isInCondition()) { - useFullPathAsTag(scope); - } else { - replaceByNestedFunction(expr, pathFromIdentifier(expr)); - } - } - } - - /** - * return the path of the expr name. e.g. - * expecting p returned as path in both WHERE p.name = 'A' and WHERE p IS NULL cases, - * in which expr.name = p.name and p separately - */ - String path() { - return separatorIndex() == -1 ? expr.getName() : expr.getName().substring(0, separatorIndex()); - } - - String name() { - return expr.getName().substring(separatorIndex() + 1); + private static final String SEPARATOR = "."; + + Identifier(SQLIdentifierExpr expr) { + super(expr); + } + + /** + * Erase parent alias for all identifiers but only rewrite those (nested field identifier) NOT in + * WHERE. For identifier in conditions in WHERE, use full path as tag and delay the rewrite in + * Where.rewrite(). 
+ */ + @Override + void rewrite(Scope scope) { + eraseParentAlias(scope); + if (isNestedField(scope)) { + renameByFullPath(scope); + if (isInCondition()) { + useFullPathAsTag(scope); + } else { + replaceByNestedFunction(expr, pathFromIdentifier(expr)); + } } - - private int separatorIndex() { - return expr.getName().indexOf(SEPARATOR); - } - - /** - * Erase parent alias otherwise it's required to specify it everywhere even on nested - * field (which NLPchina has problem with). - * Sample: "FROM team t, t.employees e WHERE t.region = 'US'" => "WHERE region = 'US'" - */ - private void eraseParentAlias(Scope scope) { - if (isStartWithParentAlias(scope)) { - expr.setName(name()); - } - } - - private boolean isStartWithParentAlias(Scope scope) { - return path().equals(scope.getParentAlias()); + } + + /** + * return the path of the expr name. e.g. expecting p returned as path in both WHERE p.name = 'A' + * and WHERE p IS NULL cases, in which expr.name = p.name and p separately + */ + String path() { + return separatorIndex() == -1 ? expr.getName() : expr.getName().substring(0, separatorIndex()); + } + + String name() { + return expr.getName().substring(separatorIndex() + 1); + } + + private int separatorIndex() { + return expr.getName().indexOf(SEPARATOR); + } + + /** + * Erase parent alias otherwise it's required to specify it everywhere even on nested field (which + * NLPchina has problem with). 
Sample: "FROM team t, t.employees e WHERE t.region = 'US'" => + * "WHERE region = 'US'" + */ + private void eraseParentAlias(Scope scope) { + if (isStartWithParentAlias(scope)) { + expr.setName(name()); } + } - private boolean isNestedField(Scope scope) { - return !scope.getFullPath(path()).isEmpty(); - } + private boolean isStartWithParentAlias(Scope scope) { + return path().equals(scope.getParentAlias()); + } - private void renameByFullPath(Scope scope) { - String fullPath = scope.getFullPath(path()); - if (fullPath.isEmpty()) { - throw new IllegalStateException("Full path not found for identifier:" + expr.getName()); - } - expr.setName(expr.getName().replaceFirst(path(), fullPath)); - } + private boolean isNestedField(Scope scope) { + return !scope.getFullPath(path()).isEmpty(); + } - private void useFullPathAsTag(Scope scope) { - scope.addConditionTag((SQLBinaryOpExpr) expr.getParent(), path()); + private void renameByFullPath(Scope scope) { + String fullPath = scope.getFullPath(path()); + if (fullPath.isEmpty()) { + throw new IllegalStateException("Full path not found for identifier:" + expr.getName()); } + expr.setName(expr.getName().replaceFirst(path(), fullPath)); + } - private boolean isInCondition() { - return expr.getParent() instanceof SQLBinaryOpExpr; - } + private void useFullPathAsTag(Scope scope) { + scope.addConditionTag((SQLBinaryOpExpr) expr.getParent(), path()); + } + private boolean isInCondition() { + return expr.getParent() instanceof SQLBinaryOpExpr; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/NestedFieldProjection.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/NestedFieldProjection.java index 4fa4611f9a..11e5007122 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/NestedFieldProjection.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/NestedFieldProjection.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: 
Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.nestedfield; import static com.alibaba.druid.sql.ast.statement.SQLJoinTableSource.JoinType; @@ -33,152 +32,153 @@ import org.opensearch.sql.legacy.rewriter.matchtoterm.VerificationException; import org.opensearch.sql.legacy.utils.StringUtils; -/** - * Nested field projection class to make OpenSearch return matched rows in nested field. - */ +/** Nested field projection class to make OpenSearch return matched rows in nested field. */ public class NestedFieldProjection { - private final SearchRequestBuilder request; - - public NestedFieldProjection(SearchRequestBuilder request) { - this.request = request; - } - - /** - * Project nested field in SELECT clause to InnerHit in NestedQueryBuilder - * - * @param fields list of field domain object - */ - public void project(List fields, JoinType nestedJoinType) { - if (isAnyNestedField(fields)) { - initBoolQueryFilterIfNull(); - List nestedQueries = extractNestedQueries(query()); - - if (nestedJoinType == JoinType.LEFT_OUTER_JOIN) { - // for LEFT JOIN on nested field as right table, the query will have only one nested field, so one path - Map> fieldNamesByPath = groupFieldNamesByPath(fields); - - if (fieldNamesByPath.size() > 1) { - String message = StringUtils.format( - "only single nested field is allowed as right table for LEFT JOIN, found %s ", - fieldNamesByPath.keySet() - ); - - throw new VerificationException(message); - } - - Map.Entry> pathToFields = fieldNamesByPath.entrySet().iterator().next(); - String path = pathToFields.getKey(); - List fieldNames = pathToFields.getValue(); - buildNestedLeftJoinQuery(path, fieldNames); - } else { - - groupFieldNamesByPath(fields).forEach( - (path, fieldNames) -> buildInnerHit(fieldNames, findNestedQueryWithSamePath(nestedQueries, path)) - ); - } - } - } - - /** - * Check via traditional for loop first to avoid lambda performance impact on all queries - * even though those without nested field - */ - private boolean 
isAnyNestedField(List fields) { - for (Field field : fields) { - if (field.isNested() && !field.isReverseNested()) { - return true; - } - } - return false; - } - - private void initBoolQueryFilterIfNull() { - if (request.request().source() == null || query() == null) { - request.setQuery(boolQuery()); - } - if (query().filter().isEmpty()) { - query().filter(boolQuery()); - } - } - - private Map> groupFieldNamesByPath(List fields) { - return fields.stream(). - filter(Field::isNested). - filter(not(Field::isReverseNested)). - collect(groupingBy(Field::getNestedPath, mapping(Field::getName, toList()))); - } - - /** - * Why search for NestedQueryBuilder recursively? - * Because 1) it was added and wrapped by BoolQuery when WHERE explained (far from here) - * 2) InnerHit must be added to the NestedQueryBuilder related - *

- * Either we store it to global data structure (which requires to be thread-safe or ThreadLocal) - * or we peel off BoolQuery to find it (the way we followed here because recursion tree should be very thin). - */ - private List extractNestedQueries(QueryBuilder query) { - List result = new ArrayList<>(); - if (query instanceof NestedQueryBuilder) { - result.add((NestedQueryBuilder) query); - } else if (query instanceof BoolQueryBuilder) { - BoolQueryBuilder boolQ = (BoolQueryBuilder) query; - Stream.of(boolQ.filter(), boolQ.must(), boolQ.should()). - flatMap(Collection::stream). - forEach(q -> result.addAll(extractNestedQueries(q))); + private final SearchRequestBuilder request; + + public NestedFieldProjection(SearchRequestBuilder request) { + this.request = request; + } + + /** + * Project nested field in SELECT clause to InnerHit in NestedQueryBuilder + * + * @param fields list of field domain object + */ + public void project(List fields, JoinType nestedJoinType) { + if (isAnyNestedField(fields)) { + initBoolQueryFilterIfNull(); + List nestedQueries = extractNestedQueries(query()); + + if (nestedJoinType == JoinType.LEFT_OUTER_JOIN) { + // for LEFT JOIN on nested field as right table, the query will have only one nested field, + // so one path + Map> fieldNamesByPath = groupFieldNamesByPath(fields); + + if (fieldNamesByPath.size() > 1) { + String message = + StringUtils.format( + "only single nested field is allowed as right table for LEFT JOIN, found %s ", + fieldNamesByPath.keySet()); + + throw new VerificationException(message); } - return result; - } - - private void buildInnerHit(List fieldNames, NestedQueryBuilder query) { - query.innerHit(new InnerHitBuilder().setFetchSourceContext( - new FetchSourceContext(true, fieldNames.toArray(new String[0]), null) - )); - } - - /** - * Why linear search? Because NestedQueryBuilder hides "path" field from any access. - * Assumption: collected NestedQueryBuilder list should be very small or mostly only one. 
- */ - private NestedQueryBuilder findNestedQueryWithSamePath(List nestedQueries, String path) { - return nestedQueries.stream(). - filter(query -> isSamePath(path, query)). - findAny(). - orElseGet(createEmptyNestedQuery(path)); - } - private boolean isSamePath(String path, NestedQueryBuilder query) { - return nestedQuery(path, query.query(), query.scoreMode()).equals(query); + Map.Entry> pathToFields = + fieldNamesByPath.entrySet().iterator().next(); + String path = pathToFields.getKey(); + List fieldNames = pathToFields.getValue(); + buildNestedLeftJoinQuery(path, fieldNames); + } else { + + groupFieldNamesByPath(fields) + .forEach( + (path, fieldNames) -> + buildInnerHit(fieldNames, findNestedQueryWithSamePath(nestedQueries, path))); + } } - - /** - * Create a nested query with match all filter to place inner hits - */ - private Supplier createEmptyNestedQuery(String path) { - return () -> { - NestedQueryBuilder nestedQuery = nestedQuery(path, matchAllQuery(), ScoreMode.None); - ((BoolQueryBuilder) query().filter().get(0)).must(nestedQuery); - return nestedQuery; - }; + } + + /** + * Check via traditional for loop first to avoid lambda performance impact on all queries even + * though those without nested field + */ + private boolean isAnyNestedField(List fields) { + for (Field field : fields) { + if (field.isNested() && !field.isReverseNested()) { + return true; + } } + return false; + } - private BoolQueryBuilder query() { - return (BoolQueryBuilder) request.request().source().query(); + private void initBoolQueryFilterIfNull() { + if (request.request().source() == null || query() == null) { + request.setQuery(boolQuery()); } - - private Predicate not(Predicate predicate) { - return predicate.negate(); + if (query().filter().isEmpty()) { + query().filter(boolQuery()); } - - - private void buildNestedLeftJoinQuery(String path, List fieldNames) { - BoolQueryBuilder existsNestedQuery = boolQuery(); - existsNestedQuery.mustNot().add(nestedQuery(path, 
existsQuery(path), ScoreMode.None)); - - NestedQueryBuilder matchAllNestedQuery = nestedQuery(path, matchAllQuery(), ScoreMode.None); - buildInnerHit(fieldNames, matchAllNestedQuery); - - ((BoolQueryBuilder) query().filter().get(0)).should().add(existsNestedQuery); - ((BoolQueryBuilder) query().filter().get(0)).should().add(matchAllNestedQuery); + } + + private Map> groupFieldNamesByPath(List fields) { + return fields.stream() + .filter(Field::isNested) + .filter(not(Field::isReverseNested)) + .collect(groupingBy(Field::getNestedPath, mapping(Field::getName, toList()))); + } + + /** + * Why search for NestedQueryBuilder recursively? Because 1) it was added and wrapped by BoolQuery + * when WHERE explained (far from here) 2) InnerHit must be added to the NestedQueryBuilder + * related + * + *

Either we store it to global data structure (which requires to be thread-safe or + * ThreadLocal) or we peel off BoolQuery to find it (the way we followed here because recursion + * tree should be very thin). + */ + private List extractNestedQueries(QueryBuilder query) { + List result = new ArrayList<>(); + if (query instanceof NestedQueryBuilder) { + result.add((NestedQueryBuilder) query); + } else if (query instanceof BoolQueryBuilder) { + BoolQueryBuilder boolQ = (BoolQueryBuilder) query; + Stream.of(boolQ.filter(), boolQ.must(), boolQ.should()) + .flatMap(Collection::stream) + .forEach(q -> result.addAll(extractNestedQueries(q))); } + return result; + } + + private void buildInnerHit(List fieldNames, NestedQueryBuilder query) { + query.innerHit( + new InnerHitBuilder() + .setFetchSourceContext( + new FetchSourceContext(true, fieldNames.toArray(new String[0]), null))); + } + + /** + * Why linear search? Because NestedQueryBuilder hides "path" field from any access. Assumption: + * collected NestedQueryBuilder list should be very small or mostly only one. 
+ */ + private NestedQueryBuilder findNestedQueryWithSamePath( + List nestedQueries, String path) { + return nestedQueries.stream() + .filter(query -> isSamePath(path, query)) + .findAny() + .orElseGet(createEmptyNestedQuery(path)); + } + + private boolean isSamePath(String path, NestedQueryBuilder query) { + return nestedQuery(path, query.query(), query.scoreMode()).equals(query); + } + + /** Create a nested query with match all filter to place inner hits */ + private Supplier createEmptyNestedQuery(String path) { + return () -> { + NestedQueryBuilder nestedQuery = nestedQuery(path, matchAllQuery(), ScoreMode.None); + ((BoolQueryBuilder) query().filter().get(0)).must(nestedQuery); + return nestedQuery; + }; + } + + private BoolQueryBuilder query() { + return (BoolQueryBuilder) request.request().source().query(); + } + + private Predicate not(Predicate predicate) { + return predicate.negate(); + } + + private void buildNestedLeftJoinQuery(String path, List fieldNames) { + BoolQueryBuilder existsNestedQuery = boolQuery(); + existsNestedQuery.mustNot().add(nestedQuery(path, existsQuery(path), ScoreMode.None)); + + NestedQueryBuilder matchAllNestedQuery = nestedQuery(path, matchAllQuery(), ScoreMode.None); + buildInnerHit(fieldNames, matchAllNestedQuery); + + ((BoolQueryBuilder) query().filter().get(0)).should().add(existsNestedQuery); + ((BoolQueryBuilder) query().filter().get(0)).should().add(matchAllNestedQuery); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/NestedFieldRewriter.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/NestedFieldRewriter.java index f93f5e344e..c128b1f6f0 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/NestedFieldRewriter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/NestedFieldRewriter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.nestedfield; import 
static org.opensearch.sql.legacy.utils.Util.NESTED_JOIN_TYPE; @@ -16,97 +15,93 @@ import java.util.Deque; /** - * Visitor to rewrite AST (abstract syntax tree) for nested type fields to support implicit nested() function call. - * Intuitively, the approach is to implement SQLIdentifier.visit() and wrap nested() function for nested field. - * The parsing result of FROM clause will be used to determine if an identifier is nested field. - *

- * State transition table (state here means Scope + Query AST): + * Visitor to rewrite AST (abstract syntax tree) for nested type fields to support implicit nested() + * function call. Intuitively, the approach is to implement SQLIdentifier.visit() and wrap nested() + * function for nested field. The parsing result of FROM clause will be used to determine if an + * identifier is nested field. + * + *

State transition table (state here means Scope + Query AST): * _________________________________________________________________________________________________________________ - * | Rewrite | Scope | Sample Query | + * | Rewrite | Scope | Sample Query | * |---------------------------------------------------------------------------------------------------------------| - * | (Start) | () | SELECT e.lastname, COUNT(*) FROM team t, employees e | - * | | | WHERE region = 'US' and e.firstname = 'John' | - * | | | GROUP BY e.lastname | + * | (Start) | () | SELECT e.lastname, COUNT(*) FROM team t, employees e | | | | WHERE region = 'US' + * and e.firstname = 'John' | | | | GROUP BY e.lastname | * |---------------------------------------------------------------------------------------------------------------| - * | FROM | (parentAlias='t' | SELECT e.lastname, COUNT(*) FROM team | - * | | aliasFullPaths={e: employees}| WHERE region = 'US' and e.firstname = 'John' | - * | | conditionTags={}) | GROUP BY e.lastname | + * | FROM | (parentAlias='t' | SELECT e.lastname, COUNT(*) FROM team | | | aliasFullPaths={e: + * employees}| WHERE region = 'US' and e.firstname = 'John' | | | conditionTags={}) | GROUP BY + * e.lastname | * |---------------------------------------------------------------------------------------------------------------| - * | Identifier | (parentAlias='t' | SELECT nested(employees.lastname), COUNT(*) FROM team | - * | | aliasFullPaths={e: employees}| WHERE region = 'US' and employees.firstname = 'John' | - * | | conditionTags={c: employees})| GROUP BY nested(employees.lastname) | + * | Identifier | (parentAlias='t' | SELECT nested(employees.lastname), COUNT(*) FROM team | | | + * aliasFullPaths={e: employees}| WHERE region = 'US' and employees.firstname = 'John' | | | + * conditionTags={c: employees})| GROUP BY nested(employees.lastname) | * |---------------------------------------------------------------------------------------------------------------| - * | 
WHERE | (parentAlias='t' | SELECT nested(employees.lastname), COUNT(*) FROM team | - * | | aliasFullPaths={e: employees}| WHERE region = 'US' and nested(employees.firstname) = 'John' | - * | | conditionTags={c: employees})| GROUP BY nested(employees.lastname) | + * | WHERE | (parentAlias='t' | SELECT nested(employees.lastname), COUNT(*) FROM team | | | + * aliasFullPaths={e: employees}| WHERE region = 'US' and nested(employees.firstname) = 'John' | | | + * conditionTags={c: employees})| GROUP BY nested(employees.lastname) | * |---------------------------------------------------------------------------------------------------------------| - *

- * Note 'c' in conditionTag refer to the reference to SQLBinaryOpExpr object of condition 'employees.firstname = 'John' - *

- * More details: - * 1) Manage environment in the case of subquery - * 2) Add nested field to select for SELECT * - * 3) Merge conditions of same nested field to single nested() call + * + *

Note 'c' in conditionTag refer to the reference to SQLBinaryOpExpr object of condition + * 'employees.firstname = 'John' + * + *

More details: 1) Manage environment in the case of subquery 2) Add nested field to select for + * SELECT * 3) Merge conditions of same nested field to single nested() call */ public class NestedFieldRewriter extends MySqlASTVisitorAdapter { - /** - * Scope stack to record the state (nested field names etc) for current query. - * In the case of subquery, the active scope of current query is the top element of the stack. - */ - private Deque environment = new ArrayDeque<>(); - - /** - * Rewrite FROM here to make sure FROM statement always be visited before other statement in query. - * Note that return true anyway to continue visiting FROM in subquery if any. - */ - @Override - public boolean visit(MySqlSelectQueryBlock query) { - environment.push(new Scope()); - if (query.getFrom() == null) { - return false; - } - - query.getFrom().setParent(query); - new From(query.getFrom()).rewrite(curScope()); + /** + * Scope stack to record the state (nested field names etc) for current query. In the case of + * subquery, the active scope of current query is the top element of the stack. + */ + private Deque environment = new ArrayDeque<>(); + + /** + * Rewrite FROM here to make sure FROM statement always be visited before other statement in + * query. Note that return true anyway to continue visiting FROM in subquery if any. 
+ */ + @Override + public boolean visit(MySqlSelectQueryBlock query) { + environment.push(new Scope()); + if (query.getFrom() == null) { + return false; + } - if (curScope().isAnyNestedField() && isNotGroupBy(query)) { - new Select(query.getSelectList()).rewrite(curScope()); - } + query.getFrom().setParent(query); + new From(query.getFrom()).rewrite(curScope()); - query.putAttribute(NESTED_JOIN_TYPE, curScope().getActualJoinType()); - return true; + if (curScope().isAnyNestedField() && isNotGroupBy(query)) { + new Select(query.getSelectList()).rewrite(curScope()); } - @Override - public boolean visit(SQLIdentifierExpr expr) { - if (curScope().isAnyNestedField()) { - new Identifier(expr).rewrite(curScope()); - } - return true; - } + query.putAttribute(NESTED_JOIN_TYPE, curScope().getActualJoinType()); + return true; + } - @Override - public void endVisit(SQLBinaryOpExpr expr) { - if (curScope().isAnyNestedField()) { - new Where(expr).rewrite(curScope()); - } + @Override + public boolean visit(SQLIdentifierExpr expr) { + if (curScope().isAnyNestedField()) { + new Identifier(expr).rewrite(curScope()); } + return true; + } - @Override - public void endVisit(MySqlSelectQueryBlock query) { - environment.pop(); + @Override + public void endVisit(SQLBinaryOpExpr expr) { + if (curScope().isAnyNestedField()) { + new Where(expr).rewrite(curScope()); } + } - /** - * Current scope which is top of the stack - */ - private Scope curScope() { - return environment.peek(); - } + @Override + public void endVisit(MySqlSelectQueryBlock query) { + environment.pop(); + } - private boolean isNotGroupBy(MySqlSelectQueryBlock query) { - return query.getGroupBy() == null; - } + /** Current scope which is top of the stack */ + private Scope curScope() { + return environment.peek(); + } + private boolean isNotGroupBy(MySqlSelectQueryBlock query) { + return query.getGroupBy() == null; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/SQLClause.java 
b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/SQLClause.java index 160403ab11..fb4c1b9fe9 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/SQLClause.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/SQLClause.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.nestedfield; import com.alibaba.druid.sql.ast.SQLExpr; @@ -28,71 +27,69 @@ */ abstract class SQLClause { - protected final T expr; - - SQLClause(T expr) { - this.expr = expr; - } + protected final T expr; - /** - * Rewrite nested fields in query according to/fill into information in scope. - * - * @param scope Scope of current query - */ - abstract void rewrite(Scope scope); + SQLClause(T expr) { + this.expr = expr; + } - SQLMethodInvokeExpr replaceByNestedFunction(SQLExpr expr, String nestedPath) { - final int nestedPathIndex = 1; - SQLMethodInvokeExpr nestedFunc = replaceByNestedFunction(expr); - nestedFunc.getParameters().add(nestedPathIndex, new SQLCharExpr(nestedPath)); - return nestedFunc; - } + /** + * Rewrite nested fields in query according to/fill into information in scope. 
+ * + * @param scope Scope of current query + */ + abstract void rewrite(Scope scope); - /** - * Replace expr by nested(expr) and set pointer in parent properly - */ - SQLMethodInvokeExpr replaceByNestedFunction(SQLExpr expr) { - SQLObject parent = expr.getParent(); - SQLMethodInvokeExpr nestedFunc = wrapNestedFunction(expr); - if (parent instanceof SQLAggregateExpr) { - List args = ((SQLAggregateExpr) parent).getArguments(); - args.set(args.indexOf(expr), nestedFunc); - } else if (parent instanceof SQLSelectItem) { - ((SQLSelectItem) parent).setExpr(nestedFunc); - } else if (parent instanceof MySqlSelectGroupByExpr) { - ((MySqlSelectGroupByExpr) parent).setExpr(nestedFunc); - } else if (parent instanceof SQLSelectOrderByItem) { - ((SQLSelectOrderByItem) parent).setExpr(nestedFunc); - } else if (parent instanceof SQLInSubQueryExpr) { - ((SQLInSubQueryExpr) parent).setExpr(nestedFunc); - } else if (parent instanceof SQLBinaryOpExpr) { - SQLBinaryOpExpr parentOp = (SQLBinaryOpExpr) parent; - if (parentOp.getLeft() == expr) { - parentOp.setLeft(nestedFunc); - } else { - parentOp.setRight(nestedFunc); - } - } else if (parent instanceof MySqlSelectQueryBlock) { - ((MySqlSelectQueryBlock) parent).setWhere(nestedFunc); - } else if (parent instanceof SQLNotExpr) { - ((SQLNotExpr) parent).setExpr(nestedFunc); - } else { - throw new IllegalStateException("Unsupported place to use nested field under parent: " + parent); - } - return nestedFunc; - } + SQLMethodInvokeExpr replaceByNestedFunction(SQLExpr expr, String nestedPath) { + final int nestedPathIndex = 1; + SQLMethodInvokeExpr nestedFunc = replaceByNestedFunction(expr); + nestedFunc.getParameters().add(nestedPathIndex, new SQLCharExpr(nestedPath)); + return nestedFunc; + } - private SQLMethodInvokeExpr wrapNestedFunction(SQLExpr expr) { - SQLMethodInvokeExpr nestedFunc = new SQLMethodInvokeExpr("nested"); - nestedFunc.setParent(expr.getParent()); - nestedFunc.addParameter(expr); // this will auto set parent of expr - 
return nestedFunc; + /** Replace expr by nested(expr) and set pointer in parent properly */ + SQLMethodInvokeExpr replaceByNestedFunction(SQLExpr expr) { + SQLObject parent = expr.getParent(); + SQLMethodInvokeExpr nestedFunc = wrapNestedFunction(expr); + if (parent instanceof SQLAggregateExpr) { + List args = ((SQLAggregateExpr) parent).getArguments(); + args.set(args.indexOf(expr), nestedFunc); + } else if (parent instanceof SQLSelectItem) { + ((SQLSelectItem) parent).setExpr(nestedFunc); + } else if (parent instanceof MySqlSelectGroupByExpr) { + ((MySqlSelectGroupByExpr) parent).setExpr(nestedFunc); + } else if (parent instanceof SQLSelectOrderByItem) { + ((SQLSelectOrderByItem) parent).setExpr(nestedFunc); + } else if (parent instanceof SQLInSubQueryExpr) { + ((SQLInSubQueryExpr) parent).setExpr(nestedFunc); + } else if (parent instanceof SQLBinaryOpExpr) { + SQLBinaryOpExpr parentOp = (SQLBinaryOpExpr) parent; + if (parentOp.getLeft() == expr) { + parentOp.setLeft(nestedFunc); + } else { + parentOp.setRight(nestedFunc); + } + } else if (parent instanceof MySqlSelectQueryBlock) { + ((MySqlSelectQueryBlock) parent).setWhere(nestedFunc); + } else if (parent instanceof SQLNotExpr) { + ((SQLNotExpr) parent).setExpr(nestedFunc); + } else { + throw new IllegalStateException( + "Unsupported place to use nested field under parent: " + parent); } + return nestedFunc; + } - String pathFromIdentifier(SQLExpr identifier) { - String field = Util.extendedToString(identifier); - int lastDot = field.lastIndexOf("."); - return lastDot == -1 ? 
field :field.substring(0, lastDot); - } + private SQLMethodInvokeExpr wrapNestedFunction(SQLExpr expr) { + SQLMethodInvokeExpr nestedFunc = new SQLMethodInvokeExpr("nested"); + nestedFunc.setParent(expr.getParent()); + nestedFunc.addParameter(expr); // this will auto set parent of expr + return nestedFunc; + } + String pathFromIdentifier(SQLExpr identifier) { + String field = Util.extendedToString(identifier); + int lastDot = field.lastIndexOf("."); + return lastDot == -1 ? field : field.substring(0, lastDot); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Scope.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Scope.java index 5f035bc725..f65d7f166b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Scope.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Scope.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.nestedfield; import static com.alibaba.druid.sql.ast.statement.SQLJoinTableSource.JoinType; @@ -14,71 +13,68 @@ import java.util.Map; import java.util.Set; -/** - * Nested field information in current query being visited. - */ +/** Nested field information in current query being visited. */ class Scope { - /** Join Type as passed in the actual SQL subquery */ - private JoinType actualJoinType; - - /** Alias of parent such as alias "t" of parent table "team" in "FROM team t, t.employees e" */ - - private String parentAlias; - - /** - * Mapping from nested field path alias to path full name in FROM. - * eg. 
e in {e => employees} in "FROM t.employees e" - */ - private Map aliasFullPaths = new HashMap<>(); - - /** - * Mapping from binary operation condition (in WHERE) to nested - * field tag (full path for nested, EMPTY for non-nested field) - */ - private Map conditionTags = new IdentityHashMap<>(); - - String getParentAlias() { - return parentAlias; - } - - void setParentAlias(String parentAlias) { - this.parentAlias = parentAlias; + /** Join Type as passed in the actual SQL subquery */ + private JoinType actualJoinType; + + /** Alias of parent such as alias "t" of parent table "team" in "FROM team t, t.employees e" */ + private String parentAlias; + + /** + * Mapping from nested field path alias to path full name in FROM. eg. e in {e => employees} in + * "FROM t.employees e" + */ + private Map aliasFullPaths = new HashMap<>(); + + /** + * Mapping from binary operation condition (in WHERE) to nested field tag (full path for nested, + * EMPTY for non-nested field) + */ + private Map conditionTags = new IdentityHashMap<>(); + + String getParentAlias() { + return parentAlias; + } + + void setParentAlias(String parentAlias) { + this.parentAlias = parentAlias; + } + + void addAliasFullPath(String alias, String path) { + if (alias.isEmpty()) { + aliasFullPaths.put(path, path); + } else { + aliasFullPaths.put(alias, path); } + } - void addAliasFullPath(String alias, String path) { - if (alias.isEmpty()) { - aliasFullPaths.put(path, path); - } else { - aliasFullPaths.put(alias, path); - } - } + String getFullPath(String alias) { + return aliasFullPaths.getOrDefault(alias, ""); + } - String getFullPath(String alias) { - return aliasFullPaths.getOrDefault(alias, ""); - } + boolean isAnyNestedField() { + return !aliasFullPaths.isEmpty(); + } - boolean isAnyNestedField() { - return !aliasFullPaths.isEmpty(); - } + Set getAliases() { + return aliasFullPaths.keySet(); + } - Set getAliases() { - return aliasFullPaths.keySet(); - } + String getConditionTag(SQLBinaryOpExpr expr) { + 
return conditionTags.getOrDefault(expr, ""); + } - String getConditionTag(SQLBinaryOpExpr expr) { - return conditionTags.getOrDefault(expr, ""); - } + void addConditionTag(SQLBinaryOpExpr expr, String tag) { + conditionTags.put(expr, tag); + } - void addConditionTag(SQLBinaryOpExpr expr, String tag) { - conditionTags.put(expr, tag); - } - - JoinType getActualJoinType() { - return actualJoinType; - } + JoinType getActualJoinType() { + return actualJoinType; + } - void setActualJoinType(JoinType joinType) { - actualJoinType = joinType; - } + void setActualJoinType(JoinType joinType) { + actualJoinType = joinType; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Select.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Select.java index f514e6d081..8d2d6402e1 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Select.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Select.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.nestedfield; import com.alibaba.druid.sql.ast.expr.SQLAllColumnExpr; @@ -11,39 +10,37 @@ import com.alibaba.druid.sql.ast.statement.SQLSelectItem; import java.util.List; -/** - * Column list in SELECT statement. - */ +/** Column list in SELECT statement. */ class Select extends SQLClause> { - Select(List expr) { - super(expr); - } - - /** - * Rewrite by adding nested field to SELECT in the case of 'SELECT *'. - *

- * Ex. 'SELECT *' => 'SELECT *, employees.*' - * So that NestedFieldProjection will add 'employees.*' to includes list in inner_hits. - */ - @Override - void rewrite(Scope scope) { - if (isSelectAllOnly()) { - addSelectAllForNestedField(scope); - } + Select(List expr) { + super(expr); + } + + /** + * Rewrite by adding nested field to SELECT in the case of 'SELECT *'. + * + *

Ex. 'SELECT *' => 'SELECT *, employees.*' So that NestedFieldProjection will add + * 'employees.*' to includes list in inner_hits. + */ + @Override + void rewrite(Scope scope) { + if (isSelectAllOnly()) { + addSelectAllForNestedField(scope); } + } - private boolean isSelectAllOnly() { - return expr.size() == 1 && expr.get(0).getExpr() instanceof SQLAllColumnExpr; - } + private boolean isSelectAllOnly() { + return expr.size() == 1 && expr.get(0).getExpr() instanceof SQLAllColumnExpr; + } - private void addSelectAllForNestedField(Scope scope) { - for (String alias : scope.getAliases()) { - expr.add(createSelectItem(alias + ".*")); - } + private void addSelectAllForNestedField(Scope scope) { + for (String alias : scope.getAliases()) { + expr.add(createSelectItem(alias + ".*")); } + } - private SQLSelectItem createSelectItem(String name) { - return new SQLSelectItem(new SQLIdentifierExpr(name)); - } + private SQLSelectItem createSelectItem(String name) { + return new SQLSelectItem(new SQLIdentifierExpr(name)); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Where.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Where.java index c126bb264f..3c1b995a4c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Where.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/nestedfield/Where.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.nestedfield; import com.alibaba.druid.sql.ast.expr.SQLBinaryOpExpr; @@ -11,116 +10,92 @@ import com.alibaba.druid.sql.ast.expr.SQLNotExpr; import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlSelectQueryBlock; -/** - * Condition expression in WHERE statement. - */ +/** Condition expression in WHERE statement. 
*/ class Where extends SQLClause { - Where(SQLBinaryOpExpr expr) { - super(expr); - } + Where(SQLBinaryOpExpr expr) { + super(expr); + } - /** - * Rewrite if left and right tag is different (or reach root of WHERE). - * Otherwise continue delaying the rewrite. - *

- * Assumption: there are only 2 forms of condition - * 1) BinaryOp: Left=Identifier, right=value - * 2) BinaryOp: Left=BinaryOp, right=BinaryOp - */ - @Override - void rewrite(Scope scope) { - if (isLeftChildCondition()) { - if (isChildTagEquals(scope)) { - useAnyChildTag(scope); - } else { - left().mergeNestedField(scope); - right().mergeNestedField(scope); - } - } - mergeIfHaveTagAndIsRootOfWhereOrNot(scope); + /** + * Rewrite if left and right tag is different (or reach root of WHERE). Otherwise continue + * delaying the rewrite. + * + *

Assumption: there are only 2 forms of condition 1) BinaryOp: Left=Identifier, right=value 2) + * BinaryOp: Left=BinaryOp, right=BinaryOp + */ + @Override + void rewrite(Scope scope) { + if (isLeftChildCondition()) { + if (isChildTagEquals(scope)) { + useAnyChildTag(scope); + } else { + left().mergeNestedField(scope); + right().mergeNestedField(scope); + } } + mergeIfHaveTagAndIsRootOfWhereOrNot(scope); + } - private boolean isLeftChildCondition() { - return expr.getLeft() instanceof SQLBinaryOpExpr; - } + private boolean isLeftChildCondition() { + return expr.getLeft() instanceof SQLBinaryOpExpr; + } - private boolean isChildTagEquals(Scope scope) { - String left = scope.getConditionTag((SQLBinaryOpExpr) expr.getLeft()); - String right = scope.getConditionTag((SQLBinaryOpExpr) expr.getRight()); - return left.equals(right); - } + private boolean isChildTagEquals(Scope scope) { + String left = scope.getConditionTag((SQLBinaryOpExpr) expr.getLeft()); + String right = scope.getConditionTag((SQLBinaryOpExpr) expr.getRight()); + return left.equals(right); + } - private void useAnyChildTag(Scope scope) { - scope.addConditionTag(expr, scope.getConditionTag((SQLBinaryOpExpr) expr.getLeft())); - } + private void useAnyChildTag(Scope scope) { + scope.addConditionTag(expr, scope.getConditionTag((SQLBinaryOpExpr) expr.getLeft())); + } - /** - * Merge anyway if the root of WHERE clause or {@link SQLNotExpr} be reached. - */ - private void mergeIfHaveTagAndIsRootOfWhereOrNot(Scope scope) { - if (scope.getConditionTag(expr).isEmpty()) { - return; - } - if (expr.getParent() instanceof MySqlSelectQueryBlock - || expr.getParent() instanceof SQLNotExpr) { - mergeNestedField(scope); - } + /** Merge anyway if the root of WHERE clause or {@link SQLNotExpr} be reached. 
*/ + private void mergeIfHaveTagAndIsRootOfWhereOrNot(Scope scope) { + if (scope.getConditionTag(expr).isEmpty()) { + return; } - - private Where left() { - return new Where((SQLBinaryOpExpr) expr.getLeft()); + if (expr.getParent() instanceof MySqlSelectQueryBlock + || expr.getParent() instanceof SQLNotExpr) { + mergeNestedField(scope); } + } - private Where right() { - return new Where((SQLBinaryOpExpr) expr.getRight()); - } + private Where left() { + return new Where((SQLBinaryOpExpr) expr.getLeft()); + } - /** - * There are 2 cases: - * 1) For a single condition, just wrap nested() function. That's it. - *

- * BinaryOp - * / \ - * Identifier Value - * "employees.age" "30" - *

- * to - *

- * BinaryOp - * / \ - * Method Value - * "nested" "30" - * | - * Identifier - * "employees.age" - *

- * 2) For multiple conditions, put entire BinaryOp to the parameter and add function name "nested()" first - *

- * BinaryOp (a) - * / \ - * BinaryOp BinaryOp - * | | - * ... ... - *

- * to - *

- * Method - * "nested" - * | - * BinaryOp (a) - * / \ - * ... ... - */ - private void mergeNestedField(Scope scope) { - String tag = scope.getConditionTag(expr); - if (!tag.isEmpty()) { - if (isLeftChildCondition()) { - replaceByNestedFunction(expr).getParameters().add(0, new SQLCharExpr(tag)); - } else { - replaceByNestedFunction(expr.getLeft(), pathFromIdentifier(expr.getLeft())); - } - } - } + private Where right() { + return new Where((SQLBinaryOpExpr) expr.getRight()); + } + /** + * There are 2 cases: 1) For a single condition, just wrap nested() function. That's it. + * + *

BinaryOp / \ Identifier Value "employees.age" "30" + * + *

to + * + *

BinaryOp / \ Method Value "nested" "30" | Identifier "employees.age" + * + *

2) For multiple conditions, put entire BinaryOp to the parameter and add function name + * "nested()" first + * + *

BinaryOp (a) / \ BinaryOp BinaryOp | | ... ... + * + *

to + * + *

Method "nested" | BinaryOp (a) / \ ... ... + */ + private void mergeNestedField(Scope scope) { + String tag = scope.getConditionTag(expr); + if (!tag.isEmpty()) { + if (isLeftChildCondition()) { + replaceByNestedFunction(expr).getParameters().add(0, new SQLCharExpr(tag)); + } else { + replaceByNestedFunction(expr.getLeft(), pathFromIdentifier(expr.getLeft())); + } + } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/ordinal/OrdinalRewriterRule.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/ordinal/OrdinalRewriterRule.java index 1d44ac8261..f9766e60ce 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/ordinal/OrdinalRewriterRule.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/ordinal/OrdinalRewriterRule.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.ordinal; import com.alibaba.druid.sql.ast.SQLExpr; @@ -23,148 +22,145 @@ import org.opensearch.sql.legacy.rewriter.matchtoterm.VerificationException; /** - * Rewrite rule for changing ordinal alias in order by and group by to actual select field. - * Since we cannot clone or deepcopy the Druid SQL objects, we need to generate the - * two syntax tree from the original query to map Group By and Order By fields with ordinal alias - * to Select fields in newly generated syntax tree. + * Rewrite rule for changing ordinal alias in order by and group by to actual select field. Since we + * cannot clone or deepcopy the Druid SQL objects, we need to generate the two syntax tree from the + * original query to map Group By and Order By fields with ordinal alias to Select fields in newly + * generated syntax tree. * - * This rewriter assumes that all the backticks have been removed from identifiers. - * It also assumes that table alias have been removed from SELECT, WHERE, GROUP BY, ORDER BY fields. + *

This rewriter assumes that all the backticks have been removed from identifiers. It also + * assumes that table alias have been removed from SELECT, WHERE, GROUP BY, ORDER BY fields. */ - public class OrdinalRewriterRule implements RewriteRule { - private final String sql; + private final String sql; - public OrdinalRewriterRule(String sql) { - this.sql = sql; - } + public OrdinalRewriterRule(String sql) { + this.sql = sql; + } - @Override - public boolean match(SQLQueryExpr root) { - SQLSelectQuery sqlSelectQuery = root.getSubQuery().getQuery(); - if (!(sqlSelectQuery instanceof MySqlSelectQueryBlock)) { - // it could be SQLUnionQuery - return false; - } - - MySqlSelectQueryBlock query = (MySqlSelectQueryBlock) sqlSelectQuery; - if (!hasGroupByWithOrdinals(query) && !hasOrderByWithOrdinals(query)) { - return false; - } - return true; + @Override + public boolean match(SQLQueryExpr root) { + SQLSelectQuery sqlSelectQuery = root.getSubQuery().getQuery(); + if (!(sqlSelectQuery instanceof MySqlSelectQueryBlock)) { + // it could be SQLUnionQuery + return false; } - @Override - public void rewrite(SQLQueryExpr root) { - // we cannot clone SQLSelectItem, so we need similar objects to assign to GroupBy and OrderBy items - SQLQueryExpr sqlExprGroupCopy = toSqlExpr(); - SQLQueryExpr sqlExprOrderCopy = toSqlExpr(); - - changeOrdinalAliasInGroupAndOrderBy(root, sqlExprGroupCopy, sqlExprOrderCopy); + MySqlSelectQueryBlock query = (MySqlSelectQueryBlock) sqlSelectQuery; + if (!hasGroupByWithOrdinals(query) && !hasOrderByWithOrdinals(query)) { + return false; } - - private void changeOrdinalAliasInGroupAndOrderBy(SQLQueryExpr root, - SQLQueryExpr exprGroup, - SQLQueryExpr exprOrder) { - root.accept(new MySqlASTVisitorAdapter() { - - private String groupException = "Invalid ordinal [%s] specified in [GROUP BY %s]"; - private String orderException = "Invalid ordinal [%s] specified in [ORDER BY %s]"; - - private List groupSelectList = ((MySqlSelectQueryBlock) 
exprGroup.getSubQuery().getQuery()) - .getSelectList(); - - private List orderSelectList = ((MySqlSelectQueryBlock) exprOrder.getSubQuery().getQuery()) - .getSelectList(); - - @Override - public boolean visit(MySqlSelectGroupByExpr groupByExpr) { - SQLExpr expr = groupByExpr.getExpr(); - if (expr instanceof SQLIntegerExpr) { - Integer ordinalValue = ((SQLIntegerExpr) expr).getNumber().intValue(); - SQLExpr newExpr = checkAndGet(groupSelectList, ordinalValue, groupException); - groupByExpr.setExpr(newExpr); - newExpr.setParent(groupByExpr); - } - return false; + return true; + } + + @Override + public void rewrite(SQLQueryExpr root) { + // we cannot clone SQLSelectItem, so we need similar objects to assign to GroupBy and OrderBy + // items + SQLQueryExpr sqlExprGroupCopy = toSqlExpr(); + SQLQueryExpr sqlExprOrderCopy = toSqlExpr(); + + changeOrdinalAliasInGroupAndOrderBy(root, sqlExprGroupCopy, sqlExprOrderCopy); + } + + private void changeOrdinalAliasInGroupAndOrderBy( + SQLQueryExpr root, SQLQueryExpr exprGroup, SQLQueryExpr exprOrder) { + root.accept( + new MySqlASTVisitorAdapter() { + + private String groupException = "Invalid ordinal [%s] specified in [GROUP BY %s]"; + private String orderException = "Invalid ordinal [%s] specified in [ORDER BY %s]"; + + private List groupSelectList = + ((MySqlSelectQueryBlock) exprGroup.getSubQuery().getQuery()).getSelectList(); + + private List orderSelectList = + ((MySqlSelectQueryBlock) exprOrder.getSubQuery().getQuery()).getSelectList(); + + @Override + public boolean visit(MySqlSelectGroupByExpr groupByExpr) { + SQLExpr expr = groupByExpr.getExpr(); + if (expr instanceof SQLIntegerExpr) { + Integer ordinalValue = ((SQLIntegerExpr) expr).getNumber().intValue(); + SQLExpr newExpr = checkAndGet(groupSelectList, ordinalValue, groupException); + groupByExpr.setExpr(newExpr); + newExpr.setParent(groupByExpr); } - - @Override - public boolean visit(SQLSelectOrderByItem orderByItem) { - SQLExpr expr = orderByItem.getExpr(); - 
Integer ordinalValue; - - if (expr instanceof SQLIntegerExpr) { - ordinalValue = ((SQLIntegerExpr) expr).getNumber().intValue(); - SQLExpr newExpr = checkAndGet(orderSelectList, ordinalValue, orderException); - orderByItem.setExpr(newExpr); - newExpr.setParent(orderByItem); - } else if (expr instanceof SQLBinaryOpExpr - && ((SQLBinaryOpExpr) expr).getLeft() instanceof SQLIntegerExpr) { - // support ORDER BY IS NULL/NOT NULL - SQLBinaryOpExpr binaryOpExpr = (SQLBinaryOpExpr) expr; - SQLIntegerExpr integerExpr = (SQLIntegerExpr) binaryOpExpr.getLeft(); - - ordinalValue = integerExpr.getNumber().intValue(); - SQLExpr newExpr = checkAndGet(orderSelectList, ordinalValue, orderException); - binaryOpExpr.setLeft(newExpr); - newExpr.setParent(binaryOpExpr); - } - - return false; + return false; + } + + @Override + public boolean visit(SQLSelectOrderByItem orderByItem) { + SQLExpr expr = orderByItem.getExpr(); + Integer ordinalValue; + + if (expr instanceof SQLIntegerExpr) { + ordinalValue = ((SQLIntegerExpr) expr).getNumber().intValue(); + SQLExpr newExpr = checkAndGet(orderSelectList, ordinalValue, orderException); + orderByItem.setExpr(newExpr); + newExpr.setParent(orderByItem); + } else if (expr instanceof SQLBinaryOpExpr + && ((SQLBinaryOpExpr) expr).getLeft() instanceof SQLIntegerExpr) { + // support ORDER BY IS NULL/NOT NULL + SQLBinaryOpExpr binaryOpExpr = (SQLBinaryOpExpr) expr; + SQLIntegerExpr integerExpr = (SQLIntegerExpr) binaryOpExpr.getLeft(); + + ordinalValue = integerExpr.getNumber().intValue(); + SQLExpr newExpr = checkAndGet(orderSelectList, ordinalValue, orderException); + binaryOpExpr.setLeft(newExpr); + newExpr.setParent(binaryOpExpr); } - }); - } - private SQLExpr checkAndGet(List selectList, Integer ordinal, String exception) { - if (ordinal > selectList.size()) { - throw new VerificationException(String.format(exception, ordinal, ordinal)); - } + return false; + } + }); + } - return selectList.get(ordinal-1).getExpr(); + private SQLExpr 
checkAndGet(List selectList, Integer ordinal, String exception) { + if (ordinal > selectList.size()) { + throw new VerificationException(String.format(exception, ordinal, ordinal)); } - private boolean hasGroupByWithOrdinals(MySqlSelectQueryBlock query) { - if (query.getGroupBy() == null) { - return false; - } else if (query.getGroupBy().getItems().isEmpty()){ - return false; - } + return selectList.get(ordinal - 1).getExpr(); + } - return query.getGroupBy().getItems().stream().anyMatch(x -> - x instanceof MySqlSelectGroupByExpr && ((MySqlSelectGroupByExpr) x).getExpr() instanceof SQLIntegerExpr - ); + private boolean hasGroupByWithOrdinals(MySqlSelectQueryBlock query) { + if (query.getGroupBy() == null) { + return false; + } else if (query.getGroupBy().getItems().isEmpty()) { + return false; } - private boolean hasOrderByWithOrdinals(MySqlSelectQueryBlock query) { - if (query.getOrderBy() == null) { - return false; - } else if (query.getOrderBy().getItems().isEmpty()){ - return false; - } - - /** - * The second condition checks valid AST that meets ORDER BY IS NULL/NOT NULL condition - * - * SQLSelectOrderByItem - * | - * SQLBinaryOpExpr (Is || IsNot) - * / \ - * SQLIdentifierExpr SQLNullExpr - */ - return query.getOrderBy().getItems().stream().anyMatch(x -> - x.getExpr() instanceof SQLIntegerExpr - || ( - x.getExpr() instanceof SQLBinaryOpExpr - && ((SQLBinaryOpExpr) x.getExpr()).getLeft() instanceof SQLIntegerExpr - ) - ); + return query.getGroupBy().getItems().stream() + .anyMatch( + x -> + x instanceof MySqlSelectGroupByExpr + && ((MySqlSelectGroupByExpr) x).getExpr() instanceof SQLIntegerExpr); + } + + private boolean hasOrderByWithOrdinals(MySqlSelectQueryBlock query) { + if (query.getOrderBy() == null) { + return false; + } else if (query.getOrderBy().getItems().isEmpty()) { + return false; } - private SQLQueryExpr toSqlExpr() { - SQLExprParser parser = new ElasticSqlExprParser(sql); - SQLExpr expr = parser.expr(); - return (SQLQueryExpr) expr; - } + /** + 
* The second condition checks valid AST that meets ORDER BY IS NULL/NOT NULL condition + * + *

SQLSelectOrderByItem | SQLBinaryOpExpr (Is || IsNot) / \ SQLIdentifierExpr SQLNullExpr + */ + return query.getOrderBy().getItems().stream() + .anyMatch( + x -> + x.getExpr() instanceof SQLIntegerExpr + || (x.getExpr() instanceof SQLBinaryOpExpr + && ((SQLBinaryOpExpr) x.getExpr()).getLeft() instanceof SQLIntegerExpr)); + } + + private SQLQueryExpr toSqlExpr() { + SQLExprParser parser = new ElasticSqlExprParser(sql); + SQLExpr expr = parser.expr(); + return (SQLQueryExpr) expr; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/parent/SQLExprParentSetter.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/parent/SQLExprParentSetter.java index 9de81f2ab1..3ad2955798 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/parent/SQLExprParentSetter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/parent/SQLExprParentSetter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.parent; import com.alibaba.druid.sql.ast.expr.SQLInListExpr; @@ -12,35 +11,27 @@ import com.alibaba.druid.sql.ast.expr.SQLQueryExpr; import com.alibaba.druid.sql.dialect.mysql.visitor.MySqlASTVisitorAdapter; -/** - * Add the parent for the node in {@link SQLQueryExpr} if it is missing. - */ +/** Add the parent for the node in {@link SQLQueryExpr} if it is missing. 
*/ public class SQLExprParentSetter extends MySqlASTVisitorAdapter { - /** - * Fix null parent problem which is required by SQLIdentifier.visit() - */ - @Override - public boolean visit(SQLInSubQueryExpr subQuery) { - subQuery.getExpr().setParent(subQuery); - return true; - } + /** Fix null parent problem which is required by SQLIdentifier.visit() */ + @Override + public boolean visit(SQLInSubQueryExpr subQuery) { + subQuery.getExpr().setParent(subQuery); + return true; + } - /** - * Fix null parent problem which is required by SQLIdentifier.visit() - */ - @Override - public boolean visit(SQLInListExpr expr) { - expr.getExpr().setParent(expr); - return true; - } + /** Fix null parent problem which is required by SQLIdentifier.visit() */ + @Override + public boolean visit(SQLInListExpr expr) { + expr.getExpr().setParent(expr); + return true; + } - /** - * Fix the expr in {@link SQLNotExpr} without parent. - */ - @Override - public boolean visit(SQLNotExpr notExpr) { - notExpr.getExpr().setParent(notExpr); - return true; - } + /** Fix the expr in {@link SQLNotExpr} without parent. 
*/ + @Override + public boolean visit(SQLNotExpr notExpr) { + notExpr.getExpr().setParent(notExpr); + return true; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/parent/SQLExprParentSetterRule.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/parent/SQLExprParentSetterRule.java index 62ad0765d8..b623998b6e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/parent/SQLExprParentSetterRule.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/parent/SQLExprParentSetterRule.java @@ -3,24 +3,21 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.parent; import com.alibaba.druid.sql.ast.expr.SQLQueryExpr; import org.opensearch.sql.legacy.rewriter.RewriteRule; -/** - * The {@link RewriteRule} which will apply {@link SQLExprParentSetter} for {@link SQLQueryExpr} - */ +/** The {@link RewriteRule} which will apply {@link SQLExprParentSetter} for {@link SQLQueryExpr} */ public class SQLExprParentSetterRule implements RewriteRule { - @Override - public boolean match(SQLQueryExpr expr) { - return true; - } + @Override + public boolean match(SQLQueryExpr expr) { + return true; + } - @Override - public void rewrite(SQLQueryExpr expr) { - expr.accept(new SQLExprParentSetter()); - } + @Override + public void rewrite(SQLQueryExpr expr) { + expr.accept(new SQLExprParentSetter()); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/NestedQueryContext.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/NestedQueryContext.java index ce254e2103..4be5f3ec69 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/NestedQueryContext.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/NestedQueryContext.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.subquery; import com.alibaba.druid.sql.ast.statement.SQLExprTableSource; @@ 
-14,65 +13,60 @@ import java.util.Map; /** - * {@link NestedQueryContext} build the context with Query to detected the specified table is nested or not. - * Todo current implementation doesn't rely on the index mapping which should be added after the semantics is builded. + * {@link NestedQueryContext} build the context with Query to detected the specified table is nested + * or not. Todo current implementation doesn't rely on the index mapping which should be added after + * the semantics is builded. */ public class NestedQueryContext { - private static final String SEPARATOR = "."; - private static final String EMPTY = ""; - // , if parentTable not exist, parentTableAlias = ""; - private final Map aliasParents = new HashMap<>(); + private static final String SEPARATOR = "."; + private static final String EMPTY = ""; + // , if parentTable not exist, parentTableAlias = ""; + private final Map aliasParents = new HashMap<>(); - /** - * Is the table refer to the nested field of the parent table. - */ - public boolean isNested(SQLExprTableSource table) { - String parent = parent(table); - if (Strings.isNullOrEmpty(parent)) { - return !Strings.isNullOrEmpty(aliasParents.get(alias(table))); - } else { - return aliasParents.containsKey(parent); - } + /** Is the table refer to the nested field of the parent table. */ + public boolean isNested(SQLExprTableSource table) { + String parent = parent(table); + if (Strings.isNullOrEmpty(parent)) { + return !Strings.isNullOrEmpty(aliasParents.get(alias(table))); + } else { + return aliasParents.containsKey(parent); } + } - /** - * add table to the context. 
- */ - public void add(SQLTableSource table) { - if (table instanceof SQLExprTableSource) { - process((SQLExprTableSource) table); - } else if (table instanceof SQLJoinTableSource) { - add(((SQLJoinTableSource) table).getLeft()); - add(((SQLJoinTableSource) table).getRight()); - } else { - throw new IllegalStateException("unsupported table source"); - } + /** add table to the context. */ + public void add(SQLTableSource table) { + if (table instanceof SQLExprTableSource) { + process((SQLExprTableSource) table); + } else if (table instanceof SQLJoinTableSource) { + add(((SQLJoinTableSource) table).getLeft()); + add(((SQLJoinTableSource) table).getRight()); + } else { + throw new IllegalStateException("unsupported table source"); } + } - private void process(SQLExprTableSource table) { - String alias = alias(table); - String parent = parent(table); - if (!Strings.isNullOrEmpty(alias)) { - aliasParents.putIfAbsent(alias, parent); - } + private void process(SQLExprTableSource table) { + String alias = alias(table); + String parent = parent(table); + if (!Strings.isNullOrEmpty(alias)) { + aliasParents.putIfAbsent(alias, parent); } + } - /** - * Extract the parent alias from the tableName. For example - * SELECT * FROM employee e, e.project as p, - * For expr: employee, the parent alias is "". - * For expr: e.project, the parent alias is e. - */ - private String parent(SQLExprTableSource table) { - String tableName = table.getExpr().toString(); - int index = tableName.indexOf(SEPARATOR); - return index == -1 ? EMPTY : tableName.substring(0, index); - } + /** + * Extract the parent alias from the tableName. For example SELECT * FROM employee e, e.project as + * p, For expr: employee, the parent alias is "". For expr: e.project, the parent alias is e. + */ + private String parent(SQLExprTableSource table) { + String tableName = table.getExpr().toString(); + int index = tableName.indexOf(SEPARATOR); + return index == -1 ? 
EMPTY : tableName.substring(0, index); + } - private String alias(SQLExprTableSource table) { - if (Strings.isNullOrEmpty(table.getAlias())) { - return table.getExpr().toString(); - } - return table.getAlias(); + private String alias(SQLExprTableSource table) { + if (Strings.isNullOrEmpty(table.getAlias())) { + return table.getExpr().toString(); } + return table.getAlias(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/RewriterContext.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/RewriterContext.java index 09698095e6..54cba6547b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/RewriterContext.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/RewriterContext.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.subquery; import com.alibaba.druid.sql.ast.SQLExpr; @@ -19,68 +18,66 @@ import java.util.Deque; import java.util.List; -/** - * Environment for rewriting the SQL. - */ +/** Environment for rewriting the SQL. 
*/ public class RewriterContext { - private final Deque tableStack = new ArrayDeque<>(); - private final Deque conditionStack = new ArrayDeque<>(); - private final List sqlInSubQueryExprs = new ArrayList<>(); - private final List sqlExistsExprs = new ArrayList<>(); - private final NestedQueryContext nestedQueryDetector = new NestedQueryContext(); + private final Deque tableStack = new ArrayDeque<>(); + private final Deque conditionStack = new ArrayDeque<>(); + private final List sqlInSubQueryExprs = new ArrayList<>(); + private final List sqlExistsExprs = new ArrayList<>(); + private final NestedQueryContext nestedQueryDetector = new NestedQueryContext(); - public SQLTableSource popJoin() { - return tableStack.pop(); - } + public SQLTableSource popJoin() { + return tableStack.pop(); + } - public SQLExpr popWhere() { - return conditionStack.pop(); - } + public SQLExpr popWhere() { + return conditionStack.pop(); + } - public void addWhere(SQLExpr expr) { - conditionStack.push(expr); - } + public void addWhere(SQLExpr expr) { + conditionStack.push(expr); + } - /** - * Add the Join right table and {@link JoinType} and {@link SQLBinaryOpExpr} which will - * merge the left table in the tableStack. - */ - public void addJoin(SQLTableSource right, JoinType joinType, SQLBinaryOpExpr condition) { - SQLTableSource left = tableStack.pop(); - SQLJoinTableSource joinTableSource = new SQLJoinTableSource(); - joinTableSource.setLeft(left); - joinTableSource.setRight(right); - joinTableSource.setJoinType(joinType); - joinTableSource.setCondition(condition); - tableStack.push(joinTableSource); - } + /** + * Add the Join right table and {@link JoinType} and {@link SQLBinaryOpExpr} which will merge the + * left table in the tableStack. 
+ */ + public void addJoin(SQLTableSource right, JoinType joinType, SQLBinaryOpExpr condition) { + SQLTableSource left = tableStack.pop(); + SQLJoinTableSource joinTableSource = new SQLJoinTableSource(); + joinTableSource.setLeft(left); + joinTableSource.setRight(right); + joinTableSource.setJoinType(joinType); + joinTableSource.setCondition(condition); + tableStack.push(joinTableSource); + } - public void addJoin(SQLTableSource right, JoinType joinType) { - addJoin(right, joinType, null); - } + public void addJoin(SQLTableSource right, JoinType joinType) { + addJoin(right, joinType, null); + } - public void addTable(SQLTableSource table) { - tableStack.push(table); - nestedQueryDetector.add(table); - } + public void addTable(SQLTableSource table) { + tableStack.push(table); + nestedQueryDetector.add(table); + } - public boolean isNestedQuery(SQLExprTableSource table) { - return nestedQueryDetector.isNested(table); - } + public boolean isNestedQuery(SQLExprTableSource table) { + return nestedQueryDetector.isNested(table); + } - public void setInSubQuery(SQLInSubQueryExpr expr) { - sqlInSubQueryExprs.add(expr); - } + public void setInSubQuery(SQLInSubQueryExpr expr) { + sqlInSubQueryExprs.add(expr); + } - public void setExistsSubQuery(SQLExistsExpr expr) { - sqlExistsExprs.add(expr); - } + public void setExistsSubQuery(SQLExistsExpr expr) { + sqlExistsExprs.add(expr); + } - public List getSqlInSubQueryExprs() { - return sqlInSubQueryExprs; - } + public List getSqlInSubQueryExprs() { + return sqlInSubQueryExprs; + } - public List getSqlExistsExprs() { - return sqlExistsExprs; - } + public List getSqlExistsExprs() { + return sqlExistsExprs; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/SubQueryRewriteRule.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/SubQueryRewriteRule.java index 44a68b1bbb..5177b2d6d3 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/SubQueryRewriteRule.java 
+++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/SubQueryRewriteRule.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.subquery; import com.alibaba.druid.sql.ast.expr.SQLQueryExpr; @@ -12,42 +11,44 @@ import org.opensearch.sql.legacy.rewriter.subquery.rewriter.SubqueryAliasRewriter; import org.opensearch.sql.legacy.rewriter.subquery.utils.FindSubQuery; -/** - * Subquery Rewriter Rule. - */ +/** Subquery Rewriter Rule. */ public class SubQueryRewriteRule implements RewriteRule { - private FindSubQuery findAllSubQuery = new FindSubQuery(); - - @Override - public boolean match(SQLQueryExpr expr) throws SQLFeatureNotSupportedException { - expr.accept(findAllSubQuery); - - if (isContainSubQuery(findAllSubQuery)) { - if (isSupportedSubQuery(findAllSubQuery)) { - return true; - } else { - throw new SQLFeatureNotSupportedException("Unsupported subquery. Only one EXISTS or IN is supported"); - } - } else { - return false; - } - } - - @Override - public void rewrite(SQLQueryExpr expr) { - expr.accept(new SubqueryAliasRewriter()); - new SubQueryRewriter().convert(expr.getSubQuery()); + private FindSubQuery findAllSubQuery = new FindSubQuery(); + + @Override + public boolean match(SQLQueryExpr expr) throws SQLFeatureNotSupportedException { + expr.accept(findAllSubQuery); + + if (isContainSubQuery(findAllSubQuery)) { + if (isSupportedSubQuery(findAllSubQuery)) { + return true; + } else { + throw new SQLFeatureNotSupportedException( + "Unsupported subquery. 
Only one EXISTS or IN is supported"); + } + } else { + return false; } - - private boolean isContainSubQuery(FindSubQuery allSubQuery) { - return !allSubQuery.getSqlExistsExprs().isEmpty() || !allSubQuery.getSqlInSubQueryExprs().isEmpty(); - } - - private boolean isSupportedSubQuery(FindSubQuery allSubQuery) { - if ((allSubQuery.getSqlInSubQueryExprs().size() == 1 && allSubQuery.getSqlExistsExprs().size() == 0) - || (allSubQuery.getSqlInSubQueryExprs().size() == 0 && allSubQuery.getSqlExistsExprs().size() == 1)) { - return true; - } - return false; + } + + @Override + public void rewrite(SQLQueryExpr expr) { + expr.accept(new SubqueryAliasRewriter()); + new SubQueryRewriter().convert(expr.getSubQuery()); + } + + private boolean isContainSubQuery(FindSubQuery allSubQuery) { + return !allSubQuery.getSqlExistsExprs().isEmpty() + || !allSubQuery.getSqlInSubQueryExprs().isEmpty(); + } + + private boolean isSupportedSubQuery(FindSubQuery allSubQuery) { + if ((allSubQuery.getSqlInSubQueryExprs().size() == 1 + && allSubQuery.getSqlExistsExprs().size() == 0) + || (allSubQuery.getSqlInSubQueryExprs().size() == 0 + && allSubQuery.getSqlExistsExprs().size() == 1)) { + return true; } + return false; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/SubQueryRewriter.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/SubQueryRewriter.java index fd503a0e9b..c788e8f559 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/SubQueryRewriter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/SubQueryRewriter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.subquery; import com.alibaba.druid.sql.ast.SQLExpr; @@ -19,73 +18,73 @@ import org.opensearch.sql.legacy.rewriter.subquery.utils.FindSubQuery; public class SubQueryRewriter { - private final RewriterContext ctx = new RewriterContext(); + private final RewriterContext ctx = 
new RewriterContext(); - public void convert(SQLSelect query) { - SQLSelectQuery queryExpr = query.getQuery(); - if (queryExpr instanceof MySqlSelectQueryBlock) { - MySqlSelectQueryBlock queryBlock = (MySqlSelectQueryBlock) queryExpr; - ctx.addTable(queryBlock.getFrom()); + public void convert(SQLSelect query) { + SQLSelectQuery queryExpr = query.getQuery(); + if (queryExpr instanceof MySqlSelectQueryBlock) { + MySqlSelectQueryBlock queryBlock = (MySqlSelectQueryBlock) queryExpr; + ctx.addTable(queryBlock.getFrom()); - queryBlock.setWhere(convertWhere(queryBlock.getWhere())); - queryBlock.setFrom(convertFrom(queryBlock.getFrom())); - } + queryBlock.setWhere(convertWhere(queryBlock.getWhere())); + queryBlock.setFrom(convertFrom(queryBlock.getFrom())); } + } - private SQLTableSource convertFrom(SQLTableSource expr) { - SQLTableSource join = ctx.popJoin(); - if (join != null) { - return join; - } - return expr; + private SQLTableSource convertFrom(SQLTableSource expr) { + SQLTableSource join = ctx.popJoin(); + if (join != null) { + return join; } + return expr; + } - private SQLExpr convertWhere(SQLExpr expr) { - if (expr instanceof SQLExistsExpr) { - ctx.setExistsSubQuery((SQLExistsExpr) expr); - rewriteSubQuery(expr, ((SQLExistsExpr) expr).getSubQuery()); - return ctx.popWhere(); - } else if (expr instanceof SQLInSubQueryExpr) { - ctx.setInSubQuery((SQLInSubQueryExpr) expr); - rewriteSubQuery(expr, ((SQLInSubQueryExpr) expr).getSubQuery()); - return ctx.popWhere(); - } else if (expr instanceof SQLBinaryOpExpr) { - SQLBinaryOpExpr binaryOpExpr = (SQLBinaryOpExpr) expr; - SQLExpr left = convertWhere(binaryOpExpr.getLeft()); - left.setParent(binaryOpExpr); - binaryOpExpr.setLeft(left); - SQLExpr right = convertWhere(binaryOpExpr.getRight()); - right.setParent(binaryOpExpr); - binaryOpExpr.setRight(right); - } - return expr; + private SQLExpr convertWhere(SQLExpr expr) { + if (expr instanceof SQLExistsExpr) { + ctx.setExistsSubQuery((SQLExistsExpr) expr); + 
rewriteSubQuery(expr, ((SQLExistsExpr) expr).getSubQuery()); + return ctx.popWhere(); + } else if (expr instanceof SQLInSubQueryExpr) { + ctx.setInSubQuery((SQLInSubQueryExpr) expr); + rewriteSubQuery(expr, ((SQLInSubQueryExpr) expr).getSubQuery()); + return ctx.popWhere(); + } else if (expr instanceof SQLBinaryOpExpr) { + SQLBinaryOpExpr binaryOpExpr = (SQLBinaryOpExpr) expr; + SQLExpr left = convertWhere(binaryOpExpr.getLeft()); + left.setParent(binaryOpExpr); + binaryOpExpr.setLeft(left); + SQLExpr right = convertWhere(binaryOpExpr.getRight()); + right.setParent(binaryOpExpr); + binaryOpExpr.setRight(right); } + return expr; + } - private void rewriteSubQuery(SQLExpr subQueryExpr, SQLSelect subQuerySelect) { - if (containSubQuery(subQuerySelect)) { - convert(subQuerySelect); - } else if (isSupportedSubQuery(ctx)){ - for (Rewriter rewriter : RewriterFactory.createRewriterList(subQueryExpr, ctx)) { - if (rewriter.canRewrite()) { - rewriter.rewrite(); - return; - } - } + private void rewriteSubQuery(SQLExpr subQueryExpr, SQLSelect subQuerySelect) { + if (containSubQuery(subQuerySelect)) { + convert(subQuerySelect); + } else if (isSupportedSubQuery(ctx)) { + for (Rewriter rewriter : RewriterFactory.createRewriterList(subQueryExpr, ctx)) { + if (rewriter.canRewrite()) { + rewriter.rewrite(); + return; } - throw new IllegalStateException("Unsupported subquery"); + } } + throw new IllegalStateException("Unsupported subquery"); + } - private boolean containSubQuery(SQLSelect query) { - FindSubQuery findSubQuery = new FindSubQuery().continueVisitWhenFound(false); - query.accept(findSubQuery); - return findSubQuery.hasSubQuery(); - } + private boolean containSubQuery(SQLSelect query) { + FindSubQuery findSubQuery = new FindSubQuery().continueVisitWhenFound(false); + query.accept(findSubQuery); + return findSubQuery.hasSubQuery(); + } - private boolean isSupportedSubQuery(RewriterContext ctx) { - if ((ctx.getSqlInSubQueryExprs().size() == 1 && 
ctx.getSqlExistsExprs().size() == 0) - || (ctx.getSqlInSubQueryExprs().size() == 0 && ctx.getSqlExistsExprs().size() == 1)) { - return true; - } - return false; + private boolean isSupportedSubQuery(RewriterContext ctx) { + if ((ctx.getSqlInSubQueryExprs().size() == 1 && ctx.getSqlExistsExprs().size() == 0) + || (ctx.getSqlInSubQueryExprs().size() == 0 && ctx.getSqlExistsExprs().size() == 1)) { + return true; } + return false; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/InRewriter.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/InRewriter.java index 99505e5e49..587651fb92 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/InRewriter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/InRewriter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.subquery.rewriter; import com.alibaba.druid.sql.ast.SQLExpr; @@ -17,87 +16,76 @@ import org.opensearch.sql.legacy.rewriter.subquery.RewriterContext; /** - * IN Subquery Rewriter. - * For example, - * SELECT * FROM A WHERE a IN (SELECT b FROM B) and c > 10 should be rewritten to - * SELECT A.* FROM A JOIN B ON A.a = B.b WHERE c > 10 and B.b IS NOT NULL. + * IN Subquery Rewriter. For example, SELECT * FROM A WHERE a IN (SELECT b FROM B) and c > 10 should + * be rewritten to SELECT A.* FROM A JOIN B ON A.a = B.b WHERE c > 10 and B.b IS NOT NULL. 
*/ public class InRewriter implements Rewriter { - private final SQLInSubQueryExpr inExpr; - private final RewriterContext ctx; - private final MySqlSelectQueryBlock queryBlock; + private final SQLInSubQueryExpr inExpr; + private final RewriterContext ctx; + private final MySqlSelectQueryBlock queryBlock; - public InRewriter(SQLInSubQueryExpr inExpr, RewriterContext ctx) { - this.inExpr = inExpr; - this.ctx = ctx; - this.queryBlock = (MySqlSelectQueryBlock) inExpr.getSubQuery().getQuery(); - } + public InRewriter(SQLInSubQueryExpr inExpr, RewriterContext ctx) { + this.inExpr = inExpr; + this.ctx = ctx; + this.queryBlock = (MySqlSelectQueryBlock) inExpr.getSubQuery().getQuery(); + } - @Override - public boolean canRewrite() { - return !inExpr.isNot(); - } + @Override + public boolean canRewrite() { + return !inExpr.isNot(); + } - /** - * Build Where clause from input query. - *

- * With the input query. - * Query - * / | \ - * SELECT FROM WHERE - * | | / | \ - * * A c>10 AND INSubquery - * / \ - * a Query - * / \ - * SELECT FROM - * | | - * b B - *

- * - */ - @Override - public void rewrite() { - SQLTableSource from = queryBlock.getFrom(); - addJoinTable(from); + /** + * Build Where clause from input query. + * + *

With the input query. Query / | \ SELECT FROM WHERE | | / | \ * A c>10 AND INSubquery / \ a + * Query / \ SELECT FROM | | b B + * + *

+ */ + @Override + public void rewrite() { + SQLTableSource from = queryBlock.getFrom(); + addJoinTable(from); - SQLExpr where = queryBlock.getWhere(); - if (null == where) { - ctx.addWhere(generateNullOp()); - } else if (where instanceof SQLBinaryOpExpr) { - ctx.addWhere(and(generateNullOp(), (SQLBinaryOpExpr) where)); - } else { - throw new IllegalStateException("unsupported where class type " + where.getClass()); - } + SQLExpr where = queryBlock.getWhere(); + if (null == where) { + ctx.addWhere(generateNullOp()); + } else if (where instanceof SQLBinaryOpExpr) { + ctx.addWhere(and(generateNullOp(), (SQLBinaryOpExpr) where)); + } else { + throw new IllegalStateException("unsupported where class type " + where.getClass()); } + } - /** - * Build the Null check expression. For example, - * SELECT * FROM A WHERE a IN (SELECT b FROM B), should return B.b IS NOT NULL - */ - private SQLBinaryOpExpr generateNullOp() { - SQLBinaryOpExpr binaryOpExpr = new SQLBinaryOpExpr(); - binaryOpExpr.setLeft(fetchJoinExpr()); - binaryOpExpr.setRight(new SQLNullExpr()); - binaryOpExpr.setOperator(SQLBinaryOperator.IsNot); + /** + * Build the Null check expression. For example, SELECT * FROM A WHERE a IN (SELECT b FROM B), + * should return B.b IS NOT NULL + */ + private SQLBinaryOpExpr generateNullOp() { + SQLBinaryOpExpr binaryOpExpr = new SQLBinaryOpExpr(); + binaryOpExpr.setLeft(fetchJoinExpr()); + binaryOpExpr.setRight(new SQLNullExpr()); + binaryOpExpr.setOperator(SQLBinaryOperator.IsNot); - return binaryOpExpr; - } + return binaryOpExpr; + } - /** - * Add the {@link SQLTableSource} with {@link JoinType} and {@link SQLBinaryOpExpr} to the {@link RewriterContext}. 
- */ - private void addJoinTable(SQLTableSource right) { - SQLBinaryOpExpr binaryOpExpr = new SQLBinaryOpExpr(inExpr.getExpr(), - SQLBinaryOperator.Equality, - fetchJoinExpr()); - ctx.addJoin(right, JoinType.JOIN, binaryOpExpr); - } + /** + * Add the {@link SQLTableSource} with {@link JoinType} and {@link SQLBinaryOpExpr} to the {@link + * RewriterContext}. + */ + private void addJoinTable(SQLTableSource right) { + SQLBinaryOpExpr binaryOpExpr = + new SQLBinaryOpExpr(inExpr.getExpr(), SQLBinaryOperator.Equality, fetchJoinExpr()); + ctx.addJoin(right, JoinType.JOIN, binaryOpExpr); + } - private SQLExpr fetchJoinExpr() { - if (queryBlock.getSelectList().size() > 1) { - throw new IllegalStateException("Unsupported subquery with multiple select " + queryBlock.getSelectList()); - } - return queryBlock.getSelectList().get(0).getExpr(); + private SQLExpr fetchJoinExpr() { + if (queryBlock.getSelectList().size() > 1) { + throw new IllegalStateException( + "Unsupported subquery with multiple select " + queryBlock.getSelectList()); } + return queryBlock.getSelectList().get(0).getExpr(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/NestedExistsRewriter.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/NestedExistsRewriter.java index c7656e420f..ad4996875a 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/NestedExistsRewriter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/NestedExistsRewriter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.subquery.rewriter; import com.alibaba.druid.sql.ast.SQLExpr; @@ -18,78 +17,71 @@ import org.opensearch.sql.legacy.rewriter.subquery.RewriterContext; /** - * Nested EXISTS SQL Rewriter. - * The EXISTS clause will be remove from the SQL. The translated SQL will use ElasticSearch's nested query logic. + * Nested EXISTS SQL Rewriter. 
The EXISTS clause will be remove from the SQL. The translated SQL + * will use ElasticSearch's nested query logic. + * + *

For example, * - * For example, - *

- * SELECT e.name - * FROM employee as e, e.projects as p - * WHERE EXISTS (SELECT * FROM p) - * should be rewritten to - * SELECT e.name - * FROM employee as e, e.projects as p - * WHERE p is not null - *

+ *

SELECT e.name FROM employee as e, e.projects as p WHERE EXISTS (SELECT * FROM p) should be + * rewritten to SELECT e.name FROM employee as e, e.projects as p WHERE p is not null */ public class NestedExistsRewriter implements Rewriter { - private final SQLExistsExpr existsExpr; - private final RewriterContext ctx; - private final SQLExprTableSource from; - private final SQLExpr where; + private final SQLExistsExpr existsExpr; + private final RewriterContext ctx; + private final SQLExprTableSource from; + private final SQLExpr where; - public NestedExistsRewriter(SQLExistsExpr existsExpr, RewriterContext board) { - this.existsExpr = existsExpr; - this.ctx = board; - MySqlSelectQueryBlock queryBlock = (MySqlSelectQueryBlock) existsExpr.getSubQuery().getQuery(); - if (queryBlock.getFrom() instanceof SQLExprTableSource) { - this.from = (SQLExprTableSource) queryBlock.getFrom(); - } else { - throw new IllegalStateException("unsupported expression in from " + queryBlock.getFrom().getClass()); - } - this.where = queryBlock.getWhere(); + public NestedExistsRewriter(SQLExistsExpr existsExpr, RewriterContext board) { + this.existsExpr = existsExpr; + this.ctx = board; + MySqlSelectQueryBlock queryBlock = (MySqlSelectQueryBlock) existsExpr.getSubQuery().getQuery(); + if (queryBlock.getFrom() instanceof SQLExprTableSource) { + this.from = (SQLExprTableSource) queryBlock.getFrom(); + } else { + throw new IllegalStateException( + "unsupported expression in from " + queryBlock.getFrom().getClass()); } + this.where = queryBlock.getWhere(); + } - /** - * The from table must be nested field. - */ - @Override - public boolean canRewrite() { - return ctx.isNestedQuery(from); - } + /** The from table must be nested field. 
*/ + @Override + public boolean canRewrite() { + return ctx.isNestedQuery(from); + } - @Override - public void rewrite() { - ctx.addJoin(from, JoinType.COMMA); - ctx.addWhere(rewriteExistsWhere()); - } + @Override + public void rewrite() { + ctx.addJoin(from, JoinType.COMMA); + ctx.addWhere(rewriteExistsWhere()); + } - private SQLExpr rewriteExistsWhere() { - SQLBinaryOpExpr translatedWhere; - SQLBinaryOpExpr notMissingOp = buildNotMissingOp(); - if (null == where) { - translatedWhere = notMissingOp; - } else if (where instanceof SQLBinaryOpExpr) { - translatedWhere = and(notMissingOp, (SQLBinaryOpExpr) where); - } else { - throw new IllegalStateException("unsupported expression in where " + where.getClass()); - } + private SQLExpr rewriteExistsWhere() { + SQLBinaryOpExpr translatedWhere; + SQLBinaryOpExpr notMissingOp = buildNotMissingOp(); + if (null == where) { + translatedWhere = notMissingOp; + } else if (where instanceof SQLBinaryOpExpr) { + translatedWhere = and(notMissingOp, (SQLBinaryOpExpr) where); + } else { + throw new IllegalStateException("unsupported expression in where " + where.getClass()); + } - if (existsExpr.isNot()) { - SQLNotExpr sqlNotExpr = new SQLNotExpr(translatedWhere); - translatedWhere.setParent(sqlNotExpr); - return sqlNotExpr; - } else { - return translatedWhere; - } + if (existsExpr.isNot()) { + SQLNotExpr sqlNotExpr = new SQLNotExpr(translatedWhere); + translatedWhere.setParent(sqlNotExpr); + return sqlNotExpr; + } else { + return translatedWhere; } + } - private SQLBinaryOpExpr buildNotMissingOp() { - SQLBinaryOpExpr binaryOpExpr = new SQLBinaryOpExpr(); - binaryOpExpr.setLeft(new SQLIdentifierExpr(from.getAlias())); - binaryOpExpr.setRight(new SQLIdentifierExpr("MISSING")); - binaryOpExpr.setOperator(SQLBinaryOperator.IsNot); + private SQLBinaryOpExpr buildNotMissingOp() { + SQLBinaryOpExpr binaryOpExpr = new SQLBinaryOpExpr(); + binaryOpExpr.setLeft(new SQLIdentifierExpr(from.getAlias())); + binaryOpExpr.setRight(new 
SQLIdentifierExpr("MISSING")); + binaryOpExpr.setOperator(SQLBinaryOperator.IsNot); - return binaryOpExpr; - } + return binaryOpExpr; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/Rewriter.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/Rewriter.java index 5ca0a38d7f..a23eaaf514 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/Rewriter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/Rewriter.java @@ -3,28 +3,21 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.subquery.rewriter; import com.alibaba.druid.sql.ast.expr.SQLBinaryOpExpr; import com.alibaba.druid.sql.ast.expr.SQLBinaryOperator; -/** - * Interface of SQL Rewriter - */ +/** Interface of SQL Rewriter */ public interface Rewriter { - /** - * Whether the Rewriter can rewrite the SQL? - */ - boolean canRewrite(); + /** Whether the Rewriter can rewrite the SQL? */ + boolean canRewrite(); - /** - * Rewrite the SQL. - */ - void rewrite(); + /** Rewrite the SQL. 
*/ + void rewrite(); - default SQLBinaryOpExpr and(SQLBinaryOpExpr left, SQLBinaryOpExpr right) { - return new SQLBinaryOpExpr(left, SQLBinaryOperator.BooleanAnd, right); - } + default SQLBinaryOpExpr and(SQLBinaryOpExpr left, SQLBinaryOpExpr right) { + return new SQLBinaryOpExpr(left, SQLBinaryOperator.BooleanAnd, right); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/RewriterFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/RewriterFactory.java index ace333e981..6e6656ec37 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/RewriterFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/RewriterFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.subquery.rewriter; import com.alibaba.druid.sql.ast.SQLExpr; @@ -13,32 +12,26 @@ import java.util.List; import org.opensearch.sql.legacy.rewriter.subquery.RewriterContext; -/** - * Factory for generating the {@link Rewriter}. - */ +/** Factory for generating the {@link Rewriter}. */ public class RewriterFactory { - /** - * Create list of {@link Rewriter}. - */ - public static List createRewriterList(SQLExpr expr, RewriterContext bb) { - if (expr instanceof SQLExistsExpr) { - return existRewriterList((SQLExistsExpr) expr, bb); - } else if (expr instanceof SQLInSubQueryExpr) { - return inRewriterList((SQLInSubQueryExpr) expr, bb); - } - return ImmutableList.of(); + /** Create list of {@link Rewriter}. 
*/ + public static List createRewriterList(SQLExpr expr, RewriterContext bb) { + if (expr instanceof SQLExistsExpr) { + return existRewriterList((SQLExistsExpr) expr, bb); + } else if (expr instanceof SQLInSubQueryExpr) { + return inRewriterList((SQLInSubQueryExpr) expr, bb); } + return ImmutableList.of(); + } - private static List existRewriterList(SQLExistsExpr existsExpr, RewriterContext bb) { - return new ImmutableList.Builder() - .add(new NestedExistsRewriter(existsExpr, bb)) - .build(); - } + private static List existRewriterList(SQLExistsExpr existsExpr, RewriterContext bb) { + return new ImmutableList.Builder() + .add(new NestedExistsRewriter(existsExpr, bb)) + .build(); + } - private static List inRewriterList(SQLInSubQueryExpr inExpr, RewriterContext bb) { - return new ImmutableList.Builder() - .add(new InRewriter(inExpr, bb)) - .build(); - } + private static List inRewriterList(SQLInSubQueryExpr inExpr, RewriterContext bb) { + return new ImmutableList.Builder().add(new InRewriter(inExpr, bb)).build(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/SubqueryAliasRewriter.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/SubqueryAliasRewriter.java index e47027f024..cc9ba1e37b 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/SubqueryAliasRewriter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/rewriter/SubqueryAliasRewriter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.subquery.rewriter; import com.alibaba.druid.sql.ast.expr.SQLAllColumnExpr; @@ -20,147 +19,146 @@ import java.util.Deque; /** - * Add the alias for identifier the subquery query. - * Use the table alias if it already has one, Auto generate if it doesn't has one. - *

- * The following table demonstrate how the rewriter works with scope and query. + * Add the alias for identifier the subquery query. Use the table alias if it already has one, Auto + * generate if it doesn't has one. + * + *

The following table demonstrate how the rewriter works with scope and query. * +-----------------------+-------------+-----------------------------------------------------------------------------------------------------+ - * | Rewrite | TableScope | Query | + * | Rewrite | TableScope | Query | * +-----------------------+-------------+-----------------------------------------------------------------------------------------------------+ - * | (Start) | () | SELECT * FROM TbA WHERE a IN (SELECT b FROM TbB) and c > 10 | + * | (Start) | () | SELECT * FROM TbA WHERE a IN (SELECT b FROM TbB) and c > 10 | * +-----------------------+-------------+-----------------------------------------------------------------------------------------------------+ - * | MySqlSelectQueryBlock | (TbA,TbA_0) | SELECT * FROM TbA as TbA_0 WHERE a IN (SELECT b FROM TbB) and c > 10 | + * | MySqlSelectQueryBlock | (TbA,TbA_0) | SELECT * FROM TbA as TbA_0 WHERE a IN (SELECT b FROM TbB) + * and c > 10 | * +-----------------------+-------------+-----------------------------------------------------------------------------------------------------+ - * | Identifier in Select | (TbA,TbA_0) | SELECT TbA.* FROM TbA as TbA_0 WHERE a IN (SELECT b FROM TbB) and c > 10 | + * | Identifier in Select | (TbA,TbA_0) | SELECT TbA.* FROM TbA as TbA_0 WHERE a IN (SELECT b FROM + * TbB) and c > 10 | * +-----------------------+-------------+-----------------------------------------------------------------------------------------------------+ - * | Identifier in Where | (TbA,TbA_0) | SELECT TbA.* FROM TbA as TbA_0 WHERE TbA_0.a IN (SELECT b FROM TbB) and TbA_0.c > 10 | + * | Identifier in Where | (TbA,TbA_0) | SELECT TbA.* FROM TbA as TbA_0 WHERE TbA_0.a IN (SELECT b + * FROM TbB) and TbA_0.c > 10 | * +-----------------------+-------------+-----------------------------------------------------------------------------------------------------+ - * | MySqlSelectQueryBlock | (TbA,TbA_0) | SELECT TbA.* FROM TbA as TbA_0 WHERE 
TbA_0.a IN (SELECT b FROM TbB as TbB_0) and TbA_0.c > 10 | - * | | (TbB,TbB_0) | | + * | MySqlSelectQueryBlock | (TbA,TbA_0) | SELECT TbA.* FROM TbA as TbA_0 WHERE TbA_0.a IN (SELECT b + * FROM TbB as TbB_0) and TbA_0.c > 10 | | | (TbB,TbB_0) | | * +-----------------------+-------------+-----------------------------------------------------------------------------------------------------+ - * | Identifier in Select | (TbA,TbA_0) | SELECT TbA.* FROM TbA as TbA_0 WHERE TbA_0.a IN (SELECT TbB_0.b FROM TbB as TbB_0) and TbA_0.c > 10 | - * | | (TbB,TbB_0) | | + * | Identifier in Select | (TbA,TbA_0) | SELECT TbA.* FROM TbA as TbA_0 WHERE TbA_0.a IN (SELECT + * TbB_0.b FROM TbB as TbB_0) and TbA_0.c > 10 | | | (TbB,TbB_0) | | * +-----------------------+-------------+-----------------------------------------------------------------------------------------------------+ */ public class SubqueryAliasRewriter extends MySqlASTVisitorAdapter { - private final Deque

tableScope = new ArrayDeque<>(); - private int aliasSuffix = 0; - private static final String DOT = "."; - - @Override - public boolean visit(MySqlSelectQueryBlock query) { - SQLTableSource from = query.getFrom(); - if (from instanceof SQLExprTableSource) { - SQLExprTableSource expr = (SQLExprTableSource) from; - String tableName = expr.getExpr().toString().replaceAll(" ", ""); - - if (expr.getAlias() != null) { - tableScope.push(new Table(tableName, expr.getAlias())); - } else { - expr.setAlias(createAlias(tableName)); - tableScope.push(new Table(tableName, expr.getAlias())); - } - } - return true; + private final Deque
tableScope = new ArrayDeque<>(); + private int aliasSuffix = 0; + private static final String DOT = "."; + + @Override + public boolean visit(MySqlSelectQueryBlock query) { + SQLTableSource from = query.getFrom(); + if (from instanceof SQLExprTableSource) { + SQLExprTableSource expr = (SQLExprTableSource) from; + String tableName = expr.getExpr().toString().replaceAll(" ", ""); + + if (expr.getAlias() != null) { + tableScope.push(new Table(tableName, expr.getAlias())); + } else { + expr.setAlias(createAlias(tableName)); + tableScope.push(new Table(tableName, expr.getAlias())); + } } + return true; + } - @Override - public boolean visit(SQLIdentifierExpr expr) { - if (!tableScope.isEmpty() && (inSelect(expr) || inWhere(expr) || inSubquery(expr))) { - rewrite(tableScope.peek(), expr); - } - return true; + @Override + public boolean visit(SQLIdentifierExpr expr) { + if (!tableScope.isEmpty() && (inSelect(expr) || inWhere(expr) || inSubquery(expr))) { + rewrite(tableScope.peek(), expr); } + return true; + } - @Override - public boolean visit(SQLAllColumnExpr expr) { - if (!tableScope.isEmpty() && inSelect(expr)) { - ((SQLSelectItem) expr.getParent()).setExpr(createIdentifierExpr(tableScope.peek())); - } - return true; + @Override + public boolean visit(SQLAllColumnExpr expr) { + if (!tableScope.isEmpty() && inSelect(expr)) { + ((SQLSelectItem) expr.getParent()).setExpr(createIdentifierExpr(tableScope.peek())); } - - private boolean inSelect(SQLIdentifierExpr expr) { - return expr.getParent() instanceof SQLSelectItem; + return true; + } + + private boolean inSelect(SQLIdentifierExpr expr) { + return expr.getParent() instanceof SQLSelectItem; + } + + private boolean inSelect(SQLAllColumnExpr expr) { + return expr.getParent() instanceof SQLSelectItem; + } + + private boolean inWhere(SQLIdentifierExpr expr) { + return expr.getParent() instanceof SQLBinaryOpExpr + && !isESTable((SQLBinaryOpExpr) expr.getParent()); + } + + /** + * The table name in OpenSearch could be 
"index/type". Which represent as SQLBinaryOpExpr in AST. + */ + private boolean isESTable(SQLBinaryOpExpr expr) { + return expr.getOperator() == SQLBinaryOperator.Divide + && expr.getParent() instanceof SQLExprTableSource; + } + + private boolean inSubquery(SQLIdentifierExpr expr) { + return expr.getParent() instanceof SQLInSubQueryExpr; + } + + @Override + public void endVisit(MySqlSelectQueryBlock query) { + if (!tableScope.isEmpty()) { + tableScope.pop(); } + } - private boolean inSelect(SQLAllColumnExpr expr) { - return expr.getParent() instanceof SQLSelectItem; - } + private void rewrite(Table table, SQLIdentifierExpr expr) { + String tableAlias = table.getAlias(); + String tableName = table.getName(); - private boolean inWhere(SQLIdentifierExpr expr) { - return expr.getParent() instanceof SQLBinaryOpExpr && !isESTable((SQLBinaryOpExpr) expr.getParent()); + String exprName = expr.getName(); + if (exprName.startsWith(tableName + DOT) || exprName.startsWith(tableAlias + DOT)) { + expr.setName(exprName.replace(tableName + DOT, tableAlias + DOT)); + } else { + expr.setName(String.join(DOT, tableAlias, exprName)); } + } - /** - * The table name in OpenSearch could be "index/type". Which represent as SQLBinaryOpExpr in AST. 
- */ - private boolean isESTable(SQLBinaryOpExpr expr) { - return expr.getOperator() == SQLBinaryOperator.Divide && expr.getParent() instanceof SQLExprTableSource; - } + private SQLIdentifierExpr createIdentifierExpr(Table table) { + String newIdentifierName = String.join(DOT, table.getAlias(), "*"); + return new SQLIdentifierExpr(newIdentifierName); + } - private boolean inSubquery(SQLIdentifierExpr expr) { - return expr.getParent() instanceof SQLInSubQueryExpr; - } + private String createAlias(String alias) { + return String.format("%s_%d", alias, next()); + } - @Override - public void endVisit(MySqlSelectQueryBlock query) { - if (!tableScope.isEmpty()) { - tableScope.pop(); - } - } + private Integer next() { + return aliasSuffix++; + } - private void rewrite(Table table, SQLIdentifierExpr expr) { - String tableAlias = table.getAlias(); - String tableName = table.getName(); + /** Table Bean. */ + private static class Table { - String exprName = expr.getName(); - if (exprName.startsWith(tableName + DOT) || exprName.startsWith(tableAlias + DOT)) { - expr.setName(exprName.replace(tableName + DOT, tableAlias + DOT)); - } else { - expr.setName(String.join(DOT, tableAlias, exprName)); - } + public String getName() { + return name; } - private SQLIdentifierExpr createIdentifierExpr(Table table) { - String newIdentifierName = String.join(DOT, table.getAlias(), "*"); - return new SQLIdentifierExpr(newIdentifierName); + public String getAlias() { + return alias; } - private String createAlias(String alias) { - return String.format("%s_%d", alias, next()); - } + /** Table Name. */ + private String name; - private Integer next() { - return aliasSuffix++; - } + /** Table Alias. */ + private String alias; - /** - * Table Bean. - */ - private static class Table { - - public String getName() { - return name; - } - - public String getAlias() { - return alias; - } - - /** - * Table Name. - */ - private String name; - - /** - * Table Alias. 
- */ - private String alias; - - Table(String name, String alias) { - this.name = name; - this.alias = alias; - } + Table(String name, String alias) { + this.name = name; + this.alias = alias; } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/utils/FindSubQuery.java b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/utils/FindSubQuery.java index ec35151e4d..de6694d90d 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/utils/FindSubQuery.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/rewriter/subquery/utils/FindSubQuery.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.subquery.utils; import com.alibaba.druid.sql.ast.expr.SQLExistsExpr; @@ -12,43 +11,39 @@ import java.util.ArrayList; import java.util.List; -/** - * Visitor which try to find the SubQuery. - */ +/** Visitor which try to find the SubQuery. */ public class FindSubQuery extends MySqlASTVisitorAdapter { - private final List sqlInSubQueryExprs = new ArrayList<>(); - private final List sqlExistsExprs = new ArrayList<>(); - private boolean continueVisit = true; - - public FindSubQuery continueVisitWhenFound(boolean continueVisit) { - this.continueVisit = continueVisit; - return this; - } - - /** - * Return true if has SubQuery. 
- */ - public boolean hasSubQuery() { - return !sqlInSubQueryExprs.isEmpty() || !sqlExistsExprs.isEmpty(); - } - - @Override - public boolean visit(SQLInSubQueryExpr query) { - sqlInSubQueryExprs.add(query); - return continueVisit; - } - - @Override - public boolean visit(SQLExistsExpr query) { - sqlExistsExprs.add(query); - return continueVisit; - } - - public List getSqlInSubQueryExprs() { - return sqlInSubQueryExprs; - } - - public List getSqlExistsExprs() { - return sqlExistsExprs; - } + private final List sqlInSubQueryExprs = new ArrayList<>(); + private final List sqlExistsExprs = new ArrayList<>(); + private boolean continueVisit = true; + + public FindSubQuery continueVisitWhenFound(boolean continueVisit) { + this.continueVisit = continueVisit; + return this; + } + + /** Return true if has SubQuery. */ + public boolean hasSubQuery() { + return !sqlInSubQueryExprs.isEmpty() || !sqlExistsExprs.isEmpty(); + } + + @Override + public boolean visit(SQLInSubQueryExpr query) { + sqlInSubQueryExprs.add(query); + return continueVisit; + } + + @Override + public boolean visit(SQLExistsExpr query) { + sqlExistsExprs.add(query); + return continueVisit; + } + + public List getSqlInSubQueryExprs() { + return sqlInSubQueryExprs; + } + + public List getSqlExistsExprs() { + return sqlExistsExprs; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/BoundingBoxFilterParams.java b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/BoundingBoxFilterParams.java index df9f4c88b2..fb62f60ae7 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/BoundingBoxFilterParams.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/BoundingBoxFilterParams.java @@ -3,26 +3,23 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.spatial; -/** - * Created by Eliran on 1/8/2015. - */ +/** Created by Eliran on 1/8/2015. 
*/ public class BoundingBoxFilterParams { - private Point topLeft; - private Point bottomRight; + private Point topLeft; + private Point bottomRight; - public BoundingBoxFilterParams(Point topLeft, Point bottomRight) { - this.topLeft = topLeft; - this.bottomRight = bottomRight; - } + public BoundingBoxFilterParams(Point topLeft, Point bottomRight) { + this.topLeft = topLeft; + this.bottomRight = bottomRight; + } - public Point getTopLeft() { - return topLeft; - } + public Point getTopLeft() { + return topLeft; + } - public Point getBottomRight() { - return bottomRight; - } + public Point getBottomRight() { + return bottomRight; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/CellFilterParams.java b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/CellFilterParams.java index fc3dc35f07..6c50c17467 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/CellFilterParams.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/CellFilterParams.java @@ -3,36 +3,33 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.spatial; -/** - * Created by Eliran on 15/8/2015. - */ +/** Created by Eliran on 15/8/2015. 
*/ public class CellFilterParams { - private Point geohashPoint; - private int precision; - private boolean neighbors; - - public CellFilterParams(Point geohashPoint, int precision, boolean neighbors) { - this.geohashPoint = geohashPoint; - this.precision = precision; - this.neighbors = neighbors; - } - - public CellFilterParams(Point geohashPoint, int precision) { - this(geohashPoint, precision, false); - } - - public Point getGeohashPoint() { - return geohashPoint; - } - - public int getPrecision() { - return precision; - } - - public boolean isNeighbors() { - return neighbors; - } + private Point geohashPoint; + private int precision; + private boolean neighbors; + + public CellFilterParams(Point geohashPoint, int precision, boolean neighbors) { + this.geohashPoint = geohashPoint; + this.precision = precision; + this.neighbors = neighbors; + } + + public CellFilterParams(Point geohashPoint, int precision) { + this(geohashPoint, precision, false); + } + + public Point getGeohashPoint() { + return geohashPoint; + } + + public int getPrecision() { + return precision; + } + + public boolean isNeighbors() { + return neighbors; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/DistanceFilterParams.java b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/DistanceFilterParams.java index 1141da08ca..8c419de58d 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/DistanceFilterParams.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/DistanceFilterParams.java @@ -3,26 +3,23 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.spatial; -/** - * Created by Eliran on 1/8/2015. - */ +/** Created by Eliran on 1/8/2015. 
*/ public class DistanceFilterParams { - private String distance; - private Point from; + private String distance; + private Point from; - public DistanceFilterParams(String distance, Point from) { - this.distance = distance; - this.from = from; - } + public DistanceFilterParams(String distance, Point from) { + this.distance = distance; + this.from = from; + } - public String getDistance() { - return distance; - } + public String getDistance() { + return distance; + } - public Point getFrom() { - return from; - } + public Point getFrom() { + return from; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/Point.java b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/Point.java index c449ef1364..f3f8639a1c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/Point.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/Point.java @@ -3,26 +3,23 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.spatial; -/** - * Created by Eliran on 1/8/2015. - */ +/** Created by Eliran on 1/8/2015. 
*/ public class Point { - private double lon; - private double lat; + private double lon; + private double lat; - public Point(double lon, double lat) { - this.lon = lon; - this.lat = lat; - } + public Point(double lon, double lat) { + this.lon = lon; + this.lat = lat; + } - public double getLon() { - return lon; - } + public double getLon() { + return lon; + } - public double getLat() { - return lat; - } + public double getLat() { + return lat; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/PolygonFilterParams.java b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/PolygonFilterParams.java index 0d0592f519..1aeddb24a4 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/PolygonFilterParams.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/PolygonFilterParams.java @@ -3,22 +3,19 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.spatial; import java.util.List; -/** - * Created by Eliran on 15/8/2015. - */ +/** Created by Eliran on 15/8/2015. */ public class PolygonFilterParams { - private List polygon; + private List polygon; - public PolygonFilterParams(List polygon) { - this.polygon = polygon; - } + public PolygonFilterParams(List polygon) { + this.polygon = polygon; + } - public List getPolygon() { - return polygon; - } + public List getPolygon() { + return polygon; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/RangeDistanceFilterParams.java b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/RangeDistanceFilterParams.java index 91962332bf..0bdb01c3ce 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/RangeDistanceFilterParams.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/RangeDistanceFilterParams.java @@ -3,25 +3,22 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.spatial; -/** - * Created by Eliran on 15/8/2015. - */ +/** Created by Eliran on 15/8/2015. 
*/ public class RangeDistanceFilterParams extends DistanceFilterParams { - private String distanceTo; + private String distanceTo; - public RangeDistanceFilterParams(String distanceFrom, String distanceTo, Point from) { - super(distanceFrom, from); - this.distanceTo = distanceTo; - } + public RangeDistanceFilterParams(String distanceFrom, String distanceTo, Point from) { + super(distanceFrom, from); + this.distanceTo = distanceTo; + } - public String getDistanceTo() { - return distanceTo; - } + public String getDistanceTo() { + return distanceTo; + } - public String getDistanceFrom() { - return this.getDistance(); - } + public String getDistanceFrom() { + return this.getDistance(); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/SpatialParamsFactory.java b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/SpatialParamsFactory.java index 7b99d52e68..5e1102994e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/SpatialParamsFactory.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/SpatialParamsFactory.java @@ -3,103 +3,105 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.spatial; - import com.alibaba.druid.sql.ast.SQLExpr; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Set; -/** - * Created by Eliran on 1/8/2015. - */ +/** Created by Eliran on 1/8/2015. 
*/ public class SpatialParamsFactory { - public static Set allowedMethods; + public static Set allowedMethods; - static { - allowedMethods = new HashSet<>(); - allowedMethods.add("GEO_INTERSECTS"); - allowedMethods.add("GEO_BOUNDING_BOX"); - allowedMethods.add("GEO_DISTANCE"); - allowedMethods.add("GEO_DISTANCE_RANGE"); - allowedMethods.add("GEO_POLYGON"); - allowedMethods.add("GEO_CELL"); - } + static { + allowedMethods = new HashSet<>(); + allowedMethods.add("GEO_INTERSECTS"); + allowedMethods.add("GEO_BOUNDING_BOX"); + allowedMethods.add("GEO_DISTANCE"); + allowedMethods.add("GEO_DISTANCE_RANGE"); + allowedMethods.add("GEO_POLYGON"); + allowedMethods.add("GEO_CELL"); + } - public static boolean isAllowedMethod(String name) { - return allowedMethods.contains(name); - } + public static boolean isAllowedMethod(String name) { + return allowedMethods.contains(name); + } - public static Object generateSpatialParamsObject(String methodName, List params) { - switch (methodName) { - case "GEO_INTERSECTS": - if (params.size() != 2) { - throw new RuntimeException("GEO_INTERSECTS should have exactly 2 parameters : (fieldName,'WKT') "); - } - return params.get(1).toString(); - case "GEO_BOUNDING_BOX": - if (params.size() != 5) { - throw new RuntimeException("GEO_BOUNDING_BOX should have exactly 5 parameters : " - + "(fieldName,topLeftLon,topLeftLan,bottomRightLon,bottomRightLan) "); - } - double topLeftLon = Double.parseDouble(params.get(1).toString()); - double topLeftLat = Double.parseDouble(params.get(2).toString()); - double bottomRightLon = Double.parseDouble(params.get(3).toString()); - double bottomRightLat = Double.parseDouble(params.get(4).toString()); - return new BoundingBoxFilterParams(new Point(topLeftLon, topLeftLat), - new Point(bottomRightLon, bottomRightLat)); - case "GEO_DISTANCE": - if (params.size() != 4) { - throw new RuntimeException("GEO_DISTANCE should have exactly 4 parameters : " - + "(fieldName,distance,fromLon,fromLat) "); - } - String distance = 
params.get(1).toString(); - double lon = Double.parseDouble(params.get(2).toString()); - double lat = Double.parseDouble(params.get(3).toString()); - return new DistanceFilterParams(distance, new Point(lon, lat)); - case "GEO_DISTANCE_RANGE": - if (params.size() != 5) { - throw new RuntimeException("GEO_DISTANCE should have exactly 5 parameters : " - + "(fieldName,distanceFrom,distanceTo,fromLon,fromLat) "); - } - String distanceFrom = params.get(1).toString(); - String distanceTo = params.get(2).toString(); - lon = Double.parseDouble(params.get(3).toString()); - lat = Double.parseDouble(params.get(4).toString()); - return new RangeDistanceFilterParams(distanceFrom, distanceTo, new Point(lon, lat)); - case "GEO_POLYGON": - if (params.size() % 2 == 0 || params.size() <= 5) { - throw new RuntimeException("GEO_POLYGON should have odd num of parameters and > 5 : " - + "(fieldName,lon1,lat1,lon2,lat2,lon3,lat3,...) "); - } - int numberOfPoints = (params.size() - 1) / 2; - List points = new LinkedList<>(); - for (int i = 0; i < numberOfPoints; i++) { - int currentPointLocation = 1 + i * 2; - lon = Double.parseDouble(params.get(currentPointLocation).toString()); - lat = Double.parseDouble(params.get(currentPointLocation + 1).toString()); - points.add(new Point(lon, lat)); - } - return new PolygonFilterParams(points); - case "GEO_CELL": - if (params.size() < 4 || params.size() > 5) { - throw new RuntimeException("GEO_CELL should have 4 or 5 params " - + "(fieldName,lon,lat,precision,neighbors(optional)) "); - } - lon = Double.parseDouble(params.get(1).toString()); - lat = Double.parseDouble(params.get(2).toString()); - Point geoHashPoint = new Point(lon, lat); - int precision = Integer.parseInt(params.get(3).toString()); - if (params.size() == 4) { - return new CellFilterParams(geoHashPoint, precision); - } - boolean neighbors = Boolean.parseBoolean(params.get(4).toString()); - return new CellFilterParams(geoHashPoint, precision, neighbors); - default: - throw new 
RuntimeException(String.format("Unknown method name: %s", methodName)); + public static Object generateSpatialParamsObject(String methodName, List params) { + switch (methodName) { + case "GEO_INTERSECTS": + if (params.size() != 2) { + throw new RuntimeException( + "GEO_INTERSECTS should have exactly 2 parameters : (fieldName,'WKT') "); + } + return params.get(1).toString(); + case "GEO_BOUNDING_BOX": + if (params.size() != 5) { + throw new RuntimeException( + "GEO_BOUNDING_BOX should have exactly 5 parameters : " + + "(fieldName,topLeftLon,topLeftLan,bottomRightLon,bottomRightLan) "); + } + double topLeftLon = Double.parseDouble(params.get(1).toString()); + double topLeftLat = Double.parseDouble(params.get(2).toString()); + double bottomRightLon = Double.parseDouble(params.get(3).toString()); + double bottomRightLat = Double.parseDouble(params.get(4).toString()); + return new BoundingBoxFilterParams( + new Point(topLeftLon, topLeftLat), new Point(bottomRightLon, bottomRightLat)); + case "GEO_DISTANCE": + if (params.size() != 4) { + throw new RuntimeException( + "GEO_DISTANCE should have exactly 4 parameters : " + + "(fieldName,distance,fromLon,fromLat) "); + } + String distance = params.get(1).toString(); + double lon = Double.parseDouble(params.get(2).toString()); + double lat = Double.parseDouble(params.get(3).toString()); + return new DistanceFilterParams(distance, new Point(lon, lat)); + case "GEO_DISTANCE_RANGE": + if (params.size() != 5) { + throw new RuntimeException( + "GEO_DISTANCE should have exactly 5 parameters : " + + "(fieldName,distanceFrom,distanceTo,fromLon,fromLat) "); + } + String distanceFrom = params.get(1).toString(); + String distanceTo = params.get(2).toString(); + lon = Double.parseDouble(params.get(3).toString()); + lat = Double.parseDouble(params.get(4).toString()); + return new RangeDistanceFilterParams(distanceFrom, distanceTo, new Point(lon, lat)); + case "GEO_POLYGON": + if (params.size() % 2 == 0 || params.size() <= 5) { + throw new 
RuntimeException( + "GEO_POLYGON should have odd num of parameters and > 5 : " + + "(fieldName,lon1,lat1,lon2,lat2,lon3,lat3,...) "); + } + int numberOfPoints = (params.size() - 1) / 2; + List points = new LinkedList<>(); + for (int i = 0; i < numberOfPoints; i++) { + int currentPointLocation = 1 + i * 2; + lon = Double.parseDouble(params.get(currentPointLocation).toString()); + lat = Double.parseDouble(params.get(currentPointLocation + 1).toString()); + points.add(new Point(lon, lat)); + } + return new PolygonFilterParams(points); + case "GEO_CELL": + if (params.size() < 4 || params.size() > 5) { + throw new RuntimeException( + "GEO_CELL should have 4 or 5 params " + + "(fieldName,lon,lat,precision,neighbors(optional)) "); + } + lon = Double.parseDouble(params.get(1).toString()); + lat = Double.parseDouble(params.get(2).toString()); + Point geoHashPoint = new Point(lon, lat); + int precision = Integer.parseInt(params.get(3).toString()); + if (params.size() == 4) { + return new CellFilterParams(geoHashPoint, precision); } + boolean neighbors = Boolean.parseBoolean(params.get(4).toString()); + return new CellFilterParams(geoHashPoint, precision, neighbors); + default: + throw new RuntimeException(String.format("Unknown method name: %s", methodName)); } + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/WktToGeoJsonConverter.java b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/WktToGeoJsonConverter.java index 99bc8f0742..13f51ed777 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/spatial/WktToGeoJsonConverter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/spatial/WktToGeoJsonConverter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.spatial; import com.google.common.base.Joiner; @@ -11,160 +10,156 @@ import java.util.List; import org.opensearch.sql.legacy.utils.StringUtils; -/** - * Created by Eliran on 4/8/2015. - */ +/** Created by Eliran on 4/8/2015. 
*/ public class WktToGeoJsonConverter { - public static String toGeoJson(String wkt) { - wkt = wkt.toLowerCase(); - int startOfCoordinates = wkt.indexOf("("); - if (startOfCoordinates == -1) { - throw new IllegalArgumentException( - StringUtils.format("Failed to convert well-known-text [%s] to geometry type", wkt)); - } - - String wktType = wkt.substring(0, startOfCoordinates).trim(); - wkt = wkt.substring(startOfCoordinates); - - String type = ""; - String coordinates = ""; - switch (wktType) { - case ("point"): - type = "Point"; - coordinates = pointCoordinatesFromWkt(wkt); - break; - case ("polygon"): - type = "Polygon"; - coordinates = polygonCoordinatesFromWkt(wkt); - break; - case ("linestring"): - type = "LineString"; - coordinates = lineStringCoordinatesFromWkt(wkt); - break; - case ("multipolygon"): - type = "MultiPolygon"; - coordinates = multiPolygonCoordinatesFromWkt(wkt); - break; - case ("multipoint"): - type = "MultiPoint"; - coordinates = multiPointCoordinatesFromWkt(wkt); - break; - case ("multilinestring"): - type = "MultiLineString"; - coordinates = multiLineStringCoordinatesFromWkt(wkt); - break; - default: - throw new IllegalArgumentException("Unsupported well-known-text type: " + wktType); - - } - - return buildGeoJson(type, coordinates); + public static String toGeoJson(String wkt) { + wkt = wkt.toLowerCase(); + int startOfCoordinates = wkt.indexOf("("); + if (startOfCoordinates == -1) { + throw new IllegalArgumentException( + StringUtils.format("Failed to convert well-known-text [%s] to geometry type", wkt)); } - //input: ((10 10, 20 20, 10 40),(40 40, 30 30, 40 20, 30 10)) - private static String multiLineStringCoordinatesFromWkt(String wkt) { - wkt = removeBrackets(wkt, 1); - String lineStringsWithPipeSeparator = wkt.replaceAll("\\s*\\)\\s*,\\s*\\(", ")|("); - String[] lineStrings = lineStringsWithPipeSeparator.split("\\|"); - String[] coordinates = new String[lineStrings.length]; - for (int i = 0; i < lineStrings.length; i++) { - 
coordinates[i] = lineStringCoordinatesFromWkt(lineStrings[i]); - } - String multiLineStringCoordinates = Joiner.on(",").join(coordinates); - return String.format("[%s]", multiLineStringCoordinates); - + String wktType = wkt.substring(0, startOfCoordinates).trim(); + wkt = wkt.substring(startOfCoordinates); + + String type = ""; + String coordinates = ""; + switch (wktType) { + case ("point"): + type = "Point"; + coordinates = pointCoordinatesFromWkt(wkt); + break; + case ("polygon"): + type = "Polygon"; + coordinates = polygonCoordinatesFromWkt(wkt); + break; + case ("linestring"): + type = "LineString"; + coordinates = lineStringCoordinatesFromWkt(wkt); + break; + case ("multipolygon"): + type = "MultiPolygon"; + coordinates = multiPolygonCoordinatesFromWkt(wkt); + break; + case ("multipoint"): + type = "MultiPoint"; + coordinates = multiPointCoordinatesFromWkt(wkt); + break; + case ("multilinestring"): + type = "MultiLineString"; + coordinates = multiLineStringCoordinatesFromWkt(wkt); + break; + default: + throw new IllegalArgumentException("Unsupported well-known-text type: " + wktType); } - //input v1:MULTIPOINT (10 40, 40 30, 20 20, 30 10) - //v2:MULTIPOINT ((10 40), (40 30), (20 20), (30 10)) - private static String multiPointCoordinatesFromWkt(String wkt) { - wkt = removeBrackets(wkt, 1); - boolean isSecondVersionMultiPoint = wkt.contains("("); - String coordinates = ""; - if (isSecondVersionMultiPoint) { - //(10 40), (40 30), (20 20)-> 10 40, 40 30, 20 20 - wkt = wkt.replaceAll("\\(|\\)", ""); - } - coordinates = getJsonArrayFromListOfPoints(wkt); - return coordinates; + return buildGeoJson(type, coordinates); + } + + // input: ((10 10, 20 20, 10 40),(40 40, 30 30, 40 20, 30 10)) + private static String multiLineStringCoordinatesFromWkt(String wkt) { + wkt = removeBrackets(wkt, 1); + String lineStringsWithPipeSeparator = wkt.replaceAll("\\s*\\)\\s*,\\s*\\(", ")|("); + String[] lineStrings = lineStringsWithPipeSeparator.split("\\|"); + String[] coordinates = 
new String[lineStrings.length]; + for (int i = 0; i < lineStrings.length; i++) { + coordinates[i] = lineStringCoordinatesFromWkt(lineStrings[i]); } - - //input (((30 20, 45 40, 10 40, 30 20)),((15 5, 40 10, 10 20, 5 10, 15 5))) - private static String multiPolygonCoordinatesFromWkt(String wkt) { - wkt = removeBrackets(wkt, 1); - String polygonsWithPipeSeparator = wkt.replaceAll("\\s*\\)\\s*\\)\\s*,\\s*\\(\\s*\\(\\s*", "))|(("); - String[] polygons = polygonsWithPipeSeparator.split("\\|"); - String[] polygonsCoordinates = new String[polygons.length]; - for (int i = 0; i < polygons.length; i++) { - polygonsCoordinates[i] = polygonCoordinatesFromWkt(polygons[i]); - } - String coordinates = Joiner.on(",").join(polygonsCoordinates); - return String.format("[%s]", coordinates); + String multiLineStringCoordinates = Joiner.on(",").join(coordinates); + return String.format("[%s]", multiLineStringCoordinates); + } + + // input v1:MULTIPOINT (10 40, 40 30, 20 20, 30 10) + // v2:MULTIPOINT ((10 40), (40 30), (20 20), (30 10)) + private static String multiPointCoordinatesFromWkt(String wkt) { + wkt = removeBrackets(wkt, 1); + boolean isSecondVersionMultiPoint = wkt.contains("("); + String coordinates = ""; + if (isSecondVersionMultiPoint) { + // (10 40), (40 30), (20 20)-> 10 40, 40 30, 20 20 + wkt = wkt.replaceAll("\\(|\\)", ""); } - - //input : (30 10, 10 30, 40 40) - private static String lineStringCoordinatesFromWkt(String wkt) { - wkt = removeBrackets(wkt, 1); - return getJsonArrayFromListOfPoints(wkt); + coordinates = getJsonArrayFromListOfPoints(wkt); + return coordinates; + } + + // input (((30 20, 45 40, 10 40, 30 20)),((15 5, 40 10, 10 20, 5 10, 15 5))) + private static String multiPolygonCoordinatesFromWkt(String wkt) { + wkt = removeBrackets(wkt, 1); + String polygonsWithPipeSeparator = + wkt.replaceAll("\\s*\\)\\s*\\)\\s*,\\s*\\(\\s*\\(\\s*", "))|(("); + String[] polygons = polygonsWithPipeSeparator.split("\\|"); + String[] polygonsCoordinates = new 
String[polygons.length]; + for (int i = 0; i < polygons.length; i++) { + polygonsCoordinates[i] = polygonCoordinatesFromWkt(polygons[i]); } - - //input: v1:((35 10, 45 45, 15 40, 10 20, 35 10)) - //v2:((35 10, 45 45, 15 40, 10 20, 35 10),(20 30, 35 35, 30 20, 20 30)) - private static String polygonCoordinatesFromWkt(String wkt) { - wkt = removeBrackets(wkt, 2); - String coordinates; - boolean polygonContainsInnerHoles = wkt.contains("("); - if (polygonContainsInnerHoles) { - String[] polygons = wkt.split("\\s*\\)\\s*,\\s*\\(\\s*"); - String[] coordinatesOfPolygons = new String[polygons.length]; - for (int i = 0; i < polygons.length; i++) { - String polygonCoordinates = getJsonArrayFromListOfPoints(polygons[i]); - coordinatesOfPolygons[i] = polygonCoordinates; - } - coordinates = Joiner.on(",").join(coordinatesOfPolygons); - } else { - coordinates = getJsonArrayFromListOfPoints(wkt); - } - return String.format("[%s]", coordinates); + String coordinates = Joiner.on(",").join(polygonsCoordinates); + return String.format("[%s]", coordinates); + } + + // input : (30 10, 10 30, 40 40) + private static String lineStringCoordinatesFromWkt(String wkt) { + wkt = removeBrackets(wkt, 1); + return getJsonArrayFromListOfPoints(wkt); + } + + // input: v1:((35 10, 45 45, 15 40, 10 20, 35 10)) + // v2:((35 10, 45 45, 15 40, 10 20, 35 10),(20 30, 35 35, 30 20, 20 30)) + private static String polygonCoordinatesFromWkt(String wkt) { + wkt = removeBrackets(wkt, 2); + String coordinates; + boolean polygonContainsInnerHoles = wkt.contains("("); + if (polygonContainsInnerHoles) { + String[] polygons = wkt.split("\\s*\\)\\s*,\\s*\\(\\s*"); + String[] coordinatesOfPolygons = new String[polygons.length]; + for (int i = 0; i < polygons.length; i++) { + String polygonCoordinates = getJsonArrayFromListOfPoints(polygons[i]); + coordinatesOfPolygons[i] = polygonCoordinates; + } + coordinates = Joiner.on(",").join(coordinatesOfPolygons); + } else { + coordinates = 
getJsonArrayFromListOfPoints(wkt); } - - private static String getJsonArrayFromListOfPoints(String pointsInWkt) { - String[] points = pointsInWkt.split(","); - List coordinates = new ArrayList<>(); - for (String point : points) { - coordinates.add(extractCoordinateFromPoint(point)); - } - - String joinedCoordinates = Joiner.on(",").join(coordinates); - return String.format("[%s]", joinedCoordinates); + return String.format("[%s]", coordinates); + } + + private static String getJsonArrayFromListOfPoints(String pointsInWkt) { + String[] points = pointsInWkt.split(","); + List coordinates = new ArrayList<>(); + for (String point : points) { + coordinates.add(extractCoordinateFromPoint(point)); } - private static String buildGeoJson(String type, String coordinates) { - return String.format("{\"type\":\"%s\", \"coordinates\": %s}", type, coordinates); + String joinedCoordinates = Joiner.on(",").join(coordinates); + return String.format("[%s]", joinedCoordinates); + } + + private static String buildGeoJson(String type, String coordinates) { + return String.format("{\"type\":\"%s\", \"coordinates\": %s}", type, coordinates); + } + + // input : (30 10) + public static String pointCoordinatesFromWkt(String wkt) { + wkt = removeBrackets(wkt, 1); + return extractCoordinateFromPoint(wkt); + } + + private static String extractCoordinateFromPoint(String point) { + String pointPattern = "(\\s*)([0-9\\.-]+)(\\s*)([0-9\\.-]+)(\\s*)"; + return point.replaceAll(pointPattern, "[$2,$4]"); + } + + private static String removeBrackets(String wkt, int num) { + String result = wkt; + for (int i = 0; i < num; i++) { + int lastClosingBrackets = result.lastIndexOf(")"); + int firstOpenBrackets = result.indexOf("("); + if (lastClosingBrackets == -1 || firstOpenBrackets == -1) { + throw new IllegalArgumentException("Illegal syntax: " + wkt); + } + result = result.substring(firstOpenBrackets + 1, lastClosingBrackets); } - - //input : (30 10) - public static String pointCoordinatesFromWkt(String 
wkt) { - wkt = removeBrackets(wkt, 1); - return extractCoordinateFromPoint(wkt); - } - - private static String extractCoordinateFromPoint(String point) { - String pointPattern = "(\\s*)([0-9\\.-]+)(\\s*)([0-9\\.-]+)(\\s*)"; - return point.replaceAll(pointPattern, "[$2,$4]"); - } - - private static String removeBrackets(String wkt, int num) { - String result = wkt; - for (int i = 0; i < num; i++) { - int lastClosingBrackets = result.lastIndexOf(")"); - int firstOpenBrackets = result.indexOf("("); - if (lastClosingBrackets == -1 || firstOpenBrackets == -1) { - throw new IllegalArgumentException("Illegal syntax: " + wkt); - } - result = result.substring(firstOpenBrackets + 1, lastClosingBrackets); - } - return result; - } - + return result; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/utils/JsonPrettyFormatter.java b/legacy/src/main/java/org/opensearch/sql/legacy/utils/JsonPrettyFormatter.java index ecc86877ee..26f17feeb6 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/utils/JsonPrettyFormatter.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/utils/JsonPrettyFormatter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.utils; import com.fasterxml.jackson.core.JsonFactory; @@ -15,31 +14,29 @@ import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.core.xcontent.XContentParser; -/** - * Utility Class for formatting Json string pretty. - */ +/** Utility Class for formatting Json string pretty. 
*/ public class JsonPrettyFormatter { - /** - * @param jsonString Json string without/with pretty format - * @return A standard and pretty formatted json string - * @throws IOException - */ - public static String format(String jsonString) throws IOException { - //turn _explain response into pretty formatted Json - XContentBuilder contentBuilder = XContentFactory.jsonBuilder().prettyPrint(); - try ( - XContentParser contentParser = new JsonXContentParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(jsonString)) - ){ - contentBuilder.copyCurrentStructure(contentParser); - } - return contentBuilder.toString(); + /** + * @param jsonString Json string without/with pretty format + * @return A standard and pretty formatted json string + * @throws IOException + */ + public static String format(String jsonString) throws IOException { + // turn _explain response into pretty formatted Json + XContentBuilder contentBuilder = XContentFactory.jsonBuilder().prettyPrint(); + try (XContentParser contentParser = + new JsonXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(jsonString))) { + contentBuilder.copyCurrentStructure(contentParser); } + return contentBuilder.toString(); + } - private JsonPrettyFormatter() { - throw new AssertionError(getClass().getCanonicalName() + " is a utility class and must not be initialized"); - } + private JsonPrettyFormatter() { + throw new AssertionError( + getClass().getCanonicalName() + " is a utility class and must not be initialized"); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/utils/QueryDataAnonymizer.java b/legacy/src/main/java/org/opensearch/sql/legacy/utils/QueryDataAnonymizer.java index b58691c022..acf7a73ba5 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/utils/QueryDataAnonymizer.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/utils/QueryDataAnonymizer.java @@ -3,7 +3,6 
@@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.utils; import static org.opensearch.sql.legacy.utils.Util.toSqlExpr; @@ -14,35 +13,35 @@ import org.apache.logging.log4j.Logger; import org.opensearch.sql.legacy.rewriter.identifier.AnonymizeSensitiveDataRule; -/** - * Utility class to mask sensitive information in incoming SQL queries - */ +/** Utility class to mask sensitive information in incoming SQL queries */ public class QueryDataAnonymizer { - private static final Logger LOG = LogManager.getLogger(QueryDataAnonymizer.class); + private static final Logger LOG = LogManager.getLogger(QueryDataAnonymizer.class); - /** - * This method is used to anonymize sensitive data in SQL query. - * Sensitive data includes index names, column names etc., - * which in druid parser are parsed to SQLIdentifierExpr instances - * @param query entire sql query string - * @return sql query string with all identifiers replaced with "***" on success - * and failure string otherwise to ensure no non-anonymized data is logged in production. - */ - public static String anonymizeData(String query) { - String resultQuery; - try { - AnonymizeSensitiveDataRule rule = new AnonymizeSensitiveDataRule(); - SQLQueryExpr sqlExpr = (SQLQueryExpr) toSqlExpr(query); - rule.rewrite(sqlExpr); - resultQuery = SQLUtils.toMySqlString(sqlExpr).replaceAll("0", "number") - .replaceAll("false", "boolean_literal") - .replaceAll("[\\n][\\t]+", " "); - } catch (Exception e) { - LOG.warn("Caught an exception when anonymizing sensitive data."); - LOG.debug("String {} failed anonymization.", query); - resultQuery = "Failed to anonymize data."; - } - return resultQuery; + /** + * This method is used to anonymize sensitive data in SQL query. 
Sensitive data includes index + * names, column names etc., which in druid parser are parsed to SQLIdentifierExpr instances + * + * @param query entire sql query string + * @return sql query string with all identifiers replaced with "***" on success and failure string + * otherwise to ensure no non-anonymized data is logged in production. + */ + public static String anonymizeData(String query) { + String resultQuery; + try { + AnonymizeSensitiveDataRule rule = new AnonymizeSensitiveDataRule(); + SQLQueryExpr sqlExpr = (SQLQueryExpr) toSqlExpr(query); + rule.rewrite(sqlExpr); + resultQuery = + SQLUtils.toMySqlString(sqlExpr) + .replaceAll("0", "number") + .replaceAll("false", "boolean_literal") + .replaceAll("[\\n][\\t]+", " "); + } catch (Exception e) { + LOG.warn("Caught an exception when anonymizing sensitive data."); + LOG.debug("String {} failed anonymization.", query); + resultQuery = "Failed to anonymize data."; } + return resultQuery; + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/utils/SQLFunctions.java b/legacy/src/main/java/org/opensearch/sql/legacy/utils/SQLFunctions.java index de8e6eb0fa..d46a80f6d3 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/utils/SQLFunctions.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/utils/SQLFunctions.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.utils; import static org.opensearch.sql.legacy.utils.StringUtils.format; @@ -39,1019 +38,1171 @@ import org.opensearch.sql.legacy.exception.SqlParseException; import org.opensearch.sql.legacy.executor.format.Schema; -/** - * Created by allwefantasy on 8/19/16. - */ +/** Created by allwefantasy on 8/19/16. 
*/ public class SQLFunctions { - private static final Set numberOperators = Sets.newHashSet( - "exp", "expm1", "log", "log2", "log10", "ln", "sqrt", "cbrt", "ceil", "floor", "rint", "pow", "power", - "round", "rand", "abs", "sign", "signum" - ); - - private static final Set mathConstants = Sets.newHashSet("e", "pi"); - - private static final Set trigFunctions = Sets.newHashSet( - "degrees", "radians", "sin", "cos", "tan", "asin", "acos", "atan", "atan2", "sinh", "cosh", "cot" - ); - - private static final Set stringOperators = Sets.newHashSet( - "split", "concat_ws", "substring", "trim", "lower", "upper", "rtrim", "ltrim", "replace", - "left", "right" - ); - - private static final Set stringFunctions = Sets.newHashSet( - "length", "locate", "ascii" - ); - - private static final Set binaryOperators = Sets.newHashSet( - "add", "multiply", "divide", "subtract", "modulus" - ); - - private static final Set dateFunctions = Sets.newHashSet( - "date_format", "year", "month_of_year", "week_of_year", "day_of_year", "day_of_month", - "day_of_week", "hour_of_day", "minute_of_day", "minute_of_hour", "second_of_minute", "month", "dayofmonth", - "date", "monthname", "timestamp", "maketime", "now", "curdate" - ); - - private static final Set conditionalFunctions = Sets.newHashSet( - "if", "ifnull", "isnull" - ); - - private static final Set utilityFunctions = Sets.newHashSet("field", "assign", "cast"); - - public static final Set builtInFunctions = Stream.of( - numberOperators, - mathConstants, - trigFunctions, - stringOperators, - stringFunctions, - binaryOperators, - dateFunctions, - conditionalFunctions, - utilityFunctions) - .flatMap(Set::stream).collect(Collectors.toSet()); - - private Map generatedIds = new HashMap<>(); - - /** - * Generates next id for given method name. 
The id's are increasing for each method name, so - * nextId("a"), nextId("a"), nextId("b") will return a_1, a_2, b_1 - */ - public String nextId(String methodName) { - return methodName + "_" + generatedIds.merge(methodName, 1, Integer::sum); - } - - - /** - * Is the function actually translated into Elastic DSL script during execution? - */ - public static boolean isFunctionTranslatedToScript(String function) { - return builtInFunctions.contains(function.toLowerCase()); - } - - public Tuple function(String methodName, List paramers, String name, - boolean returnValue) throws SqlParseException { - Tuple functionStr = null; - switch (methodName.toLowerCase()) { - case "cast": { - SQLCastExpr castExpr = (SQLCastExpr) ((SQLIdentifierExpr) paramers.get(0).value).getParent(); - String typeName = castExpr.getDataType().getName(); - functionStr = cast(typeName, paramers); - break; - } - case "lower": { - functionStr = lower( - (SQLExpr) paramers.get(0).value, - getLocaleForCaseChangingFunction(paramers), - name - ); - break; - } - case "upper": { - functionStr = upper( - (SQLExpr) paramers.get(0).value, - getLocaleForCaseChangingFunction(paramers), - name); - break; - } - - // Split is currently not supported since its using .split() in painless which is not allow-listed - case "split": - if (paramers.size() == 3) { - functionStr = split((SQLExpr) paramers.get(0).value, - Util.expr2Object((SQLExpr) paramers.get(1).value).toString(), - Integer.parseInt(Util.expr2Object((SQLExpr) paramers.get(2).value).toString()), name); - } else { - functionStr = split((SQLExpr) paramers.get(0).value, - paramers.get(1).value.toString(), - name); - } - - break; - - case "concat_ws": - List result = Lists.newArrayList(); - for (int i = 1; i < paramers.size(); i++) { - result.add((SQLExpr) paramers.get(i).value); - } - functionStr = concat_ws(paramers.get(0).value.toString(), result); - - break; - - - case "date_format": - functionStr = date_format( - (SQLExpr) paramers.get(0).value, - 
Util.expr2Object((SQLExpr) paramers.get(1).value).toString(), - paramers.size() > 2 ? Util.expr2Object((SQLExpr) paramers.get(2).value).toString() : null, - name); - break; - - case "year": - functionStr = dateFunctionTemplate("year", (SQLExpr) paramers.get(0).value); - break; - case "month_of_year": - case "month": - functionStr = dateFunctionTemplate("monthValue", (SQLExpr) paramers.get(0).value); - break; - case "monthname": - functionStr = dateFunctionTemplate("month", (SQLExpr) paramers.get(0).value); - break; - case "week_of_year": - functionStr = dateFunctionTemplate("weekOfWeekyear", - "get(WeekFields.ISO.weekOfWeekBasedYear())", - (SQLExpr) paramers.get(0).value); - break; - case "day_of_year": - functionStr = dateFunctionTemplate("dayOfYear", (SQLExpr) paramers.get(0).value); - break; - case "day_of_month": - case "dayofmonth": - functionStr = dateFunctionTemplate("dayOfMonth", (SQLExpr) paramers.get(0).value); - break; - case "day_of_week": - functionStr = dateFunctionTemplate("dayOfWeek", - "getDayOfWeekEnum().getValue()", - (SQLExpr) paramers.get(0).value); - break; - case "date": - functionStr = date((SQLExpr) paramers.get(0).value); - break; - case "hour_of_day": - functionStr = dateFunctionTemplate("hour", (SQLExpr) paramers.get(0).value); - break; - case "minute_of_day": - functionStr = dateFunctionTemplate("minuteOfDay", - "get(ChronoField.MINUTE_OF_DAY)", - (SQLExpr) paramers.get(0).value); - break; - case "minute_of_hour": - functionStr = dateFunctionTemplate("minute", (SQLExpr) paramers.get(0).value); - break; - case "second_of_minute": - functionStr = dateFunctionTemplate("second", (SQLExpr) paramers.get(0).value); - break; - case "timestamp": - functionStr = timestamp((SQLExpr) paramers.get(0).value); - break; - case "maketime": - functionStr = maketime((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value, - (SQLExpr) paramers.get(2).value); - break; - case "now": - functionStr = now(); - break; - case "curdate": - functionStr = 
curdate(); - break; - - case "e": - case "pi": - methodName = methodName.toUpperCase(); - functionStr = mathConstantTemplate("Math." + methodName, methodName); - break; - - case "abs": - case "round": - case "floor": - case "ceil": - case "cbrt": - case "rint": - case "exp": - case "expm1": - case "sqrt": - case "sin": - case "cos": - case "tan": - case "asin": - case "acos": - case "atan": - case "sinh": - case "cosh": - functionStr = mathSingleValueTemplate("Math." + methodName, methodName, - (SQLExpr) paramers.get(0).value, name); - break; - - case "rand": - if (paramers.isEmpty()) { - functionStr = rand(); - } else { - functionStr = rand((SQLExpr) paramers.get(0).value); - } - break; - - case "cot": - // OpenSearch does not support the function name cot - functionStr = mathSingleValueTemplate("1 / Math.tan", methodName, - (SQLExpr) paramers.get(0).value, name); - break; - - case "sign": - case "signum": - methodName = "signum"; - functionStr = mathSingleValueTemplate("Math." + methodName, methodName, - (SQLExpr) paramers.get(0).value, name); - break; - - case "pow": - case "power": - methodName = "pow"; - functionStr = mathDoubleValueTemplate("Math." + methodName, methodName, - (SQLExpr) paramers.get(0).value, Util.expr2Object((SQLExpr) paramers.get(1).value).toString(), - name); - break; - - case "atan2": - functionStr = mathDoubleValueTemplate("Math." 
+ methodName, methodName, - (SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); - break; - - case "substring": - functionStr = substring((SQLExpr) paramers.get(0).value, - Integer.parseInt(Util.expr2Object((SQLExpr) paramers.get(1).value).toString()), - Integer.parseInt(Util.expr2Object((SQLExpr) paramers.get(2).value).toString())); - break; - - case "degrees": - functionStr = degrees((SQLExpr) paramers.get(0).value, name); - break; - case "radians": - functionStr = radians((SQLExpr) paramers.get(0).value, name); - break; - - case "trim": - functionStr = trim((SQLExpr) paramers.get(0).value, name); - break; - - case "add": - functionStr = add((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); - break; - - case "subtract": - functionStr = subtract((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); - break; - case "divide": - functionStr = divide((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); - break; - - case "multiply": - functionStr = multiply((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); - break; - case "modulus": - functionStr = modulus((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); - break; - - case "field": - functionStr = field(Util.expr2Object((SQLExpr) paramers.get(0).value).toString()); - break; - - case "log2": - functionStr = log(SQLUtils.toSQLExpr("2"), (SQLExpr) paramers.get(0).value, name); - break; - case "log10": - functionStr = log10((SQLExpr) paramers.get(0).value); - break; - case "log": - if (paramers.size() > 1) { - functionStr = log((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value, name); - } else { - functionStr = ln((SQLExpr) paramers.get(0).value); - } - break; - case "ln": - functionStr = ln((SQLExpr) paramers.get(0).value); - break; - case "assign": - functionStr = assign((SQLExpr) paramers.get(0).value); - break; - case "length": - functionStr = length((SQLExpr) paramers.get(0).value); - break; - case "replace": - 
functionStr = replace((SQLExpr) paramers.get(0).value, paramers.get(1).value.toString(), - paramers.get(2).value.toString()); - break; - case "locate": - int start = 0; - if (paramers.size() > 2) { - start = Integer.parseInt(paramers.get(2).value.toString()); - } - functionStr = locate(paramers.get(0).value.toString(), (SQLExpr) paramers.get(1).value, start); - break; - case "rtrim": - functionStr = rtrim((SQLExpr) paramers.get(0).value); - break; - case "ltrim": - functionStr = ltrim((SQLExpr) paramers.get(0).value); - break; - case "ascii": - functionStr = ascii((SQLExpr) paramers.get(0).value); - break; - case "left": - functionStr = left((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); - break; - case "right": - functionStr = right((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); - break; - - case "if": - functionStr = ifFunc(paramers); - break; - case "ifnull": - functionStr = ifnull((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); - break; - case "isnull": - functionStr = isnull((SQLExpr) paramers.get(0).value); - break; - - default: - - } - if (returnValue) { - String generatedFieldName = functionStr.v1(); - String returnCommand = ";return " + generatedFieldName + ";"; - String newScript = functionStr.v2() + returnCommand; - functionStr = new Tuple<>(generatedFieldName, newScript); - } - return functionStr; - } - - public String getLocaleForCaseChangingFunction(List paramers) { - String locale; - if (paramers.size() == 1) { - locale = Locale.getDefault().getLanguage(); - } else { - locale = Util.expr2Object((SQLExpr) paramers.get(1).value).toString(); + private static final Set numberOperators = + Sets.newHashSet( + "exp", "expm1", "log", "log2", "log10", "ln", "sqrt", "cbrt", "ceil", "floor", "rint", + "pow", "power", "round", "rand", "abs", "sign", "signum"); + + private static final Set mathConstants = Sets.newHashSet("e", "pi"); + + private static final Set trigFunctions = + Sets.newHashSet( + 
"degrees", "radians", "sin", "cos", "tan", "asin", "acos", "atan", "atan2", "sinh", + "cosh", "cot"); + + private static final Set stringOperators = + Sets.newHashSet( + "split", + "concat_ws", + "substring", + "trim", + "lower", + "upper", + "rtrim", + "ltrim", + "replace", + "left", + "right"); + + private static final Set stringFunctions = Sets.newHashSet("length", "locate", "ascii"); + + private static final Set binaryOperators = + Sets.newHashSet("add", "multiply", "divide", "subtract", "modulus"); + + private static final Set dateFunctions = + Sets.newHashSet( + "date_format", + "year", + "month_of_year", + "week_of_year", + "day_of_year", + "day_of_month", + "day_of_week", + "hour_of_day", + "minute_of_day", + "minute_of_hour", + "second_of_minute", + "month", + "dayofmonth", + "date", + "monthname", + "timestamp", + "maketime", + "now", + "curdate"); + + private static final Set conditionalFunctions = Sets.newHashSet("if", "ifnull", "isnull"); + + private static final Set utilityFunctions = Sets.newHashSet("field", "assign", "cast"); + + public static final Set builtInFunctions = + Stream.of( + numberOperators, + mathConstants, + trigFunctions, + stringOperators, + stringFunctions, + binaryOperators, + dateFunctions, + conditionalFunctions, + utilityFunctions) + .flatMap(Set::stream) + .collect(Collectors.toSet()); + + private Map generatedIds = new HashMap<>(); + + /** + * Generates next id for given method name. The id's are increasing for each method name, so + * nextId("a"), nextId("a"), nextId("b") will return a_1, a_2, b_1 + */ + public String nextId(String methodName) { + return methodName + "_" + generatedIds.merge(methodName, 1, Integer::sum); + } + + /** Is the function actually translated into Elastic DSL script during execution? 
*/ + public static boolean isFunctionTranslatedToScript(String function) { + return builtInFunctions.contains(function.toLowerCase()); + } + + public Tuple function( + String methodName, List paramers, String name, boolean returnValue) + throws SqlParseException { + Tuple functionStr = null; + switch (methodName.toLowerCase()) { + case "cast": + { + SQLCastExpr castExpr = + (SQLCastExpr) ((SQLIdentifierExpr) paramers.get(0).value).getParent(); + String typeName = castExpr.getDataType().getName(); + functionStr = cast(typeName, paramers); + break; } - return locale; - } - - public Tuple cast(String castType, List paramers) throws SqlParseException { - String name = nextId("cast"); - return new Tuple<>(name, getCastScriptStatement(name, castType, paramers)); - } - - - public Tuple upper(SQLExpr field, String locale, String valueName) { - String name = nextId("upper"); - - if (valueName == null) { - return new Tuple<>(name, def(name, upper(getPropertyOrStringValue(field), locale))); - } else { - return new Tuple<>(name, getPropertyOrStringValue(field) + "; " - + def(name, valueName + "." + upper(getPropertyOrStringValue(field), locale))); + case "lower": + { + functionStr = + lower( + (SQLExpr) paramers.get(0).value, + getLocaleForCaseChangingFunction(paramers), + name); + break; } - } - - public Tuple lower(SQLExpr field, String locale, String valueName) { - String name = nextId("lower"); - - if (valueName == null) { - return new Tuple<>(name, def(name, lower(getPropertyOrStringValue(field), locale))); - } else { - return new Tuple<>(name, getPropertyOrStringValue(field) + "; " - + def(name, valueName + "." 
+ lower(getPropertyOrStringValue(field), locale))); + case "upper": + { + functionStr = + upper( + (SQLExpr) paramers.get(0).value, + getLocaleForCaseChangingFunction(paramers), + name); + break; } - } - - private static String def(String name, String value) { - return "def " + name + " = " + value; - } - - private static String doc(SQLExpr field) { - return "doc['" + exprString(field) + "']"; - } - - private static String doc(String field) { - return "doc['" + field + "']"; - } - - private static String exprString(SQLExpr expr) { - return Util.expr2Object(expr).toString(); - } - - private static String func(String methodName, boolean quotes, String... params) { - if (quotes) { - return methodName + "(" + quoteParams(params) + ")"; - } - - return methodName + "(" + String.join(", ", params) + ")"; - } - - /** - * Helper method to surround each param with '' (single quotes) for painless script - */ - private static String quoteParams(String... params) { - return Stream.of(params).collect(Collectors.joining("', '", "'", "'")); - } - - private Tuple concat_ws(String split, List columns) { - String name = nextId("concat_ws"); - List result = Lists.newArrayList(); - - for (SQLExpr column : columns) { - String strColumn = exprString(column); - if (strColumn.startsWith("def ")) { - result.add(strColumn); - } else if (isProperty(column)) { - result.add("doc['" + strColumn + "'].value"); - } else { - result.add("'" + strColumn + "'"); - } - - } - return new Tuple<>(name, def(name, Joiner.on("+ " + split + " +").join(result))); - } - - - //split(Column expr, java.lang.String pattern) - public Tuple split(SQLExpr field, String pattern, int index, String valueName) { - String name = nextId("split"); - final String script; - if (valueName == null) { - script = def(name, - getPropertyOrValue(field) + "." - + func("split", true, pattern) + "[" + index + "]"); - } else { - script = "; " + def(name, valueName + "." 
- + func("split", true, pattern) + "[" + index + "]"); - } - return new Tuple<>(name, script); - } - - //split(Column expr, java.lang.String pattern) - public Tuple split(SQLExpr field, String pattern, String valueName) { - String name = nextId("split"); - if (valueName == null) { - return new Tuple<>(name, - def(name, getPropertyOrValue(field) + "." - + func("split", true, pattern))); - } else { - return new Tuple<>(name, getPropertyOrValue(field) + "; " - + def(name, valueName + "." + func("split", true, pattern))); - } - } - - private Tuple date_format(SQLExpr field, String pattern, String zoneId, String valueName) { - String name = nextId("date_format"); - if (valueName == null) { - return new Tuple<>(name, "def " + name + " = DateTimeFormatter.ofPattern('" + pattern + "').withZone(" - + (zoneId != null ? "ZoneId.of('" + zoneId + "')" : "ZoneId.of(\"UTC\")") - + ").format(Instant.ofEpochMilli(" + getPropertyOrValue(field) + ".toInstant().toEpochMilli()))"); - } else { - return new Tuple<>(name, exprString(field) + "; " - + "def " + name + " = new SimpleDateFormat('" + pattern + "').format(" - + "new Date(" + valueName + " - 8*1000*60*60))"); - } - } - - /** - * Explicitly pass in name used to generate variable ID because methodName is not always valid - * - * For example, - * - * functionStr = dateFunctionTemplate("weekOfWeekyear", - * "get(WeekFields.ISO.weekOfWeekBasedYear())", - * (SQLExpr) paramers.get(0).value); - * - * - * The old dateFunctionTemplate(methodName, field) passes string "get(WeekFields.ISO.weekOfWeekBasedYear())" - * to nextId() which generates an invalid variable name in painless script. - */ - private Tuple dateFunctionTemplate(String name, String methodName, SQLExpr field) { - String id = nextId(name); - return new Tuple<>(id, def(id, doc(field) + ".value." 
+ methodName)); - } - - private Tuple dateFunctionTemplate(String methodName, SQLExpr field) { - return dateFunctionTemplate(methodName, methodName, field); - } - - public Tuple add(SQLExpr a, SQLExpr b) { - return binaryOpertator("add", "+", a, b); - } - - public Tuple assign(SQLExpr a) { - String name = nextId("assign"); - return new Tuple<>(name, - def(name, extractName(a))); - } - - private Tuple modulus(SQLExpr a, SQLExpr b) { - return binaryOpertator("modulus", "%", a, b); - } - - public Tuple field(String a) { - String name = nextId("field"); - return new Tuple<>(name, def(name, doc(a) + ".value")); - } - - private Tuple subtract(SQLExpr a, SQLExpr b) { - return binaryOpertator("subtract", "-", a, b); - } - - private Tuple multiply(SQLExpr a, SQLExpr b) { - return binaryOpertator("multiply", "*", a, b); - } - - private Tuple divide(SQLExpr a, SQLExpr b) { - return binaryOpertator("divide", "/", a, b); - } - - private Tuple binaryOpertator(String methodName, String operator, SQLExpr a, SQLExpr b) { - String name = nextId(methodName); - return new Tuple<>(name, - scriptDeclare(a) + scriptDeclare(b) + convertType(a) + convertType(b) - + def(name, extractName(a) + " " + operator + " " + extractName(b))); - } - - private static boolean isProperty(SQLExpr expr) { - return (expr instanceof SQLIdentifierExpr || expr instanceof SQLPropertyExpr - || expr instanceof SQLVariantRefExpr); - } - - private static String getPropertyOrValue(SQLExpr expr) { - if (isProperty(expr)) { - return doc(expr) + ".value"; - } else { - return exprString(expr); - } - } - - private static String getPropertyOrValue(String expr) { - if (isQuoted(expr, "'")) { - return expr; - } else if (StringUtils.isNumeric(expr)) { - return expr; - } else { - return doc(expr) + ".value"; - } - } - - private static String getPropertyOrStringValue(SQLExpr expr) { - if (isProperty(expr)) { - return doc(expr) + ".value"; - } else { - return "'" + exprString(expr) + "'"; - } - } - - private static String 
scriptDeclare(SQLExpr a) { - - if (isProperty(a) || a instanceof SQLNumericLiteralExpr) { - return ""; - } else { - return exprString(a) + ";"; - } - } - - private static String extractName(SQLExpr script) { - if (isProperty(script)) { - return doc(script) + ".value"; - } - String scriptStr = exprString(script); - String[] variance = scriptStr.split(";"); - String newScript = variance[variance.length - 1]; - if (newScript.trim().startsWith("def ")) { - //for now ,if variant is string,then change to double. - return newScript.trim().substring(4).split("=")[0].trim(); - } else { - return scriptStr; - } - } - //cast(year as int) - - private static String convertType(SQLExpr script) { - String[] variance = exprString(script).split(";"); - String newScript = variance[variance.length - 1]; - if (newScript.trim().startsWith("def ")) { - //for now ,if variant is string,then change to double. - String temp = newScript.trim().substring(4).split("=")[0].trim(); - - return " if( " + temp + " instanceof String) " + temp + "= Double.parseDouble(" + temp.trim() + "); "; + // Split is currently not supported since its using .split() in painless which is not + // allow-listed + case "split": + if (paramers.size() == 3) { + functionStr = + split( + (SQLExpr) paramers.get(0).value, + Util.expr2Object((SQLExpr) paramers.get(1).value).toString(), + Integer.parseInt(Util.expr2Object((SQLExpr) paramers.get(2).value).toString()), + name); } else { - return ""; + functionStr = + split((SQLExpr) paramers.get(0).value, paramers.get(1).value.toString(), name); } + break; - } - - private String getScriptText(MethodField field) { - String content = ((SQLTextLiteralExpr) field.getParams().get(1).value).getText(); - return content; - } - - /** - * Using exprString() rather than getPropertyOrValue() for "base" since something like "Math.E" gets evaluated - * incorrectly in getPropertyOrValue(), returning it as a doc value instead of the literal string - */ - public Tuple log(SQLExpr base, SQLExpr 
field, String valueName) { - String name = nextId("log"); - String result; - if (valueName == null) { - result = def(name, func("Math.log", false, getPropertyOrValue(field)) - + "/" + func("Math.log", false, exprString(base))); - } else { - result = getPropertyOrValue(field) + "; " - + def(name, func("Math.log", false, valueName) + "/" - + func("Math.log", false, exprString(base))); + case "concat_ws": + List result = Lists.newArrayList(); + for (int i = 1; i < paramers.size(); i++) { + result.add((SQLExpr) paramers.get(i).value); } - return new Tuple<>(name, result); - } - - public Tuple log10(SQLExpr field) { - String name = nextId("log10"); - return new Tuple<>(name, def(name, StringUtils.format("Math.log10(%s)", getPropertyOrValue(field)))); - } - - public Tuple ln(SQLExpr field) { - String name = nextId("ln"); - return new Tuple<>(name, def(name, StringUtils.format("Math.log(%s)", getPropertyOrValue(field)))); - } - - public Tuple trim(SQLExpr field, String valueName) { - return strSingleValueTemplate("trim", field, valueName); - } - - private Tuple degrees(SQLExpr field, String valueName) { - return mathSingleValueTemplate("Math.toDegrees", "degrees", field, valueName); - } - - private Tuple radians(SQLExpr field, String valueName) { - return mathSingleValueTemplate("Math.toRadians", "radians", field, valueName); - } - - private Tuple rand(SQLExpr expr) { - String name = nextId("rand"); - return new Tuple<>(name, def(name, format("new Random(%s).nextDouble()", getPropertyOrValue(expr)))); - } - - private Tuple rand() { - String name = nextId("rand"); - return new Tuple<>(name, def(name, "new Random().nextDouble()")); - } - - private Tuple mathDoubleValueTemplate(String methodName, String fieldName, SQLExpr val1, - String val2, String valueName) { - String name = nextId(fieldName); - if (valueName == null) { - return new Tuple<>(name, def(name, func(methodName, false, getPropertyOrValue(val1), - getPropertyOrValue(val2)))); + functionStr = 
concat_ws(paramers.get(0).value.toString(), result); + + break; + + case "date_format": + functionStr = + date_format( + (SQLExpr) paramers.get(0).value, + Util.expr2Object((SQLExpr) paramers.get(1).value).toString(), + paramers.size() > 2 + ? Util.expr2Object((SQLExpr) paramers.get(2).value).toString() + : null, + name); + break; + + case "year": + functionStr = dateFunctionTemplate("year", (SQLExpr) paramers.get(0).value); + break; + case "month_of_year": + case "month": + functionStr = dateFunctionTemplate("monthValue", (SQLExpr) paramers.get(0).value); + break; + case "monthname": + functionStr = dateFunctionTemplate("month", (SQLExpr) paramers.get(0).value); + break; + case "week_of_year": + functionStr = + dateFunctionTemplate( + "weekOfWeekyear", + "get(WeekFields.ISO.weekOfWeekBasedYear())", + (SQLExpr) paramers.get(0).value); + break; + case "day_of_year": + functionStr = dateFunctionTemplate("dayOfYear", (SQLExpr) paramers.get(0).value); + break; + case "day_of_month": + case "dayofmonth": + functionStr = dateFunctionTemplate("dayOfMonth", (SQLExpr) paramers.get(0).value); + break; + case "day_of_week": + functionStr = + dateFunctionTemplate( + "dayOfWeek", "getDayOfWeekEnum().getValue()", (SQLExpr) paramers.get(0).value); + break; + case "date": + functionStr = date((SQLExpr) paramers.get(0).value); + break; + case "hour_of_day": + functionStr = dateFunctionTemplate("hour", (SQLExpr) paramers.get(0).value); + break; + case "minute_of_day": + functionStr = + dateFunctionTemplate( + "minuteOfDay", "get(ChronoField.MINUTE_OF_DAY)", (SQLExpr) paramers.get(0).value); + break; + case "minute_of_hour": + functionStr = dateFunctionTemplate("minute", (SQLExpr) paramers.get(0).value); + break; + case "second_of_minute": + functionStr = dateFunctionTemplate("second", (SQLExpr) paramers.get(0).value); + break; + case "timestamp": + functionStr = timestamp((SQLExpr) paramers.get(0).value); + break; + case "maketime": + functionStr = + maketime( + (SQLExpr) 
paramers.get(0).value, + (SQLExpr) paramers.get(1).value, + (SQLExpr) paramers.get(2).value); + break; + case "now": + functionStr = now(); + break; + case "curdate": + functionStr = curdate(); + break; + + case "e": + case "pi": + methodName = methodName.toUpperCase(); + functionStr = mathConstantTemplate("Math." + methodName, methodName); + break; + + case "abs": + case "round": + case "floor": + case "ceil": + case "cbrt": + case "rint": + case "exp": + case "expm1": + case "sqrt": + case "sin": + case "cos": + case "tan": + case "asin": + case "acos": + case "atan": + case "sinh": + case "cosh": + functionStr = + mathSingleValueTemplate( + "Math." + methodName, methodName, (SQLExpr) paramers.get(0).value, name); + break; + + case "rand": + if (paramers.isEmpty()) { + functionStr = rand(); } else { - return new Tuple<>(name, getPropertyOrValue(val1) + "; " - + def(name, func(methodName, false, valueName, getPropertyOrValue(val2)))); + functionStr = rand((SQLExpr) paramers.get(0).value); } - } - - private Tuple mathDoubleValueTemplate(String methodName, String fieldName, SQLExpr val1, - SQLExpr val2) { - String name = nextId(fieldName); - return new Tuple<>(name, def(name, func(methodName, false, - getPropertyOrValue(val1), getPropertyOrValue(val2)))); - } - - private Tuple mathSingleValueTemplate(String methodName, String fieldName, SQLExpr field, - String valueName) { - String name = nextId(fieldName); - if (valueName == null) { - return new Tuple<>(name, def(name, func(methodName, false, getPropertyOrValue(field)))); + break; + + case "cot": + // OpenSearch does not support the function name cot + functionStr = + mathSingleValueTemplate( + "1 / Math.tan", methodName, (SQLExpr) paramers.get(0).value, name); + break; + + case "sign": + case "signum": + methodName = "signum"; + functionStr = + mathSingleValueTemplate( + "Math." 
+ methodName, methodName, (SQLExpr) paramers.get(0).value, name); + break; + + case "pow": + case "power": + methodName = "pow"; + functionStr = + mathDoubleValueTemplate( + "Math." + methodName, + methodName, + (SQLExpr) paramers.get(0).value, + Util.expr2Object((SQLExpr) paramers.get(1).value).toString(), + name); + break; + + case "atan2": + functionStr = + mathDoubleValueTemplate( + "Math." + methodName, + methodName, + (SQLExpr) paramers.get(0).value, + (SQLExpr) paramers.get(1).value); + break; + + case "substring": + functionStr = + substring( + (SQLExpr) paramers.get(0).value, + Integer.parseInt(Util.expr2Object((SQLExpr) paramers.get(1).value).toString()), + Integer.parseInt(Util.expr2Object((SQLExpr) paramers.get(2).value).toString())); + break; + + case "degrees": + functionStr = degrees((SQLExpr) paramers.get(0).value, name); + break; + case "radians": + functionStr = radians((SQLExpr) paramers.get(0).value, name); + break; + + case "trim": + functionStr = trim((SQLExpr) paramers.get(0).value, name); + break; + + case "add": + functionStr = add((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); + break; + + case "subtract": + functionStr = subtract((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); + break; + case "divide": + functionStr = divide((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); + break; + + case "multiply": + functionStr = multiply((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); + break; + case "modulus": + functionStr = modulus((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); + break; + + case "field": + functionStr = field(Util.expr2Object((SQLExpr) paramers.get(0).value).toString()); + break; + + case "log2": + functionStr = log(SQLUtils.toSQLExpr("2"), (SQLExpr) paramers.get(0).value, name); + break; + case "log10": + functionStr = log10((SQLExpr) paramers.get(0).value); + break; + case "log": + if (paramers.size() > 1) { + functionStr = 
log((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value, name); } else { - return new Tuple<>(name, getPropertyOrValue(field) + "; " - + def(name, func(methodName, false, valueName))); + functionStr = ln((SQLExpr) paramers.get(0).value); } - - } - - private Tuple mathConstantTemplate(String methodName, String fieldName) { - String name = nextId(fieldName); - return new Tuple<>(name, def(name, methodName)); - } - - private Tuple strSingleValueTemplate(String methodName, SQLExpr field, String valueName) { - String name = nextId(methodName); - if (valueName == null) { - return new Tuple<>(name, def(name, getPropertyOrStringValue(field) + "." + func(methodName, false))); - } else { - return new Tuple<>(name, getPropertyOrStringValue(field) + "; " - + def(name, valueName + "." + func(methodName, false))); + break; + case "ln": + functionStr = ln((SQLExpr) paramers.get(0).value); + break; + case "assign": + functionStr = assign((SQLExpr) paramers.get(0).value); + break; + case "length": + functionStr = length((SQLExpr) paramers.get(0).value); + break; + case "replace": + functionStr = + replace( + (SQLExpr) paramers.get(0).value, + paramers.get(1).value.toString(), + paramers.get(2).value.toString()); + break; + case "locate": + int start = 0; + if (paramers.size() > 2) { + start = Integer.parseInt(paramers.get(2).value.toString()); } - - } - - // query: substring(Column expr, int pos, int len) - // painless script: substring(int begin, int end) - // OpenSearch behavior: 1-index, supports out-of-bound index - public Tuple substring(SQLExpr field, int pos, int len) { - String name = nextId("substring"); - // start and end are 0-indexes - int start = pos < 1 ? 0 : pos - 1; - return new Tuple<>(name, StringUtils.format( - "def end = (int) Math.min(%s + %s, %s.length()); " - + def(name, getPropertyOrStringValue(field) + "." 
- + func("substring", false, Integer.toString(start), "end")), - Integer.toString(start), Integer.toString(len), getPropertyOrStringValue(field) - )); - } - - private String lower(String property, String culture) { - return property + ".toLowerCase(Locale.forLanguageTag(\"" + culture + "\"))"; - } - - private String upper(String property, String culture) { - return property + ".toUpperCase(Locale.forLanguageTag(\"" + culture + "\"))"; - } - - private Tuple length(SQLExpr field) { - String name = nextId("length"); - return new Tuple<>(name, def(name, getPropertyOrStringValue(field) + ".length()")); - } - - private Tuple replace(SQLExpr field, String target, String replacement) { - String name = nextId("replace"); - return new Tuple<>(name, def(name, getPropertyOrStringValue(field) - + ".replace(" + target + "," + replacement + ")")); - } - - // OpenSearch behavior: both 'start' and return value are 1-index; return 0 if pattern does not exist; - // support out-of-bound index - private Tuple locate(String pattern, SQLExpr source, int start) { - String name = nextId("locate"); - String docSource = getPropertyOrStringValue(source); - start = start < 1 ? 
0 : start - 1; - return new Tuple<>(name, def(name, StringUtils.format("%s.indexOf(%s,%d)+1", docSource, pattern, start))); - } - - private Tuple rtrim(SQLExpr field) { - String name = nextId("rtrim"); - String fieldString = getPropertyOrStringValue(field); - return new Tuple<>(name, StringUtils.format( - "int pos=%s.length()-1;" + functionStr = + locate(paramers.get(0).value.toString(), (SQLExpr) paramers.get(1).value, start); + break; + case "rtrim": + functionStr = rtrim((SQLExpr) paramers.get(0).value); + break; + case "ltrim": + functionStr = ltrim((SQLExpr) paramers.get(0).value); + break; + case "ascii": + functionStr = ascii((SQLExpr) paramers.get(0).value); + break; + case "left": + functionStr = left((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); + break; + case "right": + functionStr = right((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); + break; + + case "if": + functionStr = ifFunc(paramers); + break; + case "ifnull": + functionStr = ifnull((SQLExpr) paramers.get(0).value, (SQLExpr) paramers.get(1).value); + break; + case "isnull": + functionStr = isnull((SQLExpr) paramers.get(0).value); + break; + + default: + } + if (returnValue) { + String generatedFieldName = functionStr.v1(); + String returnCommand = ";return " + generatedFieldName + ";"; + String newScript = functionStr.v2() + returnCommand; + functionStr = new Tuple<>(generatedFieldName, newScript); + } + return functionStr; + } + + public String getLocaleForCaseChangingFunction(List paramers) { + String locale; + if (paramers.size() == 1) { + locale = Locale.getDefault().getLanguage(); + } else { + locale = Util.expr2Object((SQLExpr) paramers.get(1).value).toString(); + } + return locale; + } + + public Tuple cast(String castType, List paramers) + throws SqlParseException { + String name = nextId("cast"); + return new Tuple<>(name, getCastScriptStatement(name, castType, paramers)); + } + + public Tuple upper(SQLExpr field, String locale, String valueName) 
{ + String name = nextId("upper"); + + if (valueName == null) { + return new Tuple<>(name, def(name, upper(getPropertyOrStringValue(field), locale))); + } else { + return new Tuple<>( + name, + getPropertyOrStringValue(field) + + "; " + + def(name, valueName + "." + upper(getPropertyOrStringValue(field), locale))); + } + } + + public Tuple lower(SQLExpr field, String locale, String valueName) { + String name = nextId("lower"); + + if (valueName == null) { + return new Tuple<>(name, def(name, lower(getPropertyOrStringValue(field), locale))); + } else { + return new Tuple<>( + name, + getPropertyOrStringValue(field) + + "; " + + def(name, valueName + "." + lower(getPropertyOrStringValue(field), locale))); + } + } + + private static String def(String name, String value) { + return "def " + name + " = " + value; + } + + private static String doc(SQLExpr field) { + return "doc['" + exprString(field) + "']"; + } + + private static String doc(String field) { + return "doc['" + field + "']"; + } + + private static String exprString(SQLExpr expr) { + return Util.expr2Object(expr).toString(); + } + + private static String func(String methodName, boolean quotes, String... params) { + if (quotes) { + return methodName + "(" + quoteParams(params) + ")"; + } + + return methodName + "(" + String.join(", ", params) + ")"; + } + + /** Helper method to surround each param with '' (single quotes) for painless script */ + private static String quoteParams(String... 
params) { + return Stream.of(params).collect(Collectors.joining("', '", "'", "'")); + } + + private Tuple concat_ws(String split, List columns) { + String name = nextId("concat_ws"); + List result = Lists.newArrayList(); + + for (SQLExpr column : columns) { + String strColumn = exprString(column); + if (strColumn.startsWith("def ")) { + result.add(strColumn); + } else if (isProperty(column)) { + result.add("doc['" + strColumn + "'].value"); + } else { + result.add("'" + strColumn + "'"); + } + } + return new Tuple<>(name, def(name, Joiner.on("+ " + split + " +").join(result))); + } + + // split(Column expr, java.lang.String pattern) + public Tuple split(SQLExpr field, String pattern, int index, String valueName) { + String name = nextId("split"); + final String script; + if (valueName == null) { + script = + def( + name, + getPropertyOrValue(field) + "." + func("split", true, pattern) + "[" + index + "]"); + } else { + script = "; " + def(name, valueName + "." + func("split", true, pattern) + "[" + index + "]"); + } + return new Tuple<>(name, script); + } + + // split(Column expr, java.lang.String pattern) + public Tuple split(SQLExpr field, String pattern, String valueName) { + String name = nextId("split"); + if (valueName == null) { + return new Tuple<>( + name, def(name, getPropertyOrValue(field) + "." + func("split", true, pattern))); + } else { + return new Tuple<>( + name, + getPropertyOrValue(field) + + "; " + + def(name, valueName + "." + func("split", true, pattern))); + } + } + + private Tuple date_format( + SQLExpr field, String pattern, String zoneId, String valueName) { + String name = nextId("date_format"); + if (valueName == null) { + return new Tuple<>( + name, + "def " + + name + + " = DateTimeFormatter.ofPattern('" + + pattern + + "').withZone(" + + (zoneId != null ? 
"ZoneId.of('" + zoneId + "')" : "ZoneId.of(\"UTC\")") + + ").format(Instant.ofEpochMilli(" + + getPropertyOrValue(field) + + ".toInstant().toEpochMilli()))"); + } else { + return new Tuple<>( + name, + exprString(field) + + "; " + + "def " + + name + + " = new SimpleDateFormat('" + + pattern + + "').format(" + + "new Date(" + + valueName + + " - 8*1000*60*60))"); + } + } + + /** + * Explicitly pass in name used to generate variable ID because methodName is not always valid + * + *

For example, + * functionStr = dateFunctionTemplate("weekOfWeekyear", + * "get(WeekFields.ISO.weekOfWeekBasedYear())", + * (SQLExpr) paramers.get(0).value); + * The old dateFunctionTemplate(methodName, field) passes string + * "get(WeekFields.ISO.weekOfWeekBasedYear())" to nextId() which generates an invalid variable + * name in painless script. + */ + private Tuple dateFunctionTemplate( + String name, String methodName, SQLExpr field) { + String id = nextId(name); + return new Tuple<>(id, def(id, doc(field) + ".value." + methodName)); + } + + private Tuple dateFunctionTemplate(String methodName, SQLExpr field) { + return dateFunctionTemplate(methodName, methodName, field); + } + + public Tuple add(SQLExpr a, SQLExpr b) { + return binaryOpertator("add", "+", a, b); + } + + public Tuple assign(SQLExpr a) { + String name = nextId("assign"); + return new Tuple<>(name, def(name, extractName(a))); + } + + private Tuple modulus(SQLExpr a, SQLExpr b) { + return binaryOpertator("modulus", "%", a, b); + } + + public Tuple field(String a) { + String name = nextId("field"); + return new Tuple<>(name, def(name, doc(a) + ".value")); + } + + private Tuple subtract(SQLExpr a, SQLExpr b) { + return binaryOpertator("subtract", "-", a, b); + } + + private Tuple multiply(SQLExpr a, SQLExpr b) { + return binaryOpertator("multiply", "*", a, b); + } + + private Tuple divide(SQLExpr a, SQLExpr b) { + return binaryOpertator("divide", "/", a, b); + } + + private Tuple binaryOpertator( + String methodName, String operator, SQLExpr a, SQLExpr b) { + String name = nextId(methodName); + return new Tuple<>( + name, + scriptDeclare(a) + + scriptDeclare(b) + + convertType(a) + + convertType(b) + + def(name, extractName(a) + " " + operator + " " + extractName(b))); + } + + private static boolean isProperty(SQLExpr expr) { + return (expr instanceof SQLIdentifierExpr + || expr instanceof SQLPropertyExpr + || expr instanceof SQLVariantRefExpr); + } + + private static String getPropertyOrValue(SQLExpr 
expr) { + if (isProperty(expr)) { + return doc(expr) + ".value"; + } else { + return exprString(expr); + } + } + + private static String getPropertyOrValue(String expr) { + if (isQuoted(expr, "'")) { + return expr; + } else if (StringUtils.isNumeric(expr)) { + return expr; + } else { + return doc(expr) + ".value"; + } + } + + private static String getPropertyOrStringValue(SQLExpr expr) { + if (isProperty(expr)) { + return doc(expr) + ".value"; + } else { + return "'" + exprString(expr) + "'"; + } + } + + private static String scriptDeclare(SQLExpr a) { + + if (isProperty(a) || a instanceof SQLNumericLiteralExpr) { + return ""; + } else { + return exprString(a) + ";"; + } + } + + private static String extractName(SQLExpr script) { + if (isProperty(script)) { + return doc(script) + ".value"; + } + String scriptStr = exprString(script); + String[] variance = scriptStr.split(";"); + String newScript = variance[variance.length - 1]; + if (newScript.trim().startsWith("def ")) { + // for now ,if variant is string,then change to double. + return newScript.trim().substring(4).split("=")[0].trim(); + } else { + return scriptStr; + } + } + + // cast(year as int) + + private static String convertType(SQLExpr script) { + String[] variance = exprString(script).split(";"); + String newScript = variance[variance.length - 1]; + if (newScript.trim().startsWith("def ")) { + // for now ,if variant is string,then change to double. 
+ String temp = newScript.trim().substring(4).split("=")[0].trim(); + + return " if( " + + temp + + " instanceof String) " + + temp + + "= Double.parseDouble(" + + temp.trim() + + "); "; + } else { + return ""; + } + } + + private String getScriptText(MethodField field) { + String content = ((SQLTextLiteralExpr) field.getParams().get(1).value).getText(); + return content; + } + + /** + * Using exprString() rather than getPropertyOrValue() for "base" since something like "Math.E" + * gets evaluated incorrectly in getPropertyOrValue(), returning it as a doc value instead of the + * literal string + */ + public Tuple log(SQLExpr base, SQLExpr field, String valueName) { + String name = nextId("log"); + String result; + if (valueName == null) { + result = + def( + name, + func("Math.log", false, getPropertyOrValue(field)) + + "/" + + func("Math.log", false, exprString(base))); + } else { + result = + getPropertyOrValue(field) + + "; " + + def( + name, + func("Math.log", false, valueName) + + "/" + + func("Math.log", false, exprString(base))); + } + return new Tuple<>(name, result); + } + + public Tuple log10(SQLExpr field) { + String name = nextId("log10"); + return new Tuple<>( + name, def(name, StringUtils.format("Math.log10(%s)", getPropertyOrValue(field)))); + } + + public Tuple ln(SQLExpr field) { + String name = nextId("ln"); + return new Tuple<>( + name, def(name, StringUtils.format("Math.log(%s)", getPropertyOrValue(field)))); + } + + public Tuple trim(SQLExpr field, String valueName) { + return strSingleValueTemplate("trim", field, valueName); + } + + private Tuple degrees(SQLExpr field, String valueName) { + return mathSingleValueTemplate("Math.toDegrees", "degrees", field, valueName); + } + + private Tuple radians(SQLExpr field, String valueName) { + return mathSingleValueTemplate("Math.toRadians", "radians", field, valueName); + } + + private Tuple rand(SQLExpr expr) { + String name = nextId("rand"); + return new Tuple<>( + name, def(name, format("new 
Random(%s).nextDouble()", getPropertyOrValue(expr)))); + } + + private Tuple rand() { + String name = nextId("rand"); + return new Tuple<>(name, def(name, "new Random().nextDouble()")); + } + + private Tuple mathDoubleValueTemplate( + String methodName, String fieldName, SQLExpr val1, String val2, String valueName) { + String name = nextId(fieldName); + if (valueName == null) { + return new Tuple<>( + name, + def(name, func(methodName, false, getPropertyOrValue(val1), getPropertyOrValue(val2)))); + } else { + return new Tuple<>( + name, + getPropertyOrValue(val1) + + "; " + + def(name, func(methodName, false, valueName, getPropertyOrValue(val2)))); + } + } + + private Tuple mathDoubleValueTemplate( + String methodName, String fieldName, SQLExpr val1, SQLExpr val2) { + String name = nextId(fieldName); + return new Tuple<>( + name, + def(name, func(methodName, false, getPropertyOrValue(val1), getPropertyOrValue(val2)))); + } + + private Tuple mathSingleValueTemplate( + String methodName, String fieldName, SQLExpr field, String valueName) { + String name = nextId(fieldName); + if (valueName == null) { + return new Tuple<>(name, def(name, func(methodName, false, getPropertyOrValue(field)))); + } else { + return new Tuple<>( + name, getPropertyOrValue(field) + "; " + def(name, func(methodName, false, valueName))); + } + } + + private Tuple mathConstantTemplate(String methodName, String fieldName) { + String name = nextId(fieldName); + return new Tuple<>(name, def(name, methodName)); + } + + private Tuple strSingleValueTemplate( + String methodName, SQLExpr field, String valueName) { + String name = nextId(methodName); + if (valueName == null) { + return new Tuple<>( + name, def(name, getPropertyOrStringValue(field) + "." + func(methodName, false))); + } else { + return new Tuple<>( + name, + getPropertyOrStringValue(field) + + "; " + + def(name, valueName + "." 
+ func(methodName, false))); + } + } + + // query: substring(Column expr, int pos, int len) + // painless script: substring(int begin, int end) + // OpenSearch behavior: 1-index, supports out-of-bound index + public Tuple substring(SQLExpr field, int pos, int len) { + String name = nextId("substring"); + // start and end are 0-indexes + int start = pos < 1 ? 0 : pos - 1; + return new Tuple<>( + name, + StringUtils.format( + "def end = (int) Math.min(%s + %s, %s.length()); " + + def( + name, + getPropertyOrStringValue(field) + + "." + + func("substring", false, Integer.toString(start), "end")), + Integer.toString(start), + Integer.toString(len), + getPropertyOrStringValue(field))); + } + + private String lower(String property, String culture) { + return property + ".toLowerCase(Locale.forLanguageTag(\"" + culture + "\"))"; + } + + private String upper(String property, String culture) { + return property + ".toUpperCase(Locale.forLanguageTag(\"" + culture + "\"))"; + } + + private Tuple length(SQLExpr field) { + String name = nextId("length"); + return new Tuple<>(name, def(name, getPropertyOrStringValue(field) + ".length()")); + } + + private Tuple replace(SQLExpr field, String target, String replacement) { + String name = nextId("replace"); + return new Tuple<>( + name, + def( + name, + getPropertyOrStringValue(field) + ".replace(" + target + "," + replacement + ")")); + } + + // OpenSearch behavior: both 'start' and return value are 1-index; return 0 if pattern does not + // exist; + // support out-of-bound index + private Tuple locate(String pattern, SQLExpr source, int start) { + String name = nextId("locate"); + String docSource = getPropertyOrStringValue(source); + start = start < 1 ? 
0 : start - 1; + return new Tuple<>( + name, def(name, StringUtils.format("%s.indexOf(%s,%d)+1", docSource, pattern, start))); + } + + private Tuple rtrim(SQLExpr field) { + String name = nextId("rtrim"); + String fieldString = getPropertyOrStringValue(field); + return new Tuple<>( + name, + StringUtils.format( + "int pos=%s.length()-1;" + "while(pos >= 0 && Character.isWhitespace(%s.charAt(pos))) {pos --;} " + def(name, "%s.substring(0, pos+1)"), - fieldString, fieldString, fieldString - )); - } - - private Tuple ltrim(SQLExpr field) { - String name = nextId("ltrim"); - String fieldString = getPropertyOrStringValue(field); - return new Tuple<>(name, StringUtils.format( - "int pos=0;" + fieldString, + fieldString, + fieldString)); + } + + private Tuple ltrim(SQLExpr field) { + String name = nextId("ltrim"); + String fieldString = getPropertyOrStringValue(field); + return new Tuple<>( + name, + StringUtils.format( + "int pos=0;" + "while(pos < %s.length() && Character.isWhitespace(%s.charAt(pos))) {pos ++;} " + def(name, "%s.substring(pos, %s.length())"), - fieldString, fieldString, fieldString, fieldString - )); - } - - private Tuple ascii(SQLExpr field) { - String name = nextId("ascii"); - return new Tuple<>(name, def(name, "(int) " + getPropertyOrStringValue(field) + ".charAt(0)")); - } - - private Tuple left(SQLExpr expr, SQLExpr length) { - String name = nextId("left"); - return new Tuple<>(name, StringUtils.format( - "def len = (int) Math.min(%s, %s.length()); def %s = %s.substring(0, len)", - exprString(length), getPropertyOrStringValue(expr), name, getPropertyOrStringValue(expr))); - } - - private Tuple right(SQLExpr expr, SQLExpr length) { - String name = nextId("right"); - return new Tuple<>(name, StringUtils.format( - "def start = (int) Math.max(0, %s.length()-%s); def %s = %s.substring(start)", - getPropertyOrStringValue(expr), exprString(length), name, getPropertyOrStringValue(expr))); - } - - private Tuple date(SQLExpr field) { - String name = 
nextId("date"); - return new Tuple<>(name, def(name, - "LocalDate.parse(" + getPropertyOrStringValue(field) + ".toString()," - + "DateTimeFormatter.ISO_DATE_TIME)")); - } - - private Tuple timestamp(SQLExpr field) { - String name = nextId("timestamp"); - return new Tuple<>(name, def(name, - "DateTimeFormatter.ofPattern('yyyy-MM-dd HH:mm:ss').format(" - + "DateTimeFormatter.ISO_DATE_TIME.parse(" - + getPropertyOrStringValue(field) + ".toString()))")); - } - - private Tuple maketime(SQLExpr hr, SQLExpr min, SQLExpr sec) { - String name = nextId("maketime"); - return new Tuple<>(name, def(name, StringUtils.format( + fieldString, + fieldString, + fieldString, + fieldString)); + } + + private Tuple ascii(SQLExpr field) { + String name = nextId("ascii"); + return new Tuple<>(name, def(name, "(int) " + getPropertyOrStringValue(field) + ".charAt(0)")); + } + + private Tuple left(SQLExpr expr, SQLExpr length) { + String name = nextId("left"); + return new Tuple<>( + name, + StringUtils.format( + "def len = (int) Math.min(%s, %s.length()); def %s = %s.substring(0, len)", + exprString(length), + getPropertyOrStringValue(expr), + name, + getPropertyOrStringValue(expr))); + } + + private Tuple right(SQLExpr expr, SQLExpr length) { + String name = nextId("right"); + return new Tuple<>( + name, + StringUtils.format( + "def start = (int) Math.max(0, %s.length()-%s); def %s = %s.substring(start)", + getPropertyOrStringValue(expr), + exprString(length), + name, + getPropertyOrStringValue(expr))); + } + + private Tuple date(SQLExpr field) { + String name = nextId("date"); + return new Tuple<>( + name, + def( + name, + "LocalDate.parse(" + + getPropertyOrStringValue(field) + + ".toString()," + + "DateTimeFormatter.ISO_DATE_TIME)")); + } + + private Tuple timestamp(SQLExpr field) { + String name = nextId("timestamp"); + return new Tuple<>( + name, + def( + name, + "DateTimeFormatter.ofPattern('yyyy-MM-dd HH:mm:ss').format(" + + "DateTimeFormatter.ISO_DATE_TIME.parse(" + + 
getPropertyOrStringValue(field) + + ".toString()))")); + } + + private Tuple maketime(SQLExpr hr, SQLExpr min, SQLExpr sec) { + String name = nextId("maketime"); + return new Tuple<>( + name, + def( + name, + StringUtils.format( "LocalTime.of(%s, %s, %s).format(DateTimeFormatter.ofPattern('HH:mm:ss'))", hr.toString(), min.toString(), sec.toString()))); - } - - private Tuple now() { - String name = nextId("now"); - return new Tuple<>(name, def(name, "new SimpleDateFormat('HH:mm:ss').format(System.currentTimeMillis())")); - } - - private Tuple curdate() { - String name = nextId("curdate"); - return new Tuple<>(name, def(name, "new SimpleDateFormat('yyyy-MM-dd').format(System.currentTimeMillis())")); - } - - private Tuple ifFunc(List paramers) { - String expr1 = paramers.get(1).value.toString(); - String expr2 = paramers.get(2).value.toString(); - String name = nextId("if"); - - /** Input with null is regarded as false */ - if (paramers.get(0).value instanceof SQLNullExpr) { - return new Tuple<>(name, def(name, expr2)); - } - if (paramers.get(0).value instanceof MethodField) { - String condition = getScriptText((MethodField) paramers.get(0).value); - return new Tuple<>(name, "boolean cond = " + condition + ";" - + def(name, "cond ? " + expr1 + " : " + expr2)); - } else if (paramers.get(0).value instanceof SQLBooleanExpr) { - Boolean condition = ((SQLBooleanExpr) paramers.get(0).value).getValue(); - if (condition) { - return new Tuple<>(name, def(name, expr1)); - } else { - return new Tuple<>(name, def(name, expr2)); - } - } else { - /** - * Detailed explanation of cases that come here: - * the condition expression would be in the format of a=b: - * a is parsed as the key (String) of a KVValue (get from paramers.get(0)) - * and b is parsed as the value (Object) of this KVValue. 
- * - * Either a or b could be a column name, literal, or a number: - * - if isNumeric is true --> number - * - else if this string is single quoted --> literal - * - else --> column name - */ - String key = getPropertyOrValue(paramers.get(0).key); - String value = getPropertyOrValue(paramers.get(0).value.toString()); - String condition = key + " == " + value; - return new Tuple<>(name, "boolean cond = " + condition + ";" - + def(name, "cond ? " + expr1 + " : " + expr2)); - } - } - - private Tuple ifnull(SQLExpr condition, SQLExpr expr) { - String name = nextId("ifnull"); - if (condition instanceof SQLNullExpr) { - return new Tuple<>(name, def(name, expr.toString())); - } - if (isProperty(condition)) { - return new Tuple<>(name, def(name, doc(condition) + ".size()==0 ? " + expr.toString() + " : " - + getPropertyOrValue(condition))); - } else { - String condStr = Strings.isNullOrEmpty(condition.toString()) ? null : getPropertyOrStringValue(condition); - return new Tuple<>(name, def(name, condStr)); - } - } - - private Tuple isnull(SQLExpr expr) { - String name = nextId("isnull"); - if (expr instanceof SQLNullExpr) { - return new Tuple<>(name, def(name, "1")); - } - if (isProperty(expr)) { - return new Tuple<>(name, def(name, doc(expr) + ".size()==0 ? 
1 : 0")); - } - // cases that return 1: - // expr is null || expr is math func but tends to throw "divided by zero" arithmetic exception - String resultStr = "0"; - if (Strings.isNullOrEmpty(expr.toString())) { - resultStr = "1"; - } - if (expr instanceof SQLCharExpr && this.generatedIds.size() > 1) { - // the expr is a math expression - String mathExpr = ((SQLCharExpr) expr).getText(); - return new Tuple<>(name, StringUtils.format( - "try {%s;} " - + "catch(ArithmeticException e) " - + "{return 1;} " - + "def %s=0", - mathExpr, name, name) - ); - } - return new Tuple<>(name, def(name, resultStr)); - } - - public String getCastScriptStatement(String name, String castType, List paramers) - throws SqlParseException { - String castFieldName = String.format("doc['%s'].value", paramers.get(0).toString()); - switch (StringUtils.toUpper(castType)) { - case "INT": - case "LONG": - case "FLOAT": - case "DOUBLE": - return getCastToNumericValueScript(name, castFieldName, StringUtils.toLower(castType)); - case "STRING": - return String.format("def %s = %s.toString()", name, castFieldName); - case "DATETIME": - return String.format("def %s = DateTimeFormatter.ofPattern(\"yyyy-MM-dd'T'HH:mm:ss.SSS'Z'\").format(" - + "DateTimeFormatter.ISO_DATE_TIME.parse(%s.toString()))", name, castFieldName); - default: - throw new SqlParseException("Unsupported cast type " + castType); - } - } - - private String getCastToNumericValueScript(String varName, String docValue, String targetType) { - String script = - "def %1$s = (%2$s instanceof boolean) " - + "? (%2$s ? 1 : 0) " - + ": Double.parseDouble(%2$s.toString()).%3$sValue()"; - return StringUtils.format(script, varName, docValue, targetType); - } - - /** - * Returns return type of script function. This is simple approach, that might be not the best solution in the long - * term. 
For example - for JDBC, if the column type in index is INTEGER, and the query is "select column+5", current - * approach will return type of result column as DOUBLE, although there is enough information to understand that - * it might be safely treated as INTEGER. - */ - public static Schema.Type getScriptFunctionReturnType(MethodField field, Schema.Type resolvedType) { - String functionName = ((ScriptMethodField) field).getFunctionName().toLowerCase(); - if (functionName.equals("cast")) { - String castType = ((SQLCastExpr) field.getExpression()).getDataType().getName(); - return getCastFunctionReturnType(castType); - } - return resolvedType; - } - - public static Schema.Type getCastFunctionReturnType(String castType) { - switch (StringUtils.toUpper(castType)) { - case "FLOAT": - return Schema.Type.FLOAT; - case "DOUBLE": - return Schema.Type.DOUBLE; - case "INT": - return Schema.Type.INTEGER; - case "STRING": - return Schema.Type.TEXT; - case "DATETIME": - return Schema.Type.DATE; - case "LONG": - return Schema.Type.LONG; - default: - throw new UnsupportedOperationException( - StringUtils.format("The following type is not supported by cast(): %s", castType) - ); - } - } - - /** - * - * @param field - * @return Schema.Type.TEXT or DOUBLE - * There are only two ORDER BY types (TEXT, NUMBER) in OpenSearch, so the Type that is returned here essentially - * indicates the category of the function as opposed to the actual return type. 
- */ - public static Schema.Type getOrderByFieldType(Field field) { - String functionName = ((ScriptMethodField) field).getFunctionName().toLowerCase(); - if (functionName.equals("cast")) { - String castType = ((SQLCastExpr) field.getExpression()).getDataType().getName(); - return getCastFunctionReturnType(castType); - } - - if (numberOperators.contains(functionName) || mathConstants.contains(functionName) - || trigFunctions.contains(functionName) || binaryOperators.contains(functionName)) { - return Schema.Type.DOUBLE; - } else if (dateFunctions.contains(functionName)) { - if (functionName.equals("date_format") || functionName.equals("now") - || functionName.equals("curdate") || functionName.equals("date") - || functionName.equals("timestamp") || functionName.equals("monthname")) { - return Schema.Type.TEXT; - } - return Schema.Type.DOUBLE; - } else if (stringFunctions.contains(functionName) || stringOperators.contains(functionName)) { - return Schema.Type.TEXT; - } - + } + + private Tuple now() { + String name = nextId("now"); + return new Tuple<>( + name, def(name, "new SimpleDateFormat('HH:mm:ss').format(System.currentTimeMillis())")); + } + + private Tuple curdate() { + String name = nextId("curdate"); + return new Tuple<>( + name, def(name, "new SimpleDateFormat('yyyy-MM-dd').format(System.currentTimeMillis())")); + } + + private Tuple ifFunc(List paramers) { + String expr1 = paramers.get(1).value.toString(); + String expr2 = paramers.get(2).value.toString(); + String name = nextId("if"); + + /** Input with null is regarded as false */ + if (paramers.get(0).value instanceof SQLNullExpr) { + return new Tuple<>(name, def(name, expr2)); + } + if (paramers.get(0).value instanceof MethodField) { + String condition = getScriptText((MethodField) paramers.get(0).value); + return new Tuple<>( + name, "boolean cond = " + condition + ";" + def(name, "cond ? 
" + expr1 + " : " + expr2)); + } else if (paramers.get(0).value instanceof SQLBooleanExpr) { + Boolean condition = ((SQLBooleanExpr) paramers.get(0).value).getValue(); + if (condition) { + return new Tuple<>(name, def(name, expr1)); + } else { + return new Tuple<>(name, def(name, expr2)); + } + } else { + /** + * Detailed explanation of cases that come here: the condition expression would be in the + * format of a=b: a is parsed as the key (String) of a KVValue (get from paramers.get(0)) and + * b is parsed as the value (Object) of this KVValue. + * + *

Either a or b could be a column name, literal, or a number: - if isNumeric is true --> + * number - else if this string is single quoted --> literal - else --> column name + */ + String key = getPropertyOrValue(paramers.get(0).key); + String value = getPropertyOrValue(paramers.get(0).value.toString()); + String condition = key + " == " + value; + return new Tuple<>( + name, "boolean cond = " + condition + ";" + def(name, "cond ? " + expr1 + " : " + expr2)); + } + } + + private Tuple ifnull(SQLExpr condition, SQLExpr expr) { + String name = nextId("ifnull"); + if (condition instanceof SQLNullExpr) { + return new Tuple<>(name, def(name, expr.toString())); + } + if (isProperty(condition)) { + return new Tuple<>( + name, + def( + name, + doc(condition) + + ".size()==0 ? " + + expr.toString() + + " : " + + getPropertyOrValue(condition))); + } else { + String condStr = + Strings.isNullOrEmpty(condition.toString()) ? null : getPropertyOrStringValue(condition); + return new Tuple<>(name, def(name, condStr)); + } + } + + private Tuple isnull(SQLExpr expr) { + String name = nextId("isnull"); + if (expr instanceof SQLNullExpr) { + return new Tuple<>(name, def(name, "1")); + } + if (isProperty(expr)) { + return new Tuple<>(name, def(name, doc(expr) + ".size()==0 ? 
1 : 0")); + } + // cases that return 1: + // expr is null || expr is math func but tends to throw "divided by zero" arithmetic exception + String resultStr = "0"; + if (Strings.isNullOrEmpty(expr.toString())) { + resultStr = "1"; + } + if (expr instanceof SQLCharExpr && this.generatedIds.size() > 1) { + // the expr is a math expression + String mathExpr = ((SQLCharExpr) expr).getText(); + return new Tuple<>( + name, + StringUtils.format( + "try {%s;} " + "catch(ArithmeticException e) " + "{return 1;} " + "def %s=0", + mathExpr, name, name)); + } + return new Tuple<>(name, def(name, resultStr)); + } + + public String getCastScriptStatement(String name, String castType, List paramers) + throws SqlParseException { + String castFieldName = String.format("doc['%s'].value", paramers.get(0).toString()); + switch (StringUtils.toUpper(castType)) { + case "INT": + case "LONG": + case "FLOAT": + case "DOUBLE": + return getCastToNumericValueScript(name, castFieldName, StringUtils.toLower(castType)); + case "STRING": + return String.format("def %s = %s.toString()", name, castFieldName); + case "DATETIME": + return String.format( + "def %s = DateTimeFormatter.ofPattern(\"yyyy-MM-dd'T'HH:mm:ss.SSS'Z'\").format(" + + "DateTimeFormatter.ISO_DATE_TIME.parse(%s.toString()))", + name, castFieldName); + default: + throw new SqlParseException("Unsupported cast type " + castType); + } + } + + private String getCastToNumericValueScript(String varName, String docValue, String targetType) { + String script = + "def %1$s = (%2$s instanceof boolean) " + + "? (%2$s ? 1 : 0) " + + ": Double.parseDouble(%2$s.toString()).%3$sValue()"; + return StringUtils.format(script, varName, docValue, targetType); + } + + /** + * Returns return type of script function. This is simple approach, that might be not the best + * solution in the long term. 
For example - for JDBC, if the column type in index is INTEGER, and + * the query is "select column+5", current approach will return type of result column as DOUBLE, + * although there is enough information to understand that it might be safely treated as INTEGER. + */ + public static Schema.Type getScriptFunctionReturnType( + MethodField field, Schema.Type resolvedType) { + String functionName = ((ScriptMethodField) field).getFunctionName().toLowerCase(); + if (functionName.equals("cast")) { + String castType = ((SQLCastExpr) field.getExpression()).getDataType().getName(); + return getCastFunctionReturnType(castType); + } + return resolvedType; + } + + public static Schema.Type getCastFunctionReturnType(String castType) { + switch (StringUtils.toUpper(castType)) { + case "FLOAT": + return Schema.Type.FLOAT; + case "DOUBLE": + return Schema.Type.DOUBLE; + case "INT": + return Schema.Type.INTEGER; + case "STRING": + return Schema.Type.TEXT; + case "DATETIME": + return Schema.Type.DATE; + case "LONG": + return Schema.Type.LONG; + default: throw new UnsupportedOperationException( - String.format( - "The following method is not supported in Schema for Order By: %s", - functionName)); - } + StringUtils.format("The following type is not supported by cast(): %s", castType)); + } + } + + /** + * @param field + * @return Schema.Type.TEXT or DOUBLE There are only two ORDER BY types (TEXT, NUMBER) in + * OpenSearch, so the Type that is returned here essentially indicates the category of the + * function as opposed to the actual return type. 
+ */ + public static Schema.Type getOrderByFieldType(Field field) { + String functionName = ((ScriptMethodField) field).getFunctionName().toLowerCase(); + if (functionName.equals("cast")) { + String castType = ((SQLCastExpr) field.getExpression()).getDataType().getName(); + return getCastFunctionReturnType(castType); + } + + if (numberOperators.contains(functionName) + || mathConstants.contains(functionName) + || trigFunctions.contains(functionName) + || binaryOperators.contains(functionName)) { + return Schema.Type.DOUBLE; + } else if (dateFunctions.contains(functionName)) { + if (functionName.equals("date_format") + || functionName.equals("now") + || functionName.equals("curdate") + || functionName.equals("date") + || functionName.equals("timestamp") + || functionName.equals("monthname")) { + return Schema.Type.TEXT; + } + return Schema.Type.DOUBLE; + } else if (stringFunctions.contains(functionName) || stringOperators.contains(functionName)) { + return Schema.Type.TEXT; + } + + throw new UnsupportedOperationException( + String.format( + "The following method is not supported in Schema for Order By: %s", functionName)); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/utils/StringUtils.java b/legacy/src/main/java/org/opensearch/sql/legacy/utils/StringUtils.java index 515d980db9..757ee49f6e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/utils/StringUtils.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/utils/StringUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.utils; import com.google.common.base.Strings; @@ -17,106 +16,102 @@ */ public class StringUtils { - /** - * Returns a formatted string using the specified format string and - * arguments, as well as the {@link Locale#ROOT} locale. 
- * - * @param format format string - * @param args arguments referenced by the format specifiers in the format string - * @return A formatted string - * @throws java.util.IllegalFormatException If a format string contains an illegal syntax, a format - * specifier that is incompatible with the given arguments, - * insufficient arguments given the format string, or other - * illegal conditions. - * @see java.lang.String#format(Locale, String, Object...) - */ - public static String format(final String format, Object... args) { - return String.format(Locale.ROOT, format, args); - } + /** + * Returns a formatted string using the specified format string and arguments, as well as the + * {@link Locale#ROOT} locale. + * + * @param format format string + * @param args arguments referenced by the format specifiers in the format string + * @return A formatted string + * @throws java.util.IllegalFormatException If a format string contains an illegal syntax, a + * format specifier that is incompatible with the given arguments, insufficient arguments + * given the format string, or other illegal conditions. + * @see java.lang.String#format(Locale, String, Object...) + */ + public static String format(final String format, Object... args) { + return String.format(Locale.ROOT, format, args); + } - /** - * Converts all of the characters in this {@code String} to lower - * case using the rules of the {@link Locale#ROOT} locale. This is equivalent to calling - * {@link String#toLowerCase(Locale)} with {@link Locale#ROOT}. - * - * @param input the input String - * @return the {@code String}, converted to lowercase - * @see java.lang.String#toLowerCase(Locale) - */ - public static String toLower(final String input) { - return input.toLowerCase(Locale.ROOT); - } + /** + * Converts all of the characters in this {@code String} to lower case using the rules of the + * {@link Locale#ROOT} locale. This is equivalent to calling {@link String#toLowerCase(Locale)} + * with {@link Locale#ROOT}. 
+ * + * @param input the input String + * @return the {@code String}, converted to lowercase + * @see java.lang.String#toLowerCase(Locale) + */ + public static String toLower(final String input) { + return input.toLowerCase(Locale.ROOT); + } - /** - * Converts all of the characters in this {@code String} to upper - * case using the rules of the {@link Locale#ROOT} locale. This is equivalent to calling - * {@link String#toUpperCase(Locale)} with {@link Locale#ROOT}. - * - * @param input the input String - * @return the {@code String}, converted to uppercase - * @see java.lang.String#toUpperCase(Locale) - */ - public static String toUpper(final String input) { - return input.toUpperCase(Locale.ROOT); - } + /** + * Converts all of the characters in this {@code String} to upper case using the rules of the + * {@link Locale#ROOT} locale. This is equivalent to calling {@link String#toUpperCase(Locale)} + * with {@link Locale#ROOT}. + * + * @param input the input String + * @return the {@code String}, converted to uppercase + * @see java.lang.String#toUpperCase(Locale) + */ + public static String toUpper(final String input) { + return input.toUpperCase(Locale.ROOT); + } - /** - * Count how many occurrences of character in this input {@code Sequence}. - * - * @param input the input string - * @param match char to be matched - * @return number of occurrences - */ - public static int countMatches(CharSequence input, char match) { - return Math.toIntExact(input.chars(). - filter(c -> c == match). - count()); - } + /** + * Count how many occurrences of character in this input {@code Sequence}. 
+ * + * @param input the input string + * @param match char to be matched + * @return number of occurrences + */ + public static int countMatches(CharSequence input, char match) { + return Math.toIntExact(input.chars().filter(c -> c == match).count()); + } - /** - * - * @param text string - * @param quote - * @return An unquoted string whose outer pair of back-ticks (if any) has been removed - */ - public static String unquoteSingleField(String text, String quote) { - if (isQuoted(text, quote)) { - return text.substring(quote.length(), text.length() - quote.length()); - } - return text; + /** + * @param text string + * @param quote + * @return An unquoted string whose outer pair of back-ticks (if any) has been removed + */ + public static String unquoteSingleField(String text, String quote) { + if (isQuoted(text, quote)) { + return text.substring(quote.length(), text.length() - quote.length()); } + return text; + } - public static String unquoteSingleField(String text) { - return unquoteSingleField(text, "`"); - } + public static String unquoteSingleField(String text) { + return unquoteSingleField(text, "`"); + } - /** - * - * @param text - * @return A string whose each dot-seperated field has been unquoted from back-ticks (if any) - */ - public static String unquoteFullColumn(String text, String quote) { - String[] strs = text.split("\\."); - for (int i = 0; i < strs.length; i++) { - String unquotedSubstr = unquoteSingleField(strs[i], quote); - strs[i] = unquotedSubstr; - } - return String.join(".", strs); + /** + * @param text + * @return A string whose each dot-separated field has been unquoted from back-ticks (if any) + */ + public static String unquoteFullColumn(String text, String quote) { + String[] strs = text.split("\\."); + for (int i = 0; i < strs.length; i++) { + String unquotedSubstr = unquoteSingleField(strs[i], quote); + strs[i] = unquotedSubstr; } + return String.join(".", strs); + } - public static String unquoteFullColumn(String text) { - return 
unquoteFullColumn(text, "`"); - } + public static String unquoteFullColumn(String text) { + return unquoteFullColumn(text, "`"); + } - public static boolean isQuoted(String text, String quote) { - return !Strings.isNullOrEmpty(text) && text.startsWith(quote) && text.endsWith(quote); - } + public static boolean isQuoted(String text, String quote) { + return !Strings.isNullOrEmpty(text) && text.startsWith(quote) && text.endsWith(quote); + } - public static boolean isNumeric(String text) { - return Doubles.tryParse(text) != null; - } + public static boolean isNumeric(String text) { + return Doubles.tryParse(text) != null; + } - private StringUtils() { - throw new AssertionError(getClass().getCanonicalName() + " is a utility class and must not be initialized"); - } + private StringUtils() { + throw new AssertionError( + getClass().getCanonicalName() + " is a utility class and must not be initialized"); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/utils/Util.java b/legacy/src/main/java/org/opensearch/sql/legacy/utils/Util.java index bd1b7f3865..632074bbbe 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/utils/Util.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/utils/Util.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.utils; import com.alibaba.druid.sql.ast.SQLExpr; @@ -38,231 +37,247 @@ import org.opensearch.sql.legacy.exception.SqlParseException; import org.opensearch.sql.legacy.parser.ElasticSqlExprParser; - public class Util { - public static final String NESTED_JOIN_TYPE = "NestedJoinType"; - - public static String joiner(List lists, String oper) { - - if (lists.size() == 0) { - return null; - } + public static final String NESTED_JOIN_TYPE = "NestedJoinType"; - StringBuilder sb = new StringBuilder(lists.get(0).toString()); - for (int i = 1; i < lists.size(); i++) { - sb.append(oper); - sb.append(lists.get(i).toString()); - } + public static String joiner(List lists, String 
oper) { - return sb.toString(); + if (lists.size() == 0) { + return null; } - public static Object removeTableAilasFromField(Object expr, String tableAlias) { - - if (expr instanceof SQLIdentifierExpr || expr instanceof SQLPropertyExpr || expr instanceof SQLVariantRefExpr) { - String name = expr.toString().replace("`", ""); - if (tableAlias != null) { - String aliasPrefix = tableAlias + "."; - if (name.startsWith(aliasPrefix)) { - String newFieldName = name.replaceFirst(aliasPrefix, ""); - return new SQLIdentifierExpr(newFieldName); - } - } - } - return expr; + StringBuilder sb = new StringBuilder(lists.get(0).toString()); + for (int i = 1; i < lists.size(); i++) { + sb.append(oper); + sb.append(lists.get(i).toString()); } + return sb.toString(); + } - public static Object expr2Object(SQLExpr expr) { - return expr2Object(expr, ""); - } + public static Object removeTableAilasFromField(Object expr, String tableAlias) { - public static Object expr2Object(SQLExpr expr, String charWithQuote) { - Object value = null; - if (expr instanceof SQLNumericLiteralExpr) { - value = ((SQLNumericLiteralExpr) expr).getNumber(); - } else if (expr instanceof SQLCharExpr) { - value = charWithQuote + ((SQLCharExpr) expr).getText() + charWithQuote; - } else if (expr instanceof SQLIdentifierExpr) { - value = expr.toString(); - } else if (expr instanceof SQLPropertyExpr) { - value = expr.toString(); - } else if (expr instanceof SQLVariantRefExpr) { - value = expr.toString(); - } else if (expr instanceof SQLAllColumnExpr) { - value = "*"; - } else if (expr instanceof SQLValuableExpr) { - value = ((SQLValuableExpr) expr).getValue(); - } else { - //throw new SqlParseException("can not support this type " + expr.getClass()); + if (expr instanceof SQLIdentifierExpr + || expr instanceof SQLPropertyExpr + || expr instanceof SQLVariantRefExpr) { + String name = expr.toString().replace("`", ""); + if (tableAlias != null) { + String aliasPrefix = tableAlias + "."; + if (name.startsWith(aliasPrefix)) 
{ + String newFieldName = name.replaceFirst(aliasPrefix, ""); + return new SQLIdentifierExpr(newFieldName); } - return value; + } } - - public static Object getScriptValue(SQLExpr expr) throws SqlParseException { - if (expr instanceof SQLIdentifierExpr || expr instanceof SQLPropertyExpr || expr instanceof SQLVariantRefExpr) { - return "doc['" + expr.toString() + "'].value"; - } else if (expr instanceof SQLValuableExpr) { - return ((SQLValuableExpr) expr).getValue(); - } - throw new SqlParseException("could not parse sqlBinaryOpExpr need to be identifier/valuable got" - + expr.getClass().toString() + " with value:" + expr.toString()); + return expr; + } + + public static Object expr2Object(SQLExpr expr) { + return expr2Object(expr, ""); + } + + public static Object expr2Object(SQLExpr expr, String charWithQuote) { + Object value = null; + if (expr instanceof SQLNumericLiteralExpr) { + value = ((SQLNumericLiteralExpr) expr).getNumber(); + } else if (expr instanceof SQLCharExpr) { + value = charWithQuote + ((SQLCharExpr) expr).getText() + charWithQuote; + } else if (expr instanceof SQLIdentifierExpr) { + value = expr.toString(); + } else if (expr instanceof SQLPropertyExpr) { + value = expr.toString(); + } else if (expr instanceof SQLVariantRefExpr) { + value = expr.toString(); + } else if (expr instanceof SQLAllColumnExpr) { + value = "*"; + } else if (expr instanceof SQLValuableExpr) { + value = ((SQLValuableExpr) expr).getValue(); + } else { + // throw new SqlParseException("can not support this type " + expr.getClass()); } - - public static Object getScriptValueWithQuote(SQLExpr expr, String quote) throws SqlParseException { - if (expr instanceof SQLIdentifierExpr || expr instanceof SQLPropertyExpr || expr instanceof SQLVariantRefExpr) { - return "doc['" + expr.toString() + "'].value"; - } else if (expr instanceof SQLCharExpr) { - return quote + ((SQLCharExpr) expr).getValue() + quote; - } else if (expr instanceof SQLIntegerExpr) { - return ((SQLIntegerExpr) 
expr).getValue(); - } else if (expr instanceof SQLNumericLiteralExpr) { - return ((SQLNumericLiteralExpr) expr).getNumber(); - } else if (expr instanceof SQLNullExpr) { - return ((SQLNullExpr) expr).toString().toLowerCase(); - } - throw new SqlParseException("could not parse sqlBinaryOpExpr need to be identifier/valuable got" - + expr.getClass().toString() + " with value:" + expr.toString()); + return value; + } + + public static Object getScriptValue(SQLExpr expr) throws SqlParseException { + if (expr instanceof SQLIdentifierExpr + || expr instanceof SQLPropertyExpr + || expr instanceof SQLVariantRefExpr) { + return "doc['" + expr.toString() + "'].value"; + } else if (expr instanceof SQLValuableExpr) { + return ((SQLValuableExpr) expr).getValue(); } - - public static boolean isFromJoinOrUnionTable(SQLExpr expr) { - SQLObject temp = expr; - AtomicInteger counter = new AtomicInteger(10); - while (temp != null && !(expr instanceof SQLSelectQueryBlock) - && !(expr instanceof SQLJoinTableSource) && !(expr instanceof SQLUnionQuery) && counter.get() > 0) { - counter.decrementAndGet(); - temp = temp.getParent(); - if (temp instanceof SQLSelectQueryBlock) { - SQLTableSource from = ((SQLSelectQueryBlock) temp).getFrom(); - if (from instanceof SQLJoinTableSource || from instanceof SQLUnionQuery) { - return true; - } - } - if (temp instanceof SQLJoinTableSource || temp instanceof SQLUnionQuery) { - return true; - } - } - return false; + throw new SqlParseException( + "could not parse sqlBinaryOpExpr need to be identifier/valuable got" + + expr.getClass().toString() + + " with value:" + + expr.toString()); + } + + public static Object getScriptValueWithQuote(SQLExpr expr, String quote) + throws SqlParseException { + if (expr instanceof SQLIdentifierExpr + || expr instanceof SQLPropertyExpr + || expr instanceof SQLVariantRefExpr) { + return "doc['" + expr.toString() + "'].value"; + } else if (expr instanceof SQLCharExpr) { + return quote + ((SQLCharExpr) expr).getValue() + 
quote; + } else if (expr instanceof SQLIntegerExpr) { + return ((SQLIntegerExpr) expr).getValue(); + } else if (expr instanceof SQLNumericLiteralExpr) { + return ((SQLNumericLiteralExpr) expr).getNumber(); + } else if (expr instanceof SQLNullExpr) { + return ((SQLNullExpr) expr).toString().toLowerCase(); } - - public static double[] KV2DoubleArr(List params) { - double[] ds = new double[params.size()]; - int i = 0; - for (KVValue v : params) { - ds[i] = Double.parseDouble(v.value.toString()); - i++; + throw new SqlParseException( + "could not parse sqlBinaryOpExpr need to be identifier/valuable got" + + expr.getClass().toString() + + " with value:" + + expr.toString()); + } + + public static boolean isFromJoinOrUnionTable(SQLExpr expr) { + SQLObject temp = expr; + AtomicInteger counter = new AtomicInteger(10); + while (temp != null + && !(expr instanceof SQLSelectQueryBlock) + && !(expr instanceof SQLJoinTableSource) + && !(expr instanceof SQLUnionQuery) + && counter.get() > 0) { + counter.decrementAndGet(); + temp = temp.getParent(); + if (temp instanceof SQLSelectQueryBlock) { + SQLTableSource from = ((SQLSelectQueryBlock) temp).getFrom(); + if (from instanceof SQLJoinTableSource || from instanceof SQLUnionQuery) { + return true; } - return ds; + } + if (temp instanceof SQLJoinTableSource || temp instanceof SQLUnionQuery) { + return true; + } } - - - public static String extendedToString(SQLExpr sqlExpr) { - if (sqlExpr instanceof SQLTextLiteralExpr) { - return ((SQLTextLiteralExpr) sqlExpr).getText(); - } - return sqlExpr.toString(); + return false; + } + + public static double[] KV2DoubleArr(List params) { + double[] ds = new double[params.size()]; + int i = 0; + for (KVValue v : params) { + ds[i] = Double.parseDouble(v.value.toString()); + i++; } + return ds; + } - public static String[] concatStringsArrays(String[] a1, String[] a2) { - String[] strings = new String[a1.length + a2.length]; - for (int i = 0; i < a1.length; i++) { - strings[i] = a1[i]; - } - for 
(int i = 0; i < a2.length; i++) { - strings[a1.length + i] = a2[i]; - } - return strings; + public static String extendedToString(SQLExpr sqlExpr) { + if (sqlExpr instanceof SQLTextLiteralExpr) { + return ((SQLTextLiteralExpr) sqlExpr).getText(); } + return sqlExpr.toString(); + } - public static Object searchPathInMap(Map fieldsMap, String[] path) { - Map currentObject = fieldsMap; - for (int i = 0; i < path.length - 1; i++) { - Object valueFromCurrentMap = currentObject.get(path[i]); - if (valueFromCurrentMap == null) { - return null; - } - if (!Map.class.isAssignableFrom(valueFromCurrentMap.getClass())) { - return null; - } - currentObject = (Map) valueFromCurrentMap; - } - return currentObject.get(path[path.length - 1]); + public static String[] concatStringsArrays(String[] a1, String[] a2) { + String[] strings = new String[a1.length + a2.length]; + for (int i = 0; i < a1.length; i++) { + strings[i] = a1[i]; + } + for (int i = 0; i < a2.length; i++) { + strings[a1.length + i] = a2[i]; } + return strings; + } + + public static Object searchPathInMap(Map fieldsMap, String[] path) { + Map currentObject = fieldsMap; + for (int i = 0; i < path.length - 1; i++) { + Object valueFromCurrentMap = currentObject.get(path[i]); + if (valueFromCurrentMap == null) { + return null; + } + if (!Map.class.isAssignableFrom(valueFromCurrentMap.getClass())) { + return null; + } + currentObject = (Map) valueFromCurrentMap; + } + return currentObject.get(path[path.length - 1]); + } - public static Object deepSearchInMap(Map fieldsMap, String field) { - if (field.contains(".")) { - String[] split = field.split("\\."); - return searchPathInMap(fieldsMap, split); - } - return fieldsMap.get(field); + public static Object deepSearchInMap(Map fieldsMap, String field) { + if (field.contains(".")) { + String[] split = field.split("\\."); + return searchPathInMap(fieldsMap, split); } + return fieldsMap.get(field); + } - public static boolean clearEmptyPaths(Map map) { - if (map.size() == 0) { 
- return true; - } - Set keysToDelete = new HashSet<>(); - for (Map.Entry entry : map.entrySet()) { - Object value = entry.getValue(); - if (Map.class.isAssignableFrom(value.getClass())) { - if (clearEmptyPaths((Map) value)) { - keysToDelete.add(entry.getKey()); - } - } - } - if (keysToDelete.size() != 0) { - if (map.size() == keysToDelete.size()) { - map.clear(); - return true; - } - for (String key : keysToDelete) { - // TODO: seems like a bug, either fix, or just get rid of for loop and remove the first key - map.remove(key); - return false; - } + public static boolean clearEmptyPaths(Map map) { + if (map.size() == 0) { + return true; + } + Set keysToDelete = new HashSet<>(); + for (Map.Entry entry : map.entrySet()) { + Object value = entry.getValue(); + if (Map.class.isAssignableFrom(value.getClass())) { + if (clearEmptyPaths((Map) value)) { + keysToDelete.add(entry.getKey()); } + } + } + if (keysToDelete.size() != 0) { + if (map.size() == keysToDelete.size()) { + map.clear(); + return true; + } + for (String key : keysToDelete) { + // TODO: seems like a bug, either fix, or just get rid of for loop and remove the first key + map.remove(key); return false; + } } - - public static GetIndexRequestBuilder prepareIndexRequestBuilder(Client client, IndexStatement statement) { - /* - * indexPattern represents wildcard as '.*' which is the regex syntax for matching anything but - * indexRequestBuilder uses the file-match syntax like UNIX which is just '*', so the pattern is converted - * in case its added to the request below - */ - String indexPattern = statement.getIndexPattern().replace(".*", "*"); - - /* - * Ideally all features should be removed from the indexRequest used in SHOW to prevent wasted data - * since only the index name is required in the JDBC format response. However, the type is obtained from the - * mappings response so this feature will need to be set if retrieving type is necessary in other formats. 
- * (For the time being it is included since the GUI returns types for SHOW queries) - */ - GetIndexRequestBuilder indexRequestBuilder = client.admin().indices() - .prepareGetIndex() - .setFeatures(GetIndexRequest.Feature.MAPPINGS) - .setLocal(true); - - /* - * Since the index request supports index names with wildcard (*) but not (.) it is checked for here so that the - * results returned can be reduced if possible (the regex checks in the ResultSet classes handle the rest). - */ - if (!indexPattern.contains(".")) { - indexRequestBuilder.addIndices(indexPattern); - } - - return indexRequestBuilder; + return false; + } + + public static GetIndexRequestBuilder prepareIndexRequestBuilder( + Client client, IndexStatement statement) { + /* + * indexPattern represents wildcard as '.*' which is the regex syntax for matching anything but + * indexRequestBuilder uses the file-match syntax like UNIX which is just '*', so the pattern is converted + * in case its added to the request below + */ + String indexPattern = statement.getIndexPattern().replace(".*", "*"); + + /* + * Ideally all features should be removed from the indexRequest used in SHOW to prevent wasted data + * since only the index name is required in the JDBC format response. However, the type is obtained from the + * mappings response so this feature will need to be set if retrieving type is necessary in other formats. + * (For the time being it is included since the GUI returns types for SHOW queries) + */ + GetIndexRequestBuilder indexRequestBuilder = + client + .admin() + .indices() + .prepareGetIndex() + .setFeatures(GetIndexRequest.Feature.MAPPINGS) + .setLocal(true); + + /* + * Since the index request supports index names with wildcard (*) but not (.) it is checked for here so that the + * results returned can be reduced if possible (the regex checks in the ResultSet classes handle the rest). 
+ */ + if (!indexPattern.contains(".")) { + indexRequestBuilder.addIndices(indexPattern); } - public static SQLExpr toSqlExpr(String sql) { - SQLExprParser parser = new ElasticSqlExprParser(sql); - SQLExpr expr = parser.expr(); + return indexRequestBuilder; + } - if (parser.getLexer().token() != Token.EOF) { - throw new ParserException("Illegal SQL expression : " + sql); - } - return expr; - } + public static SQLExpr toSqlExpr(String sql) { + SQLExprParser parser = new ElasticSqlExprParser(sql); + SQLExpr expr = parser.expr(); + if (parser.getLexer().token() != Token.EOF) { + throw new ParserException("Illegal SQL expression : " + sql); + } + return expr; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/SymbolSimilarityTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/SymbolSimilarityTest.java index a894f4311a..fbdcca2bb0 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/SymbolSimilarityTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/SymbolSimilarityTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr; import static java.util.Collections.emptyList; @@ -14,42 +13,39 @@ import org.junit.Assert; import org.junit.Test; -/** - * Test cases for symbol similarity - */ +/** Test cases for symbol similarity */ public class SymbolSimilarityTest { - @Test - public void noneCandidateShouldReturnTargetStringItself() { - String target = "test"; - String mostSimilarSymbol = new SimilarSymbols(emptyList()).mostSimilarTo(target); - Assert.assertEquals(target, mostSimilarSymbol); - } - - @Test - public void singleCandidateShouldReturnTheOnlyCandidate() { - String target = "test"; - String candidate = "hello"; - String mostSimilarSymbol = new SimilarSymbols(singletonList(candidate)).mostSimilarTo(target); - Assert.assertEquals(candidate, mostSimilarSymbol); - } - - @Test - public void twoCandidatesShouldReturnMostSimilarCandidate() { - String target = 
"test"; - String mostSimilar = "tests"; - List candidates = Arrays.asList("hello", mostSimilar); - String mostSimilarSymbol = new SimilarSymbols(candidates).mostSimilarTo(target); - Assert.assertEquals(mostSimilar, mostSimilarSymbol); - } - - @Test - public void manyCandidatesShouldReturnMostSimilarCandidate() { - String target = "test"; - String mostSimilar = "tests"; - List candidates = Arrays.asList("hello", mostSimilar, "world"); - String mostSimilarSymbol = new SimilarSymbols(candidates).mostSimilarTo(target); - Assert.assertEquals(mostSimilar, mostSimilarSymbol); - } - + @Test + public void noneCandidateShouldReturnTargetStringItself() { + String target = "test"; + String mostSimilarSymbol = new SimilarSymbols(emptyList()).mostSimilarTo(target); + Assert.assertEquals(target, mostSimilarSymbol); + } + + @Test + public void singleCandidateShouldReturnTheOnlyCandidate() { + String target = "test"; + String candidate = "hello"; + String mostSimilarSymbol = new SimilarSymbols(singletonList(candidate)).mostSimilarTo(target); + Assert.assertEquals(candidate, mostSimilarSymbol); + } + + @Test + public void twoCandidatesShouldReturnMostSimilarCandidate() { + String target = "test"; + String mostSimilar = "tests"; + List candidates = Arrays.asList("hello", mostSimilar); + String mostSimilarSymbol = new SimilarSymbols(candidates).mostSimilarTo(target); + Assert.assertEquals(mostSimilar, mostSimilarSymbol); + } + + @Test + public void manyCandidatesShouldReturnMostSimilarCandidate() { + String target = "test"; + String mostSimilar = "tests"; + List candidates = Arrays.asList("hello", mostSimilar, "world"); + String mostSimilarSymbol = new SimilarSymbols(candidates).mostSimilarTo(target); + Assert.assertEquals(mostSimilar, mostSimilarSymbol); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/SyntaxAnalysisTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/SyntaxAnalysisTest.java index bd71fd2500..765bb0616e 100644 --- 
a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/SyntaxAnalysisTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/SyntaxAnalysisTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr; import static java.util.stream.Collectors.toList; @@ -17,124 +16,115 @@ import org.opensearch.sql.legacy.antlr.syntax.SyntaxAnalysisException; /** - * Test cases focused on illegal syntax testing (denylist) along with a few normal cases not covered previously. - * All other normal cases should be covered in existing unit test and IT. + * Test cases focused on illegal syntax testing (denylist) along with a few normal cases not covered + * previously. All other normal cases should be covered in existing unit test and IT. */ public class SyntaxAnalysisTest { - /** public accessor is required by @Rule annotation */ - @Rule - public ExpectedException exception = ExpectedException.none(); - - private OpenSearchLegacySqlAnalyzer - analyzer = new OpenSearchLegacySqlAnalyzer(new SqlAnalysisConfig(true, true, 1000)); - - /** In reality exception occurs before reaching new parser for now */ - @Test - public void unsupportedKeywordShouldThrowException() { - expectValidationFailWithErrorMessage( - "INSERT INTO accounts VALUES ('a')", - "offending symbol [INSERT]" - ); - } - - /** - * Why we need to let it go and verify in semantic analyzer? - * Parser treats LOG123 a valid column and stops at '(' which gives wrong location and expected token - * In this case it's hard for parser to figure out if this is a wrong function name indeed or not. - * So we let it pass as an UDF and fail in semantic analyzer with meaningful message. 
- */ - @Test //(expected = SyntaxAnalysisException.class) - public void unsupportedFunctionShouldThrowException() { - validate("SELECT * FROM accounts WHERE LOG123(balance) = 1"); - } - - @Test - public void unsupportedOperatorShouldPassSyntaxCheck() { - expectValidationFailWithErrorMessage( - "SELECT * FROM accounts WHERE age <=> 1", - "offending symbol [>]" - ); - } - - @Test - public void missingFromClauseShouldThrowException() { - expectValidationFailWithErrorMessage( - "SELECT 1", - "offending symbol []" // parsing was unable to terminate normally - ); - } - - @Test - public void missingWhereKeywordShouldThrowException() { - expectValidationFailWithErrorMessage( - "SELECT * FROM accounts age = 1", - "offending symbol [=]", // parser thought 'age' is alias of 'accounts' and failed at '=' - "Expecting", ";" // "Expecting tokens in {, ';'}" + /** public accessor is required by @Rule annotation */ + @Rule public ExpectedException exception = ExpectedException.none(); + + private OpenSearchLegacySqlAnalyzer analyzer = + new OpenSearchLegacySqlAnalyzer(new SqlAnalysisConfig(true, true, 1000)); + + /** In reality exception occurs before reaching new parser for now */ + @Test + public void unsupportedKeywordShouldThrowException() { + expectValidationFailWithErrorMessage( + "INSERT INTO accounts VALUES ('a')", "offending symbol [INSERT]"); + } + + /** + * Why we need to let it go and verify in semantic analyzer? Parser treats LOG123 a valid column + * and stops at '(' which gives wrong location and expected token In this case it's hard for + * parser to figure out if this is a wrong function name indeed or not. So we let it pass as an + * UDF and fail in semantic analyzer with meaningful message. 
+ */ + @Test // (expected = SyntaxAnalysisException.class) + public void unsupportedFunctionShouldThrowException() { + validate("SELECT * FROM accounts WHERE LOG123(balance) = 1"); + } + + @Test + public void unsupportedOperatorShouldPassSyntaxCheck() { + expectValidationFailWithErrorMessage( + "SELECT * FROM accounts WHERE age <=> 1", "offending symbol [>]"); + } + + @Test + public void missingFromClauseShouldThrowException() { + expectValidationFailWithErrorMessage( + "SELECT 1", "offending symbol []" // parsing was unable to terminate normally ); - } - - @Test - public void someKeywordsShouldBeAbleToUseAsIdentifier() { - validate("SELECT AVG(balance) AS avg FROM accounts"); - } - - @Test - public void specialIndexNameShouldPass() { - validate("SELECT * FROM accounts/temp"); - validate("SELECT * FROM account*"); - validate("SELECT * FROM opensearch-accounts"); - validate("SELECT * FROM opensearch-account*"); - } - - @Test - public void typeNamePatternShouldThrowException() { - expectValidationFailWithErrorMessage( - "SELECT * FROM accounts/tem*", - "offending symbol [*]" + } + + @Test + public void missingWhereKeywordShouldThrowException() { + expectValidationFailWithErrorMessage( + "SELECT * FROM accounts age = 1", + "offending symbol [=]", // parser thought 'age' is alias of 'accounts' and failed at '=' + "Expecting", + ";" // "Expecting tokens in {, ';'}" ); - } - - @Test - public void systemIndexNameShouldPass() { - validate("SELECT * FROM .opensearch_dashboards"); - } - - @Test - public void useMetadataFieldShouldPass() { - validate("SELECT @timestamp FROM accounts"); - } - - @Test - public void leftJoinOnNestedFieldWithoutOnClauseShouldPass() { - validate("SELECT * FROM accounts a LEFT JOIN a.projects p"); - } - - @Test - public void useDeepNestedFieldShouldPass() { - validate("SELECT a.projects.name FROM accounts a"); - } - - /** As the translation is not supported for now, check this in semantic analyzer */ - @Test - public void 
arithmeticExpressionInWhereClauseShouldPass() { - validate("SELECT * FROM accounts WHERE age + 1 = 10"); - } - - @Test - public void queryEndWithSemiColonShouldPass() { - validate("SELECT * FROM accounts;"); - } - - private void expectValidationFailWithErrorMessage(String query, String... messages) { - exception.expect(SyntaxAnalysisException.class); - exception.expectMessage(allOf(Arrays.stream(messages). - map(Matchers::containsString). - collect(toList()))); - validate(query); - } - - private void validate(String sql) { - analyzer.analyzeSyntax(sql); - } + } + + @Test + public void someKeywordsShouldBeAbleToUseAsIdentifier() { + validate("SELECT AVG(balance) AS avg FROM accounts"); + } + + @Test + public void specialIndexNameShouldPass() { + validate("SELECT * FROM accounts/temp"); + validate("SELECT * FROM account*"); + validate("SELECT * FROM opensearch-accounts"); + validate("SELECT * FROM opensearch-account*"); + } + + @Test + public void typeNamePatternShouldThrowException() { + expectValidationFailWithErrorMessage("SELECT * FROM accounts/tem*", "offending symbol [*]"); + } + + @Test + public void systemIndexNameShouldPass() { + validate("SELECT * FROM .opensearch_dashboards"); + } + + @Test + public void useMetadataFieldShouldPass() { + validate("SELECT @timestamp FROM accounts"); + } + + @Test + public void leftJoinOnNestedFieldWithoutOnClauseShouldPass() { + validate("SELECT * FROM accounts a LEFT JOIN a.projects p"); + } + + @Test + public void useDeepNestedFieldShouldPass() { + validate("SELECT a.projects.name FROM accounts a"); + } + + /** As the translation is not supported for now, check this in semantic analyzer */ + @Test + public void arithmeticExpressionInWhereClauseShouldPass() { + validate("SELECT * FROM accounts WHERE age + 1 = 10"); + } + + @Test + public void queryEndWithSemiColonShouldPass() { + validate("SELECT * FROM accounts;"); + } + + private void expectValidationFailWithErrorMessage(String query, String... 
messages) { + exception.expect(SyntaxAnalysisException.class); + exception.expectMessage( + allOf(Arrays.stream(messages).map(Matchers::containsString).collect(toList()))); + validate(query); + } + + private void validate(String sql) { + analyzer.analyzeSyntax(sql); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerAggregateFunctionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerAggregateFunctionTest.java index 6671542298..df258270b9 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerAggregateFunctionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerAggregateFunctionTest.java @@ -3,154 +3,147 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; import org.junit.Ignore; import org.junit.Test; -/** - * Semantic analysis test for aggregate functions. - */ +/** Semantic analysis test for aggregate functions. 
*/ public class SemanticAnalyzerAggregateFunctionTest extends SemanticAnalyzerTestBase { - @Ignore("To be implemented") - @Test(expected = SemanticAnalysisException.class) - public void useAggregateFunctionInWhereClauseShouldFail() { - validate("SELECT * FROM semantics WHERE AVG(balance) > 10000"); - } - - @Test - public void useAggregateFunctionInSelectClauseShouldPass() { - validate( - "SELECT" + - " city," + - " COUNT(*)," + - " MAX(age)," + - " MIN(balance)," + - " AVG(manager.salary)," + - " SUM(balance)" + - "FROM semantics " + - "GROUP BY city"); - } - - @Test - public void useAggregateFunctionInSelectClauseWithoutGroupByShouldPass() { - validate( - "SELECT" + - " COUNT(*)," + - " MAX(age)," + - " MIN(balance)," + - " AVG(manager.salary)," + - " SUM(balance)" + - "FROM semantics"); - } - - @Test - public void countFunctionCallOnAnyFieldShouldPass() { - validate( - "SELECT" + - " COUNT(address)," + - " COUNT(age)," + - " COUNT(birthday)," + - " COUNT(location)," + - " COUNT(manager.address)," + - " COUNT(employer)" + - "FROM semantics"); - } - - @Test - public void maxFunctionCallOnTextFieldShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT MAX(address) FROM semantics", - "Function [MAX] cannot work with [TEXT].", - "Usage: MAX(NUMBER T) -> T" - ); - } - - @Test - public void minFunctionCallOnDateFieldShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT MIN(birthday) FROM semantics", - "Function [MIN] cannot work with [DATE].", - "Usage: MIN(NUMBER T) -> T" - ); - } - - @Test - public void avgFunctionCallOnBooleanFieldShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT AVG(p.active) FROM semantics s, s.projects p", - "Function [AVG] cannot work with [BOOLEAN].", - "Usage: AVG(NUMBER T) -> DOUBLE" - ); - } - - @Test - public void sumFunctionCallOnBooleanFieldShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT SUM(city) FROM semantics", - "Function [SUM] cannot work with [KEYWORD].", - "Usage: SUM(NUMBER 
T) -> T" - ); - } - - @Test - public void useAvgFunctionCallAliasInHavingClauseShouldPass() { - validate("SELECT city, AVG(age) AS avg FROM semantics GROUP BY city HAVING avg > 10"); - } - - @Test - public void useAvgAndMaxFunctionCallAliasInHavingClauseShouldPass() { - validate( - "SELECT city, AVG(age) AS avg, MAX(balance) AS bal FROM semantics " + - "GROUP BY city HAVING avg > 10 AND bal > 10000" - ); - } - - @Test - public void useAvgFunctionCallWithoutAliasInHavingShouldPass() { - validate("SELECT city, AVG(age) FROM semantics GROUP BY city HAVING AVG(age) > 10"); - } - - @Test - public void useDifferentAggregateFunctionInHavingClauseShouldPass() { - validate("SELECT city, AVG(age) FROM semantics GROUP BY city HAVING COUNT(*) > 10 AND SUM(balance) <= 10000"); - } - - @Test - public void useAvgFunctionCallAliasInOrderByClauseShouldPass() { - validate("SELECT city, AVG(age) AS avg FROM semantics GROUP BY city ORDER BY avg"); - } - - @Test - public void useAvgFunctionCallAliasInGroupByAndOrderByClauseShouldPass() { - validate("SELECT SUBSTRING(address, 0, 3) AS add FROM semantics GROUP BY add ORDER BY add"); - } - - @Test - public void useColumnNameAliasInOrderByClauseShouldPass() { - validate("SELECT age AS a, AVG(balance) FROM semantics GROUP BY age ORDER BY a"); - } - - @Test - public void useExpressionAliasInOrderByClauseShouldPass() { - validate("SELECT age + 1 AS a FROM semantics GROUP BY age ORDER BY a"); - } - - @Test - public void useAvgFunctionCallWithTextFieldInHavingClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT city FROM semantics GROUP BY city HAVING AVG(address) > 10", - "Function [AVG] cannot work with [TEXT].", - "Usage: AVG(NUMBER T) -> DOUBLE" - ); - } - - @Test - public void useCountFunctionCallWithNestedFieldShouldPass() { - validate("SELECT * FROM semantics s, s.projects p GROUP BY city HAVING COUNT(p) > 1"); - validate("SELECT * FROM semantics s, s.projects p, p.members m GROUP BY city HAVING COUNT(m) > 1"); - } - + 
@Ignore("To be implemented") + @Test(expected = SemanticAnalysisException.class) + public void useAggregateFunctionInWhereClauseShouldFail() { + validate("SELECT * FROM semantics WHERE AVG(balance) > 10000"); + } + + @Test + public void useAggregateFunctionInSelectClauseShouldPass() { + validate( + "SELECT" + + " city," + + " COUNT(*)," + + " MAX(age)," + + " MIN(balance)," + + " AVG(manager.salary)," + + " SUM(balance)" + + "FROM semantics " + + "GROUP BY city"); + } + + @Test + public void useAggregateFunctionInSelectClauseWithoutGroupByShouldPass() { + validate( + "SELECT" + + " COUNT(*)," + + " MAX(age)," + + " MIN(balance)," + + " AVG(manager.salary)," + + " SUM(balance)" + + "FROM semantics"); + } + + @Test + public void countFunctionCallOnAnyFieldShouldPass() { + validate( + "SELECT" + + " COUNT(address)," + + " COUNT(age)," + + " COUNT(birthday)," + + " COUNT(location)," + + " COUNT(manager.address)," + + " COUNT(employer)" + + "FROM semantics"); + } + + @Test + public void maxFunctionCallOnTextFieldShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT MAX(address) FROM semantics", + "Function [MAX] cannot work with [TEXT].", + "Usage: MAX(NUMBER T) -> T"); + } + + @Test + public void minFunctionCallOnDateFieldShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT MIN(birthday) FROM semantics", + "Function [MIN] cannot work with [DATE].", + "Usage: MIN(NUMBER T) -> T"); + } + + @Test + public void avgFunctionCallOnBooleanFieldShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT AVG(p.active) FROM semantics s, s.projects p", + "Function [AVG] cannot work with [BOOLEAN].", + "Usage: AVG(NUMBER T) -> DOUBLE"); + } + + @Test + public void sumFunctionCallOnBooleanFieldShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT SUM(city) FROM semantics", + "Function [SUM] cannot work with [KEYWORD].", + "Usage: SUM(NUMBER T) -> T"); + } + + @Test + public void useAvgFunctionCallAliasInHavingClauseShouldPass() { + 
validate("SELECT city, AVG(age) AS avg FROM semantics GROUP BY city HAVING avg > 10"); + } + + @Test + public void useAvgAndMaxFunctionCallAliasInHavingClauseShouldPass() { + validate( + "SELECT city, AVG(age) AS avg, MAX(balance) AS bal FROM semantics " + + "GROUP BY city HAVING avg > 10 AND bal > 10000"); + } + + @Test + public void useAvgFunctionCallWithoutAliasInHavingShouldPass() { + validate("SELECT city, AVG(age) FROM semantics GROUP BY city HAVING AVG(age) > 10"); + } + + @Test + public void useDifferentAggregateFunctionInHavingClauseShouldPass() { + validate( + "SELECT city, AVG(age) FROM semantics GROUP BY city HAVING COUNT(*) > 10 AND SUM(balance)" + + " <= 10000"); + } + + @Test + public void useAvgFunctionCallAliasInOrderByClauseShouldPass() { + validate("SELECT city, AVG(age) AS avg FROM semantics GROUP BY city ORDER BY avg"); + } + + @Test + public void useAvgFunctionCallAliasInGroupByAndOrderByClauseShouldPass() { + validate("SELECT SUBSTRING(address, 0, 3) AS add FROM semantics GROUP BY add ORDER BY add"); + } + + @Test + public void useColumnNameAliasInOrderByClauseShouldPass() { + validate("SELECT age AS a, AVG(balance) FROM semantics GROUP BY age ORDER BY a"); + } + + @Test + public void useExpressionAliasInOrderByClauseShouldPass() { + validate("SELECT age + 1 AS a FROM semantics GROUP BY age ORDER BY a"); + } + + @Test + public void useAvgFunctionCallWithTextFieldInHavingClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT city FROM semantics GROUP BY city HAVING AVG(address) > 10", + "Function [AVG] cannot work with [TEXT].", + "Usage: AVG(NUMBER T) -> DOUBLE"); + } + + @Test + public void useCountFunctionCallWithNestedFieldShouldPass() { + validate("SELECT * FROM semantics s, s.projects p GROUP BY city HAVING COUNT(p) > 1"); + validate( + "SELECT * FROM semantics s, s.projects p, p.members m GROUP BY city HAVING COUNT(m) > 1"); + } } diff --git 
a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerBasicTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerBasicTest.java index 1d594db2bc..1d5ff595f3 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerBasicTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerBasicTest.java @@ -3,13 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.hasEntry; -import static org.hamcrest.MatcherAssert.assertThat; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.BOOLEAN; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.DATE; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.DOUBLE; @@ -37,561 +36,576 @@ import org.opensearch.sql.legacy.esdomain.LocalClusterState; /** - * Semantic analysis test cases focused on basic scope building logic which is the cornerstone of analysis followed. - * The low abstraction here enumerating all present field names in each test case is intentional for better demonstration. + * Semantic analysis test cases focused on basic scope building logic which is the cornerstone of + * analysis followed. The low abstraction here enumerating all present field names in each test case + * is intentional for better demonstration. 
*/ public class SemanticAnalyzerBasicTest extends SemanticAnalyzerTestBase { - private SemanticContext context; - - private OpenSearchMappingLoader analyzer; + private SemanticContext context; - @Before - public void setUp() { - context = new SemanticContext(); - analyzer = new OpenSearchMappingLoader(context, LocalClusterState.state(), 1000); - } + private OpenSearchMappingLoader analyzer; - @Test - public void contextShouldIncludeAllFieldsAfterVisitingIndexNameInFromClause() { - analyzer.visitIndexName("semantics"); + @Before + public void setUp() { + context = new SemanticContext(); + analyzer = new OpenSearchMappingLoader(context, LocalClusterState.state(), 1000); + } - Map typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); - assertThat( - typeByName, - allOf( - aMapWithSize(21), - hasEntry("semantics", (Type) new OpenSearchIndex("semantics", INDEX)), - hasEntry("address", TEXT), - hasEntry("age", INTEGER), - hasEntry("balance", DOUBLE), - hasEntry("city", KEYWORD), - hasEntry("birthday", DATE), - hasEntry("location", GEO_POINT), - hasEntry("new_field", UNKNOWN), - hasEntry("field with spaces", TEXT), - hasEntry("employer", TEXT), - hasEntry("employer.keyword", KEYWORD), - hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), - hasEntry("projects.active", BOOLEAN), - hasEntry("projects.release", DATE), - hasEntry("projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), - hasEntry("projects.members.name", TEXT), - hasEntry("manager", OBJECT), - hasEntry("manager.name", TEXT), - hasEntry("manager.name.keyword", KEYWORD), - hasEntry("manager.address", KEYWORD), - hasEntry("manager.salary", LONG) - ) - ); + @Test + public void contextShouldIncludeAllFieldsAfterVisitingIndexNameInFromClause() { + analyzer.visitIndexName("semantics"); - analyzer.visitAs("", new OpenSearchIndex("semantics", INDEX)); - typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); - assertThat( - typeByName, - allOf( - 
aMapWithSize(41), - hasEntry("semantics", (Type) new OpenSearchIndex("semantics", INDEX)), - hasEntry("address", TEXT), - hasEntry("age", INTEGER), - hasEntry("balance", DOUBLE), - hasEntry("city", KEYWORD), - hasEntry("birthday", DATE), - hasEntry("location", GEO_POINT), - hasEntry("new_field", UNKNOWN), - hasEntry("field with spaces", TEXT), - hasEntry("employer", TEXT), - hasEntry("employer.keyword", KEYWORD), - hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), - hasEntry("projects.active", BOOLEAN), - hasEntry("projects.release", DATE), - hasEntry("projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), - hasEntry("projects.members.name", TEXT), - hasEntry("manager", OBJECT), - hasEntry("manager.name", TEXT), - hasEntry("manager.name.keyword", KEYWORD), - hasEntry("manager.address", KEYWORD), - hasEntry("manager.salary", LONG), - // These are also valid identifier in SQL - hasEntry("semantics.address", TEXT), - hasEntry("semantics.age", INTEGER), - hasEntry("semantics.balance", DOUBLE), - hasEntry("semantics.city", KEYWORD), - hasEntry("semantics.birthday", DATE), - hasEntry("semantics.location", GEO_POINT), - hasEntry("semantics.new_field", UNKNOWN), - hasEntry("semantics.field with spaces", TEXT), - hasEntry("semantics.employer", TEXT), - hasEntry("semantics.employer.keyword", KEYWORD), - hasEntry("semantics.projects", (Type) new OpenSearchIndex("semantics.projects", NESTED_FIELD)), - hasEntry("semantics.projects.active", BOOLEAN), - hasEntry("semantics.projects.release", DATE), - hasEntry("semantics.projects.members", (Type) new OpenSearchIndex("semantics.projects.members", NESTED_FIELD)), - hasEntry("semantics.projects.members.name", TEXT), - hasEntry("semantics.manager", OBJECT), - hasEntry("semantics.manager.name", TEXT), - hasEntry("semantics.manager.name.keyword", KEYWORD), - hasEntry("semantics.manager.address", KEYWORD), - hasEntry("semantics.manager.salary", LONG) - ) - ); - } + Map typeByName = 
context.peek().resolveAll(Namespace.FIELD_NAME); + assertThat( + typeByName, + allOf( + aMapWithSize(21), + hasEntry("semantics", (Type) new OpenSearchIndex("semantics", INDEX)), + hasEntry("address", TEXT), + hasEntry("age", INTEGER), + hasEntry("balance", DOUBLE), + hasEntry("city", KEYWORD), + hasEntry("birthday", DATE), + hasEntry("location", GEO_POINT), + hasEntry("new_field", UNKNOWN), + hasEntry("field with spaces", TEXT), + hasEntry("employer", TEXT), + hasEntry("employer.keyword", KEYWORD), + hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), + hasEntry("projects.active", BOOLEAN), + hasEntry("projects.release", DATE), + hasEntry( + "projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), + hasEntry("projects.members.name", TEXT), + hasEntry("manager", OBJECT), + hasEntry("manager.name", TEXT), + hasEntry("manager.name.keyword", KEYWORD), + hasEntry("manager.address", KEYWORD), + hasEntry("manager.salary", LONG))); - @Test - public void contextShouldIncludeAllFieldsPrefixedByIndexAliasAfterVisitingIndexNameWithAliasInFromClause() { - OpenSearchIndex indexType = new OpenSearchIndex("semantics", INDEX); - analyzer.visitIndexName("semantics"); - analyzer.visitAs("s", indexType); + analyzer.visitAs("", new OpenSearchIndex("semantics", INDEX)); + typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); + assertThat( + typeByName, + allOf( + aMapWithSize(41), + hasEntry("semantics", (Type) new OpenSearchIndex("semantics", INDEX)), + hasEntry("address", TEXT), + hasEntry("age", INTEGER), + hasEntry("balance", DOUBLE), + hasEntry("city", KEYWORD), + hasEntry("birthday", DATE), + hasEntry("location", GEO_POINT), + hasEntry("new_field", UNKNOWN), + hasEntry("field with spaces", TEXT), + hasEntry("employer", TEXT), + hasEntry("employer.keyword", KEYWORD), + hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), + hasEntry("projects.active", BOOLEAN), + hasEntry("projects.release", DATE), + 
hasEntry( + "projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), + hasEntry("projects.members.name", TEXT), + hasEntry("manager", OBJECT), + hasEntry("manager.name", TEXT), + hasEntry("manager.name.keyword", KEYWORD), + hasEntry("manager.address", KEYWORD), + hasEntry("manager.salary", LONG), + // These are also valid identifier in SQL + hasEntry("semantics.address", TEXT), + hasEntry("semantics.age", INTEGER), + hasEntry("semantics.balance", DOUBLE), + hasEntry("semantics.city", KEYWORD), + hasEntry("semantics.birthday", DATE), + hasEntry("semantics.location", GEO_POINT), + hasEntry("semantics.new_field", UNKNOWN), + hasEntry("semantics.field with spaces", TEXT), + hasEntry("semantics.employer", TEXT), + hasEntry("semantics.employer.keyword", KEYWORD), + hasEntry( + "semantics.projects", + (Type) new OpenSearchIndex("semantics.projects", NESTED_FIELD)), + hasEntry("semantics.projects.active", BOOLEAN), + hasEntry("semantics.projects.release", DATE), + hasEntry( + "semantics.projects.members", + (Type) new OpenSearchIndex("semantics.projects.members", NESTED_FIELD)), + hasEntry("semantics.projects.members.name", TEXT), + hasEntry("semantics.manager", OBJECT), + hasEntry("semantics.manager.name", TEXT), + hasEntry("semantics.manager.name.keyword", KEYWORD), + hasEntry("semantics.manager.address", KEYWORD), + hasEntry("semantics.manager.salary", LONG))); + } - Map typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); - assertThat( - typeByName, - allOf( - aMapWithSize(41), - hasEntry("semantics", (Type) indexType), - // These are also valid because alias is optional in SQL - hasEntry("address", TEXT), - hasEntry("age", INTEGER), - hasEntry("balance", DOUBLE), - hasEntry("city", KEYWORD), - hasEntry("birthday", DATE), - hasEntry("location", GEO_POINT), - hasEntry("new_field", UNKNOWN), - hasEntry("field with spaces", TEXT), - hasEntry("employer", TEXT), - hasEntry("employer.keyword", KEYWORD), - hasEntry("projects", (Type) new 
OpenSearchIndex("projects", NESTED_FIELD)), - hasEntry("projects.active", BOOLEAN), - hasEntry("projects.release", DATE), - hasEntry("projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), - hasEntry("projects.members.name", TEXT), - hasEntry("manager", OBJECT), - hasEntry("manager.name", TEXT), - hasEntry("manager.name.keyword", KEYWORD), - hasEntry("manager.address", KEYWORD), - hasEntry("manager.salary", LONG), - // These are valid because of alias specified - hasEntry("s.address", TEXT), - hasEntry("s.age", INTEGER), - hasEntry("s.balance", DOUBLE), - hasEntry("s.city", KEYWORD), - hasEntry("s.birthday", DATE), - hasEntry("s.location", GEO_POINT), - hasEntry("s.new_field", UNKNOWN), - hasEntry("s.field with spaces", TEXT), - hasEntry("s.employer", TEXT), - hasEntry("s.employer.keyword", KEYWORD), - hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), - hasEntry("s.projects.active", BOOLEAN), - hasEntry("s.projects.release", DATE), - hasEntry("s.projects.members", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), - hasEntry("s.projects.members.name", TEXT), - hasEntry("s.manager", OBJECT), - hasEntry("s.manager.name", TEXT), - hasEntry("s.manager.name.keyword", KEYWORD), - hasEntry("s.manager.address", KEYWORD), - hasEntry("s.manager.salary", LONG) - ) - ); - } + @Test + public void + contextShouldIncludeAllFieldsPrefixedByIndexAliasAfterVisitingIndexNameWithAliasInFromClause() { + OpenSearchIndex indexType = new OpenSearchIndex("semantics", INDEX); + analyzer.visitIndexName("semantics"); + analyzer.visitAs("s", indexType); - @Test - public void contextShouldIncludeSameFieldsAfterVisitingNestedFieldWithoutAliasInFromClause() { - OpenSearchIndex indexType = new OpenSearchIndex("semantics", INDEX); - analyzer.visitIndexName("semantics"); - analyzer.visitAs("s", indexType); - analyzer.visitIndexName("s.projects"); - analyzer.visitAs("", new OpenSearchIndex("s.projects", NESTED_FIELD)); + Map 
typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); + assertThat( + typeByName, + allOf( + aMapWithSize(41), + hasEntry("semantics", (Type) indexType), + // These are also valid because alias is optional in SQL + hasEntry("address", TEXT), + hasEntry("age", INTEGER), + hasEntry("balance", DOUBLE), + hasEntry("city", KEYWORD), + hasEntry("birthday", DATE), + hasEntry("location", GEO_POINT), + hasEntry("new_field", UNKNOWN), + hasEntry("field with spaces", TEXT), + hasEntry("employer", TEXT), + hasEntry("employer.keyword", KEYWORD), + hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), + hasEntry("projects.active", BOOLEAN), + hasEntry("projects.release", DATE), + hasEntry( + "projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), + hasEntry("projects.members.name", TEXT), + hasEntry("manager", OBJECT), + hasEntry("manager.name", TEXT), + hasEntry("manager.name.keyword", KEYWORD), + hasEntry("manager.address", KEYWORD), + hasEntry("manager.salary", LONG), + // These are valid because of alias specified + hasEntry("s.address", TEXT), + hasEntry("s.age", INTEGER), + hasEntry("s.balance", DOUBLE), + hasEntry("s.city", KEYWORD), + hasEntry("s.birthday", DATE), + hasEntry("s.location", GEO_POINT), + hasEntry("s.new_field", UNKNOWN), + hasEntry("s.field with spaces", TEXT), + hasEntry("s.employer", TEXT), + hasEntry("s.employer.keyword", KEYWORD), + hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), + hasEntry("s.projects.active", BOOLEAN), + hasEntry("s.projects.release", DATE), + hasEntry( + "s.projects.members", + (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), + hasEntry("s.projects.members.name", TEXT), + hasEntry("s.manager", OBJECT), + hasEntry("s.manager.name", TEXT), + hasEntry("s.manager.name.keyword", KEYWORD), + hasEntry("s.manager.address", KEYWORD), + hasEntry("s.manager.salary", LONG))); + } - Map typeByName = 
context.peek().resolveAll(Namespace.FIELD_NAME); - assertThat( - typeByName, - allOf( - aMapWithSize(41), - hasEntry("semantics", (Type) indexType), - // These are also valid because alias is optional in SQL - hasEntry("address", TEXT), - hasEntry("age", INTEGER), - hasEntry("balance", DOUBLE), - hasEntry("city", KEYWORD), - hasEntry("birthday", DATE), - hasEntry("location", GEO_POINT), - hasEntry("new_field", UNKNOWN), - hasEntry("field with spaces", TEXT), - hasEntry("employer", TEXT), - hasEntry("employer.keyword", KEYWORD), - hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), - hasEntry("projects.active", BOOLEAN), - hasEntry("projects.release", DATE), - hasEntry("projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), - hasEntry("projects.members.name", TEXT), - hasEntry("manager", OBJECT), - hasEntry("manager.name", TEXT), - hasEntry("manager.name.keyword", KEYWORD), - hasEntry("manager.address", KEYWORD), - hasEntry("manager.salary", LONG), - // These are valid because of alias specified - hasEntry("s.address", TEXT), - hasEntry("s.age", INTEGER), - hasEntry("s.balance", DOUBLE), - hasEntry("s.city", KEYWORD), - hasEntry("s.birthday", DATE), - hasEntry("s.location", GEO_POINT), - hasEntry("s.new_field", UNKNOWN), - hasEntry("s.field with spaces", TEXT), - hasEntry("s.employer", TEXT), - hasEntry("s.employer.keyword", KEYWORD), - hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), - hasEntry("s.projects.active", BOOLEAN), - hasEntry("s.projects.release", DATE), - hasEntry("s.projects.members", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), - hasEntry("s.projects.members.name", TEXT), - hasEntry("s.manager", OBJECT), - hasEntry("s.manager.name", TEXT), - hasEntry("s.manager.name.keyword", KEYWORD), - hasEntry("s.manager.address", KEYWORD), - hasEntry("s.manager.salary", LONG) - ) - ); - } + @Test + public void 
contextShouldIncludeSameFieldsAfterVisitingNestedFieldWithoutAliasInFromClause() { + OpenSearchIndex indexType = new OpenSearchIndex("semantics", INDEX); + analyzer.visitIndexName("semantics"); + analyzer.visitAs("s", indexType); + analyzer.visitIndexName("s.projects"); + analyzer.visitAs("", new OpenSearchIndex("s.projects", NESTED_FIELD)); - @Test - public void contextShouldIncludeMoreFieldsPrefixedByNestedFieldAliasAfterVisitingNestedFieldWithAliasInFromClause() { - OpenSearchIndex indexType = new OpenSearchIndex("semantics", INDEX); - analyzer.visitIndexName("semantics"); - analyzer.visitAs("s", indexType); - analyzer.visitIndexName("s.projects"); - analyzer.visitAs("p", new OpenSearchIndex("s.projects", NESTED_FIELD)); + Map typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); + assertThat( + typeByName, + allOf( + aMapWithSize(41), + hasEntry("semantics", (Type) indexType), + // These are also valid because alias is optional in SQL + hasEntry("address", TEXT), + hasEntry("age", INTEGER), + hasEntry("balance", DOUBLE), + hasEntry("city", KEYWORD), + hasEntry("birthday", DATE), + hasEntry("location", GEO_POINT), + hasEntry("new_field", UNKNOWN), + hasEntry("field with spaces", TEXT), + hasEntry("employer", TEXT), + hasEntry("employer.keyword", KEYWORD), + hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), + hasEntry("projects.active", BOOLEAN), + hasEntry("projects.release", DATE), + hasEntry( + "projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), + hasEntry("projects.members.name", TEXT), + hasEntry("manager", OBJECT), + hasEntry("manager.name", TEXT), + hasEntry("manager.name.keyword", KEYWORD), + hasEntry("manager.address", KEYWORD), + hasEntry("manager.salary", LONG), + // These are valid because of alias specified + hasEntry("s.address", TEXT), + hasEntry("s.age", INTEGER), + hasEntry("s.balance", DOUBLE), + hasEntry("s.city", KEYWORD), + hasEntry("s.birthday", DATE), + hasEntry("s.location", 
GEO_POINT), + hasEntry("s.new_field", UNKNOWN), + hasEntry("s.field with spaces", TEXT), + hasEntry("s.employer", TEXT), + hasEntry("s.employer.keyword", KEYWORD), + hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), + hasEntry("s.projects.active", BOOLEAN), + hasEntry("s.projects.release", DATE), + hasEntry( + "s.projects.members", + (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), + hasEntry("s.projects.members.name", TEXT), + hasEntry("s.manager", OBJECT), + hasEntry("s.manager.name", TEXT), + hasEntry("s.manager.name.keyword", KEYWORD), + hasEntry("s.manager.address", KEYWORD), + hasEntry("s.manager.salary", LONG))); + } - Map typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); - assertThat( - typeByName, - allOf( - aMapWithSize(46), - // These are also valid because alias is optional in SQL - hasEntry("semantics", (Type) indexType), - // These are also valid because alias is optional in SQL - hasEntry("address", TEXT), - hasEntry("age", INTEGER), - hasEntry("balance", DOUBLE), - hasEntry("city", KEYWORD), - hasEntry("birthday", DATE), - hasEntry("location", GEO_POINT), - hasEntry("new_field", UNKNOWN), - hasEntry("field with spaces", TEXT), - hasEntry("employer", TEXT), - hasEntry("employer.keyword", KEYWORD), - hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), - hasEntry("projects.active", BOOLEAN), - hasEntry("projects.release", DATE), - hasEntry("projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), - hasEntry("projects.members.name", TEXT), - hasEntry("manager", OBJECT), - hasEntry("manager.name", TEXT), - hasEntry("manager.name.keyword", KEYWORD), - hasEntry("manager.address", KEYWORD), - hasEntry("manager.salary", LONG), - // These are valid because of alias specified - hasEntry("s.address", TEXT), - hasEntry("s.age", INTEGER), - hasEntry("s.balance", DOUBLE), - hasEntry("s.city", KEYWORD), - hasEntry("s.birthday", DATE), - hasEntry("s.location", 
GEO_POINT), - hasEntry("s.new_field", UNKNOWN), - hasEntry("s.field with spaces", TEXT), - hasEntry("s.employer", TEXT), - hasEntry("s.employer.keyword", KEYWORD), - hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), - hasEntry("s.projects.active", BOOLEAN), - hasEntry("s.projects.release", DATE), - hasEntry("s.projects.members", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), - hasEntry("s.projects.members.name", TEXT), - hasEntry("s.manager", OBJECT), - hasEntry("s.manager.name", TEXT), - hasEntry("s.manager.name.keyword", KEYWORD), - hasEntry("s.manager.address", KEYWORD), - hasEntry("s.manager.salary", LONG), - // Valid because of nested field alias specified - hasEntry("p", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), - hasEntry("p.active", BOOLEAN), - hasEntry("p.release", DATE), - hasEntry("p.members", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), - hasEntry("p.members.name", TEXT) - ) - ); - } + @Test + public void + contextShouldIncludeMoreFieldsPrefixedByNestedFieldAliasAfterVisitingNestedFieldWithAliasInFromClause() { + OpenSearchIndex indexType = new OpenSearchIndex("semantics", INDEX); + analyzer.visitIndexName("semantics"); + analyzer.visitAs("s", indexType); + analyzer.visitIndexName("s.projects"); + analyzer.visitAs("p", new OpenSearchIndex("s.projects", NESTED_FIELD)); - @Test - public void contextShouldIncludeMoreFieldsPrefixedByNestedFieldAliasAfterVisitingDeepNestedFieldWithAliasInFromClause() { - OpenSearchIndex indexType = new OpenSearchIndex("semantics", INDEX); - analyzer.visitIndexName("semantics"); - analyzer.visitAs("s", indexType); - analyzer.visitIndexName("s.projects.members"); - analyzer.visitAs("m", new OpenSearchIndex("s.projects.members", NESTED_FIELD)); + Map typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); + assertThat( + typeByName, + allOf( + aMapWithSize(46), + // These are also valid because alias is optional in SQL + 
hasEntry("semantics", (Type) indexType), + // These are also valid because alias is optional in SQL + hasEntry("address", TEXT), + hasEntry("age", INTEGER), + hasEntry("balance", DOUBLE), + hasEntry("city", KEYWORD), + hasEntry("birthday", DATE), + hasEntry("location", GEO_POINT), + hasEntry("new_field", UNKNOWN), + hasEntry("field with spaces", TEXT), + hasEntry("employer", TEXT), + hasEntry("employer.keyword", KEYWORD), + hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), + hasEntry("projects.active", BOOLEAN), + hasEntry("projects.release", DATE), + hasEntry( + "projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), + hasEntry("projects.members.name", TEXT), + hasEntry("manager", OBJECT), + hasEntry("manager.name", TEXT), + hasEntry("manager.name.keyword", KEYWORD), + hasEntry("manager.address", KEYWORD), + hasEntry("manager.salary", LONG), + // These are valid because of alias specified + hasEntry("s.address", TEXT), + hasEntry("s.age", INTEGER), + hasEntry("s.balance", DOUBLE), + hasEntry("s.city", KEYWORD), + hasEntry("s.birthday", DATE), + hasEntry("s.location", GEO_POINT), + hasEntry("s.new_field", UNKNOWN), + hasEntry("s.field with spaces", TEXT), + hasEntry("s.employer", TEXT), + hasEntry("s.employer.keyword", KEYWORD), + hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), + hasEntry("s.projects.active", BOOLEAN), + hasEntry("s.projects.release", DATE), + hasEntry( + "s.projects.members", + (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), + hasEntry("s.projects.members.name", TEXT), + hasEntry("s.manager", OBJECT), + hasEntry("s.manager.name", TEXT), + hasEntry("s.manager.name.keyword", KEYWORD), + hasEntry("s.manager.address", KEYWORD), + hasEntry("s.manager.salary", LONG), + // Valid because of nested field alias specified + hasEntry("p", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), + hasEntry("p.active", BOOLEAN), + hasEntry("p.release", DATE), + 
hasEntry("p.members", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), + hasEntry("p.members.name", TEXT))); + } - Map typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); + @Test + public void + contextShouldIncludeMoreFieldsPrefixedByNestedFieldAliasAfterVisitingDeepNestedFieldWithAliasInFromClause() { + OpenSearchIndex indexType = new OpenSearchIndex("semantics", INDEX); + analyzer.visitIndexName("semantics"); + analyzer.visitAs("s", indexType); + analyzer.visitIndexName("s.projects.members"); + analyzer.visitAs("m", new OpenSearchIndex("s.projects.members", NESTED_FIELD)); - assertThat( - typeByName, - allOf( - aMapWithSize(43), - hasEntry("semantics", (Type) indexType), - // These are also valid because alias is optional in SQL - hasEntry("address", TEXT), - hasEntry("age", INTEGER), - hasEntry("balance", DOUBLE), - hasEntry("city", KEYWORD), - hasEntry("birthday", DATE), - hasEntry("location", GEO_POINT), - hasEntry("new_field", UNKNOWN), - hasEntry("field with spaces", TEXT), - hasEntry("employer", TEXT), - hasEntry("employer.keyword", KEYWORD), - hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), - hasEntry("projects.active", BOOLEAN), - hasEntry("projects.release", DATE), - hasEntry("projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), - hasEntry("projects.members.name", TEXT), - hasEntry("manager", OBJECT), - hasEntry("manager.name", TEXT), - hasEntry("manager.name.keyword", KEYWORD), - hasEntry("manager.address", KEYWORD), - hasEntry("manager.salary", LONG), - // These are valid because of alias specified - hasEntry("s.address", TEXT), - hasEntry("s.age", INTEGER), - hasEntry("s.balance", DOUBLE), - hasEntry("s.city", KEYWORD), - hasEntry("s.birthday", DATE), - hasEntry("s.location", GEO_POINT), - hasEntry("s.new_field", UNKNOWN), - hasEntry("s.field with spaces", TEXT), - hasEntry("s.employer", TEXT), - hasEntry("s.employer.keyword", KEYWORD), - hasEntry("s.projects", (Type) 
new OpenSearchIndex("s.projects", NESTED_FIELD)), - hasEntry("s.projects.active", BOOLEAN), - hasEntry("s.projects.release", DATE), - hasEntry("s.projects.members", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), - hasEntry("s.projects.members.name", TEXT), - hasEntry("s.manager", OBJECT), - hasEntry("s.manager.name", TEXT), - hasEntry("s.manager.name.keyword", KEYWORD), - hasEntry("s.manager.address", KEYWORD), - hasEntry("s.manager.salary", LONG), - // Valid because of deep nested field alias specified - hasEntry("m", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), - hasEntry("m.name", TEXT) - ) - ); - } + Map typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); - @Test - public void contextShouldIncludeMoreFieldsPrefixedByNestedFieldAliasAfterVisitingAllNestedFieldsWithAliasInFromClause() { - OpenSearchIndex indexType = new OpenSearchIndex("semantics", INDEX); - analyzer.visitIndexName("semantics"); - analyzer.visitAs("s", indexType); - analyzer.visitIndexName("s.projects"); - analyzer.visitAs("p", new OpenSearchIndex("s.projects", NESTED_FIELD)); - analyzer.visitIndexName("s.projects.members"); - analyzer.visitAs("m", new OpenSearchIndex("s.projects.members", NESTED_FIELD)); + assertThat( + typeByName, + allOf( + aMapWithSize(43), + hasEntry("semantics", (Type) indexType), + // These are also valid because alias is optional in SQL + hasEntry("address", TEXT), + hasEntry("age", INTEGER), + hasEntry("balance", DOUBLE), + hasEntry("city", KEYWORD), + hasEntry("birthday", DATE), + hasEntry("location", GEO_POINT), + hasEntry("new_field", UNKNOWN), + hasEntry("field with spaces", TEXT), + hasEntry("employer", TEXT), + hasEntry("employer.keyword", KEYWORD), + hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), + hasEntry("projects.active", BOOLEAN), + hasEntry("projects.release", DATE), + hasEntry( + "projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), + 
hasEntry("projects.members.name", TEXT), + hasEntry("manager", OBJECT), + hasEntry("manager.name", TEXT), + hasEntry("manager.name.keyword", KEYWORD), + hasEntry("manager.address", KEYWORD), + hasEntry("manager.salary", LONG), + // These are valid because of alias specified + hasEntry("s.address", TEXT), + hasEntry("s.age", INTEGER), + hasEntry("s.balance", DOUBLE), + hasEntry("s.city", KEYWORD), + hasEntry("s.birthday", DATE), + hasEntry("s.location", GEO_POINT), + hasEntry("s.new_field", UNKNOWN), + hasEntry("s.field with spaces", TEXT), + hasEntry("s.employer", TEXT), + hasEntry("s.employer.keyword", KEYWORD), + hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), + hasEntry("s.projects.active", BOOLEAN), + hasEntry("s.projects.release", DATE), + hasEntry( + "s.projects.members", + (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), + hasEntry("s.projects.members.name", TEXT), + hasEntry("s.manager", OBJECT), + hasEntry("s.manager.name", TEXT), + hasEntry("s.manager.name.keyword", KEYWORD), + hasEntry("s.manager.address", KEYWORD), + hasEntry("s.manager.salary", LONG), + // Valid because of deep nested field alias specified + hasEntry("m", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), + hasEntry("m.name", TEXT))); + } - Map typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); - assertThat( - typeByName, - allOf( - aMapWithSize(48), - hasEntry("semantics", (Type) indexType), - // These are also valid because alias is optional in SQL - hasEntry("address", TEXT), - hasEntry("age", INTEGER), - hasEntry("balance", DOUBLE), - hasEntry("city", KEYWORD), - hasEntry("birthday", DATE), - hasEntry("location", GEO_POINT), - hasEntry("new_field", UNKNOWN), - hasEntry("field with spaces", TEXT), - hasEntry("employer", TEXT), - hasEntry("employer.keyword", KEYWORD), - hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), - hasEntry("projects.active", BOOLEAN), - 
hasEntry("projects.release", DATE), - hasEntry("projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), - hasEntry("projects.members.name", TEXT), - hasEntry("manager", OBJECT), - hasEntry("manager.name", TEXT), - hasEntry("manager.name.keyword", KEYWORD), - hasEntry("manager.address", KEYWORD), - hasEntry("manager.salary", LONG), - // These are valid because of alias specified - hasEntry("s.address", TEXT), - hasEntry("s.age", INTEGER), - hasEntry("s.balance", DOUBLE), - hasEntry("s.city", KEYWORD), - hasEntry("s.birthday", DATE), - hasEntry("s.location", GEO_POINT), - hasEntry("s.new_field", UNKNOWN), - hasEntry("s.field with spaces", TEXT), - hasEntry("s.employer", TEXT), - hasEntry("s.employer.keyword", KEYWORD), - hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), - hasEntry("s.projects.active", BOOLEAN), - hasEntry("s.projects.release", DATE), - hasEntry("s.projects.members", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), - hasEntry("s.projects.members.name", TEXT), - hasEntry("s.manager", OBJECT), - hasEntry("s.manager.name", TEXT), - hasEntry("s.manager.name.keyword", KEYWORD), - hasEntry("s.manager.address", KEYWORD), - hasEntry("s.manager.salary", LONG), - // Valid because of nested field alias specified - hasEntry("p", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), - hasEntry("p.active", BOOLEAN), - hasEntry("p.release", DATE), - hasEntry("p.members", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), - hasEntry("p.members.name", TEXT), - // Valid because of deep nested field alias specified - hasEntry("m", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), - hasEntry("m.name", TEXT) - ) - ); - } + @Test + public void + contextShouldIncludeMoreFieldsPrefixedByNestedFieldAliasAfterVisitingAllNestedFieldsWithAliasInFromClause() { + OpenSearchIndex indexType = new OpenSearchIndex("semantics", INDEX); + analyzer.visitIndexName("semantics"); + 
analyzer.visitAs("s", indexType); + analyzer.visitIndexName("s.projects"); + analyzer.visitAs("p", new OpenSearchIndex("s.projects", NESTED_FIELD)); + analyzer.visitIndexName("s.projects.members"); + analyzer.visitAs("m", new OpenSearchIndex("s.projects.members", NESTED_FIELD)); - @Test - public void contextShouldIncludeMoreFieldsPrefixedByNestedFieldAliasAfterVisitingNestedFieldWithAliasInSubqueryFromClause() { - OpenSearchIndex indexType = new OpenSearchIndex("semantics", INDEX); - analyzer.visitIndexName("semantics"); - analyzer.visitAs("s", indexType); + Map typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); + assertThat( + typeByName, + allOf( + aMapWithSize(48), + hasEntry("semantics", (Type) indexType), + // These are also valid because alias is optional in SQL + hasEntry("address", TEXT), + hasEntry("age", INTEGER), + hasEntry("balance", DOUBLE), + hasEntry("city", KEYWORD), + hasEntry("birthday", DATE), + hasEntry("location", GEO_POINT), + hasEntry("new_field", UNKNOWN), + hasEntry("field with spaces", TEXT), + hasEntry("employer", TEXT), + hasEntry("employer.keyword", KEYWORD), + hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), + hasEntry("projects.active", BOOLEAN), + hasEntry("projects.release", DATE), + hasEntry( + "projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), + hasEntry("projects.members.name", TEXT), + hasEntry("manager", OBJECT), + hasEntry("manager.name", TEXT), + hasEntry("manager.name.keyword", KEYWORD), + hasEntry("manager.address", KEYWORD), + hasEntry("manager.salary", LONG), + // These are valid because of alias specified + hasEntry("s.address", TEXT), + hasEntry("s.age", INTEGER), + hasEntry("s.balance", DOUBLE), + hasEntry("s.city", KEYWORD), + hasEntry("s.birthday", DATE), + hasEntry("s.location", GEO_POINT), + hasEntry("s.new_field", UNKNOWN), + hasEntry("s.field with spaces", TEXT), + hasEntry("s.employer", TEXT), + hasEntry("s.employer.keyword", KEYWORD), + 
hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), + hasEntry("s.projects.active", BOOLEAN), + hasEntry("s.projects.release", DATE), + hasEntry( + "s.projects.members", + (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), + hasEntry("s.projects.members.name", TEXT), + hasEntry("s.manager", OBJECT), + hasEntry("s.manager.name", TEXT), + hasEntry("s.manager.name.keyword", KEYWORD), + hasEntry("s.manager.address", KEYWORD), + hasEntry("s.manager.salary", LONG), + // Valid because of nested field alias specified + hasEntry("p", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), + hasEntry("p.active", BOOLEAN), + hasEntry("p.release", DATE), + hasEntry("p.members", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), + hasEntry("p.members.name", TEXT), + // Valid because of deep nested field alias specified + hasEntry("m", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), + hasEntry("m.name", TEXT))); + } - context.push(); - analyzer.visitIndexName("s.projects"); - analyzer.visitAs("p", new OpenSearchIndex("s.projects", NESTED_FIELD)); + @Test + public void + contextShouldIncludeMoreFieldsPrefixedByNestedFieldAliasAfterVisitingNestedFieldWithAliasInSubqueryFromClause() { + OpenSearchIndex indexType = new OpenSearchIndex("semantics", INDEX); + analyzer.visitIndexName("semantics"); + analyzer.visitAs("s", indexType); - Map typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); - assertThat( - typeByName, - allOf( - aMapWithSize(46), - // These are also valid because alias is optional in SQL - hasEntry("semantics", (Type) indexType), - // These are also valid because alias is optional in SQL - hasEntry("address", TEXT), - hasEntry("age", INTEGER), - hasEntry("balance", DOUBLE), - hasEntry("city", KEYWORD), - hasEntry("birthday", DATE), - hasEntry("location", GEO_POINT), - hasEntry("new_field", UNKNOWN), - hasEntry("field with spaces", TEXT), - hasEntry("employer", TEXT), - 
hasEntry("employer.keyword", KEYWORD), - hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), - hasEntry("projects.active", BOOLEAN), - hasEntry("projects.release", DATE), - hasEntry("projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), - hasEntry("projects.members.name", TEXT), - hasEntry("manager", OBJECT), - hasEntry("manager.name", TEXT), - hasEntry("manager.name.keyword", KEYWORD), - hasEntry("manager.address", KEYWORD), - hasEntry("manager.salary", LONG), - // These are valid because of alias specified - hasEntry("s.address", TEXT), - hasEntry("s.age", INTEGER), - hasEntry("s.balance", DOUBLE), - hasEntry("s.city", KEYWORD), - hasEntry("s.birthday", DATE), - hasEntry("s.location", GEO_POINT), - hasEntry("s.new_field", UNKNOWN), - hasEntry("s.field with spaces", TEXT), - hasEntry("s.employer", TEXT), - hasEntry("s.employer.keyword", KEYWORD), - hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), - hasEntry("s.projects.active", BOOLEAN), - hasEntry("s.projects.release", DATE), - hasEntry("s.projects.members", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), - hasEntry("s.projects.members.name", TEXT), - hasEntry("s.manager", OBJECT), - hasEntry("s.manager.name", TEXT), - hasEntry("s.manager.name.keyword", KEYWORD), - hasEntry("s.manager.address", KEYWORD), - hasEntry("s.manager.salary", LONG), - // Valid because of nested field alias specified - hasEntry("p", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), - hasEntry("p.active", BOOLEAN), - hasEntry("p.release", DATE), - hasEntry("p.members", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), - hasEntry("p.members.name", TEXT) - ) - ); + context.push(); + analyzer.visitIndexName("s.projects"); + analyzer.visitAs("p", new OpenSearchIndex("s.projects", NESTED_FIELD)); - context.pop(); - typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); - assertThat( - typeByName, - allOf( - 
aMapWithSize(41), - hasEntry("semantics", (Type) indexType), - // These are also valid because alias is optional in SQL - hasEntry("address", TEXT), - hasEntry("age", INTEGER), - hasEntry("balance", DOUBLE), - hasEntry("city", KEYWORD), - hasEntry("birthday", DATE), - hasEntry("location", GEO_POINT), - hasEntry("new_field", UNKNOWN), - hasEntry("field with spaces", TEXT), - hasEntry("employer", TEXT), - hasEntry("employer.keyword", KEYWORD), - hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), - hasEntry("projects.active", BOOLEAN), - hasEntry("projects.release", DATE), - hasEntry("projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), - hasEntry("projects.members.name", TEXT), - hasEntry("manager", OBJECT), - hasEntry("manager.name", TEXT), - hasEntry("manager.name.keyword", KEYWORD), - hasEntry("manager.address", KEYWORD), - hasEntry("manager.salary", LONG), - // These are valid because of alias specified - hasEntry("s.address", TEXT), - hasEntry("s.age", INTEGER), - hasEntry("s.balance", DOUBLE), - hasEntry("s.city", KEYWORD), - hasEntry("s.birthday", DATE), - hasEntry("s.location", GEO_POINT), - hasEntry("s.new_field", UNKNOWN), - hasEntry("s.field with spaces", TEXT), - hasEntry("s.employer", TEXT), - hasEntry("s.employer.keyword", KEYWORD), - hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), - hasEntry("s.projects.active", BOOLEAN), - hasEntry("s.projects.release", DATE), - hasEntry("s.projects.members", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), - hasEntry("s.projects.members.name", TEXT), - hasEntry("s.manager", OBJECT), - hasEntry("s.manager.name", TEXT), - hasEntry("s.manager.name.keyword", KEYWORD), - hasEntry("s.manager.address", KEYWORD), - hasEntry("s.manager.salary", LONG) - ) - ); - } + Map typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); + assertThat( + typeByName, + allOf( + aMapWithSize(46), + // These are also valid because alias 
is optional in SQL + hasEntry("semantics", (Type) indexType), + // These are also valid because alias is optional in SQL + hasEntry("address", TEXT), + hasEntry("age", INTEGER), + hasEntry("balance", DOUBLE), + hasEntry("city", KEYWORD), + hasEntry("birthday", DATE), + hasEntry("location", GEO_POINT), + hasEntry("new_field", UNKNOWN), + hasEntry("field with spaces", TEXT), + hasEntry("employer", TEXT), + hasEntry("employer.keyword", KEYWORD), + hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), + hasEntry("projects.active", BOOLEAN), + hasEntry("projects.release", DATE), + hasEntry( + "projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), + hasEntry("projects.members.name", TEXT), + hasEntry("manager", OBJECT), + hasEntry("manager.name", TEXT), + hasEntry("manager.name.keyword", KEYWORD), + hasEntry("manager.address", KEYWORD), + hasEntry("manager.salary", LONG), + // These are valid because of alias specified + hasEntry("s.address", TEXT), + hasEntry("s.age", INTEGER), + hasEntry("s.balance", DOUBLE), + hasEntry("s.city", KEYWORD), + hasEntry("s.birthday", DATE), + hasEntry("s.location", GEO_POINT), + hasEntry("s.new_field", UNKNOWN), + hasEntry("s.field with spaces", TEXT), + hasEntry("s.employer", TEXT), + hasEntry("s.employer.keyword", KEYWORD), + hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), + hasEntry("s.projects.active", BOOLEAN), + hasEntry("s.projects.release", DATE), + hasEntry( + "s.projects.members", + (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), + hasEntry("s.projects.members.name", TEXT), + hasEntry("s.manager", OBJECT), + hasEntry("s.manager.name", TEXT), + hasEntry("s.manager.name.keyword", KEYWORD), + hasEntry("s.manager.address", KEYWORD), + hasEntry("s.manager.salary", LONG), + // Valid because of nested field alias specified + hasEntry("p", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), + hasEntry("p.active", BOOLEAN), + 
hasEntry("p.release", DATE), + hasEntry("p.members", (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), + hasEntry("p.members.name", TEXT))); - @Test - public void fieldWithUnknownEsTypeShouldPass() { - analyzer.visitIndexName("semantics"); - Optional type = context.peek().resolve(new Symbol(Namespace.FIELD_NAME, "new_field")); - Assert.assertTrue(type.isPresent()); - Assert.assertSame(UNKNOWN, type.get()); - } + context.pop(); + typeByName = context.peek().resolveAll(Namespace.FIELD_NAME); + assertThat( + typeByName, + allOf( + aMapWithSize(41), + hasEntry("semantics", (Type) indexType), + // These are also valid because alias is optional in SQL + hasEntry("address", TEXT), + hasEntry("age", INTEGER), + hasEntry("balance", DOUBLE), + hasEntry("city", KEYWORD), + hasEntry("birthday", DATE), + hasEntry("location", GEO_POINT), + hasEntry("new_field", UNKNOWN), + hasEntry("field with spaces", TEXT), + hasEntry("employer", TEXT), + hasEntry("employer.keyword", KEYWORD), + hasEntry("projects", (Type) new OpenSearchIndex("projects", NESTED_FIELD)), + hasEntry("projects.active", BOOLEAN), + hasEntry("projects.release", DATE), + hasEntry( + "projects.members", (Type) new OpenSearchIndex("projects.members", NESTED_FIELD)), + hasEntry("projects.members.name", TEXT), + hasEntry("manager", OBJECT), + hasEntry("manager.name", TEXT), + hasEntry("manager.name.keyword", KEYWORD), + hasEntry("manager.address", KEYWORD), + hasEntry("manager.salary", LONG), + // These are valid because of alias specified + hasEntry("s.address", TEXT), + hasEntry("s.age", INTEGER), + hasEntry("s.balance", DOUBLE), + hasEntry("s.city", KEYWORD), + hasEntry("s.birthday", DATE), + hasEntry("s.location", GEO_POINT), + hasEntry("s.new_field", UNKNOWN), + hasEntry("s.field with spaces", TEXT), + hasEntry("s.employer", TEXT), + hasEntry("s.employer.keyword", KEYWORD), + hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), + hasEntry("s.projects.active", BOOLEAN), + 
hasEntry("s.projects.release", DATE), + hasEntry( + "s.projects.members", + (Type) new OpenSearchIndex("s.projects.members", NESTED_FIELD)), + hasEntry("s.projects.members.name", TEXT), + hasEntry("s.manager", OBJECT), + hasEntry("s.manager.name", TEXT), + hasEntry("s.manager.name.keyword", KEYWORD), + hasEntry("s.manager.address", KEYWORD), + hasEntry("s.manager.salary", LONG))); + } - @Test - public void fieldWithSpacesInNameShouldPass() { - analyzer.visitIndexName("semantics"); - Optional type = context.peek().resolve(new Symbol(Namespace.FIELD_NAME, "field with spaces")); - Assert.assertTrue(type.isPresent()); - Assert.assertSame(TEXT, type.get()); - } + @Test + public void fieldWithUnknownEsTypeShouldPass() { + analyzer.visitIndexName("semantics"); + Optional type = context.peek().resolve(new Symbol(Namespace.FIELD_NAME, "new_field")); + Assert.assertTrue(type.isPresent()); + Assert.assertSame(UNKNOWN, type.get()); + } + @Test + public void fieldWithSpacesInNameShouldPass() { + analyzer.visitIndexName("semantics"); + Optional type = + context.peek().resolve(new Symbol(Namespace.FIELD_NAME, "field with spaces")); + Assert.assertTrue(type.isPresent()); + Assert.assertSame(TEXT, type.get()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerConfigTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerConfigTest.java index 18253bd71f..2b9a5e418c 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerConfigTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerConfigTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; import static org.hamcrest.Matchers.allOf; @@ -17,54 +16,47 @@ import org.opensearch.sql.legacy.antlr.SqlAnalysisConfig; import org.opensearch.sql.legacy.esdomain.LocalClusterState; -/** - * Test cases for semantic analysis 
configuration - */ +/** Test cases for semantic analysis configuration */ public class SemanticAnalyzerConfigTest extends SemanticAnalyzerTestBase { - @Rule - public ExpectedException exceptionWithoutSuggestion = ExpectedException.none(); - - @Test - public void noAnalysisShouldPerformForNonSelectStatement() { - String sql = "DELETE FROM semantics WHERE age12 = 123"; - expectValidationPassWithConfig(sql, new SqlAnalysisConfig(true, true, 1000)); - } - - @Test - public void noAnalysisShouldPerformIfDisabledAnalysis() { - String sql = "SELECT * FROM semantics WHERE age12 = 123"; - expectValidationFailWithErrorMessages(sql, "Field [age12] cannot be found or used here."); - expectValidationPassWithConfig(sql, new SqlAnalysisConfig(false, true, 1000)); - } - - @Test - public void noFieldNameSuggestionIfDisabledSuggestion() { - String sql = "SELECT * FROM semantics WHERE age12 = 123"; - expectValidationFailWithErrorMessages(sql, - "Field [age12] cannot be found or used here.", - "Did you mean [age]?"); - - exceptionWithoutSuggestion.expect(SemanticAnalysisException.class); - exceptionWithoutSuggestion.expectMessage( - allOf( - containsString("Field [age12] cannot be found or used here"), - not(containsString("Did you mean")) - ) - ); - new OpenSearchLegacySqlAnalyzer(new SqlAnalysisConfig(true, false, 1000)). 
- analyze(sql, LocalClusterState.state()); - } - - @Test - public void noAnalysisShouldPerformIfIndexMappingIsLargerThanThreshold() { - String sql = "SELECT * FROM semantics WHERE test = 123"; - expectValidationFailWithErrorMessages(sql, "Field [test] cannot be found or used here."); - expectValidationPassWithConfig(sql, new SqlAnalysisConfig(true, true, 1)); - } - - private void expectValidationPassWithConfig(String sql, SqlAnalysisConfig config) { - new OpenSearchLegacySqlAnalyzer(config).analyze(sql, LocalClusterState.state()); - } - + @Rule public ExpectedException exceptionWithoutSuggestion = ExpectedException.none(); + + @Test + public void noAnalysisShouldPerformForNonSelectStatement() { + String sql = "DELETE FROM semantics WHERE age12 = 123"; + expectValidationPassWithConfig(sql, new SqlAnalysisConfig(true, true, 1000)); + } + + @Test + public void noAnalysisShouldPerformIfDisabledAnalysis() { + String sql = "SELECT * FROM semantics WHERE age12 = 123"; + expectValidationFailWithErrorMessages(sql, "Field [age12] cannot be found or used here."); + expectValidationPassWithConfig(sql, new SqlAnalysisConfig(false, true, 1000)); + } + + @Test + public void noFieldNameSuggestionIfDisabledSuggestion() { + String sql = "SELECT * FROM semantics WHERE age12 = 123"; + expectValidationFailWithErrorMessages( + sql, "Field [age12] cannot be found or used here.", "Did you mean [age]?"); + + exceptionWithoutSuggestion.expect(SemanticAnalysisException.class); + exceptionWithoutSuggestion.expectMessage( + allOf( + containsString("Field [age12] cannot be found or used here"), + not(containsString("Did you mean")))); + new OpenSearchLegacySqlAnalyzer(new SqlAnalysisConfig(true, false, 1000)) + .analyze(sql, LocalClusterState.state()); + } + + @Test + public void noAnalysisShouldPerformIfIndexMappingIsLargerThanThreshold() { + String sql = "SELECT * FROM semantics WHERE test = 123"; + expectValidationFailWithErrorMessages(sql, "Field [test] cannot be found or used here."); + 
expectValidationPassWithConfig(sql, new SqlAnalysisConfig(true, true, 1)); + } + + private void expectValidationPassWithConfig(String sql, SqlAnalysisConfig config) { + new OpenSearchLegacySqlAnalyzer(config).analyze(sql, LocalClusterState.state()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerConstantTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerConstantTest.java index 5ff8875f0c..48d9b6e36c 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerConstantTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerConstantTest.java @@ -3,21 +3,19 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; import org.junit.Test; public class SemanticAnalyzerConstantTest extends SemanticAnalyzerTestBase { - @Test - public void useNegativeIntegerShouldPass() { - validate("SELECT * FROM test WHERE age > -1"); - } - - @Test - public void useNegativeFloatingPointNumberShouldPass() { - validate("SELECT * FROM test WHERE balance > -1.23456"); - } + @Test + public void useNegativeIntegerShouldPass() { + validate("SELECT * FROM test WHERE age > -1"); + } + @Test + public void useNegativeFloatingPointNumberShouldPass() { + validate("SELECT * FROM test WHERE balance > -1.23456"); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerESScalarFunctionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerESScalarFunctionTest.java index 32c322f8c2..c16ecc33e3 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerESScalarFunctionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerESScalarFunctionTest.java @@ -3,54 +3,50 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; import 
org.junit.Test; -/** - * Semantic analysis test for Elaticsearch special scalar functions - */ +/** Semantic analysis test for Elaticsearch special scalar functions */ public class SemanticAnalyzerESScalarFunctionTest extends SemanticAnalyzerTestBase { - @Test - public void dateFunctionCallWithDateInSelectClauseShouldPass() { - validate("SELECT DAY_OF_MONTH(birthday) FROM semantics"); - validate("SELECT DAY_OF_WEEK(birthday) FROM semantics"); - validate("SELECT DAY_OF_YEAR(birthday) FROM semantics"); - validate("SELECT MINUTE_OF_DAY(birthday) FROM semantics"); - validate("SELECT MINUTE_OF_HOUR(birthday) FROM semantics"); - validate("SELECT MONTH_OF_YEAR(birthday) FROM semantics"); - validate("SELECT WEEK_OF_YEAR(birthday) FROM semantics"); - } - - @Test - public void dateFunctionCallWithDateInWhereClauseShouldPass() { - validate("SELECT * FROM semantics WHERE DAY_OF_MONTH(birthday) = 1"); - validate("SELECT * FROM semantics WHERE DAY_OF_WEEK(birthday) = 1"); - validate("SELECT * FROM semantics WHERE DAY_OF_YEAR(birthday) = 1"); - validate("SELECT * FROM semantics WHERE MINUTE_OF_DAY(birthday) = 1"); - validate("SELECT * FROM semantics WHERE MINUTE_OF_HOUR(birthday) = 1"); - validate("SELECT * FROM semantics WHERE MONTH_OF_YEAR(birthday) = 1"); - validate("SELECT * FROM semantics WHERE WEEK_OF_YEAR(birthday) = 1"); - } - - @Test - public void geoFunctionCallWithGeoPointInWhereClauseShouldPass() { - validate("SELECT * FROM semantics WHERE GEO_BOUNDING_BOX(location, 100.0, 1.0, 101, 0.0)"); - validate("SELECT * FROM semantics WHERE GEO_DISTANCE(location, '1km', 100.5, 0.500001)"); - validate("SELECT * FROM semantics WHERE GEO_DISTANCE_RANGE(location, '1km', 100.5, 0.500001)"); - } - - @Test - public void fullTextMatchFunctionCallWithStringInWhereClauseShouldPass() { - validate("SELECT * FROM semantics WHERE MATCH_PHRASE(address, 'Seattle')"); - validate("SELECT * FROM semantics WHERE MATCHPHRASE(employer, 'Seattle')"); - validate("SELECT * FROM semantics WHERE 
MATCH_QUERY(manager.name, 'Seattle')"); - validate("SELECT * FROM semantics WHERE MATCHQUERY(manager.name, 'Seattle')"); - validate("SELECT * FROM semantics WHERE QUERY('Seattle')"); - validate("SELECT * FROM semantics WHERE WILDCARD_QUERY(manager.name, 'Sea*')"); - validate("SELECT * FROM semantics WHERE WILDCARDQUERY(manager.name, 'Sea*')"); - } - + @Test + public void dateFunctionCallWithDateInSelectClauseShouldPass() { + validate("SELECT DAY_OF_MONTH(birthday) FROM semantics"); + validate("SELECT DAY_OF_WEEK(birthday) FROM semantics"); + validate("SELECT DAY_OF_YEAR(birthday) FROM semantics"); + validate("SELECT MINUTE_OF_DAY(birthday) FROM semantics"); + validate("SELECT MINUTE_OF_HOUR(birthday) FROM semantics"); + validate("SELECT MONTH_OF_YEAR(birthday) FROM semantics"); + validate("SELECT WEEK_OF_YEAR(birthday) FROM semantics"); + } + + @Test + public void dateFunctionCallWithDateInWhereClauseShouldPass() { + validate("SELECT * FROM semantics WHERE DAY_OF_MONTH(birthday) = 1"); + validate("SELECT * FROM semantics WHERE DAY_OF_WEEK(birthday) = 1"); + validate("SELECT * FROM semantics WHERE DAY_OF_YEAR(birthday) = 1"); + validate("SELECT * FROM semantics WHERE MINUTE_OF_DAY(birthday) = 1"); + validate("SELECT * FROM semantics WHERE MINUTE_OF_HOUR(birthday) = 1"); + validate("SELECT * FROM semantics WHERE MONTH_OF_YEAR(birthday) = 1"); + validate("SELECT * FROM semantics WHERE WEEK_OF_YEAR(birthday) = 1"); + } + + @Test + public void geoFunctionCallWithGeoPointInWhereClauseShouldPass() { + validate("SELECT * FROM semantics WHERE GEO_BOUNDING_BOX(location, 100.0, 1.0, 101, 0.0)"); + validate("SELECT * FROM semantics WHERE GEO_DISTANCE(location, '1km', 100.5, 0.500001)"); + validate("SELECT * FROM semantics WHERE GEO_DISTANCE_RANGE(location, '1km', 100.5, 0.500001)"); + } + + @Test + public void fullTextMatchFunctionCallWithStringInWhereClauseShouldPass() { + validate("SELECT * FROM semantics WHERE MATCH_PHRASE(address, 'Seattle')"); + validate("SELECT * FROM 
semantics WHERE MATCHPHRASE(employer, 'Seattle')"); + validate("SELECT * FROM semantics WHERE MATCH_QUERY(manager.name, 'Seattle')"); + validate("SELECT * FROM semantics WHERE MATCHQUERY(manager.name, 'Seattle')"); + validate("SELECT * FROM semantics WHERE QUERY('Seattle')"); + validate("SELECT * FROM semantics WHERE WILDCARD_QUERY(manager.name, 'Sea*')"); + validate("SELECT * FROM semantics WHERE WILDCARDQUERY(manager.name, 'Sea*')"); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerFieldTypeTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerFieldTypeTest.java index 3e4d3e6eb5..1b9b0dde45 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerFieldTypeTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerFieldTypeTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; import static org.opensearch.sql.legacy.util.MultipleIndexClusterUtils.mockMultipleIndexEnv; @@ -12,87 +11,69 @@ import org.junit.Test; public class SemanticAnalyzerFieldTypeTest extends SemanticAnalyzerTestBase { - @Before - public void setup() { - mockMultipleIndexEnv(); - } + @Before + public void setup() { + mockMultipleIndexEnv(); + } - /** - * id has same type in account1 and account2. - */ - @Test - public void accessFieldTypeNotInQueryPassSemanticCheck() { - validate("SELECT id FROM account* WHERE id = 1"); - } + /** id has same type in account1 and account2. */ + @Test + public void accessFieldTypeNotInQueryPassSemanticCheck() { + validate("SELECT id FROM account* WHERE id = 1"); + } - /** - * address doesn't exist in account1. - */ - @Test - public void accessFieldTypeOnlyInOneIndexPassSemanticCheck() { - validate("SELECT address FROM account* WHERE id = 30"); - } + /** address doesn't exist in account1. 
*/ + @Test + public void accessFieldTypeOnlyInOneIndexPassSemanticCheck() { + validate("SELECT address FROM account* WHERE id = 30"); + } - /** - * age has different type in account1 and account2. - */ - @Test - public void accessConflictFieldTypeShouldFailSemanticCheck() { - expectValidationFailWithErrorMessages("SELECT age FROM account* WHERE age = 30", - "Field [age] have conflict type"); - } + /** age has different type in account1 and account2. */ + @Test + public void accessConflictFieldTypeShouldFailSemanticCheck() { + expectValidationFailWithErrorMessages( + "SELECT age FROM account* WHERE age = 30", "Field [age] have conflict type"); + } - /** - * age has different type in account1 and account2. - */ - @Test - public void mixNonConflictTypeAndConflictFieldTypeShouldFailSemanticCheck() { - expectValidationFailWithErrorMessages("SELECT id, age FROM account* WHERE id = 1", - "Field [age] have conflict type"); - } + /** age has different type in account1 and account2. */ + @Test + public void mixNonConflictTypeAndConflictFieldTypeShouldFailSemanticCheck() { + expectValidationFailWithErrorMessages( + "SELECT id, age FROM account* WHERE id = 1", "Field [age] have conflict type"); + } - /** - * age has different type in account1 and account2. - */ - @Test - public void conflictFieldTypeWithAliasShouldFailSemanticCheck() { - expectValidationFailWithErrorMessages("SELECT a.age FROM account* as a", - "Field [a.age] have conflict type"); - } + /** age has different type in account1 and account2. */ + @Test + public void conflictFieldTypeWithAliasShouldFailSemanticCheck() { + expectValidationFailWithErrorMessages( + "SELECT a.age FROM account* as a", "Field [a.age] have conflict type"); + } - /** - * age has different type in account1 and account2. - * Todo, the error message is not accurate. 
- */ - @Test - public void selectAllFieldTypeShouldFailSemanticCheck() { - expectValidationFailWithErrorMessages("SELECT * FROM account*", - "Field [account*.age] have conflict type"); - } + /** age has different type in account1 and account2. Todo, the error message is not accurate. */ + @Test + public void selectAllFieldTypeShouldFailSemanticCheck() { + expectValidationFailWithErrorMessages( + "SELECT * FROM account*", "Field [account*.age] have conflict type"); + } - /** - * age has different type in account1 and account2. - */ - @Test - public void selectAllFieldTypeWithAliasShouldFailSemanticCheck() { - expectValidationFailWithErrorMessages("SELECT a.* FROM account* as a", - "Field [a.age] have conflict type"); - } + /** age has different type in account1 and account2. */ + @Test + public void selectAllFieldTypeWithAliasShouldFailSemanticCheck() { + expectValidationFailWithErrorMessages( + "SELECT a.* FROM account* as a", "Field [a.age] have conflict type"); + } - /** - * a.projects.name has same type in account1 and account2. - */ - @Test - public void selectNestedNoneConflictTypeShouldPassSemanticCheck() { - validate("SELECT a.projects.name FROM account* as a"); - } + /** a.projects.name has same type in account1 and account2. */ + @Test + public void selectNestedNoneConflictTypeShouldPassSemanticCheck() { + validate("SELECT a.projects.name FROM account* as a"); + } - /** - * a.projects.started_year has conflict type in account1 and account2. - */ - @Test - public void selectNestedConflictTypeShouldFailSemanticCheck() { - expectValidationFailWithErrorMessages("SELECT a.projects.started_year FROM account* as a", - "Field [a.projects.started_year] have conflict type"); - } + /** a.projects.started_year has conflict type in account1 and account2. 
*/ + @Test + public void selectNestedConflictTypeShouldFailSemanticCheck() { + expectValidationFailWithErrorMessages( + "SELECT a.projects.started_year FROM account* as a", + "Field [a.projects.started_year] have conflict type"); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerFromClauseTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerFromClauseTest.java index a487a7afaa..2a04321f2f 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerFromClauseTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerFromClauseTest.java @@ -3,191 +3,173 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; import org.junit.Ignore; import org.junit.Test; /** - * Semantic analyzer tests for FROM clause, including parse single index, multiple indices, - * index + (deep) nested field and multiple statements like UNION/MINUS etc. Basically, we - * need to make sure the environment be set up properly so that semantic analysis followed - * can be performed correctly. + * Semantic analyzer tests for FROM clause, including parse single index, multiple indices, index + + * (deep) nested field and multiple statements like UNION/MINUS etc. Basically, we need to make sure + * the environment be set up properly so that semantic analysis followed can be performed correctly. */ public class SemanticAnalyzerFromClauseTest extends SemanticAnalyzerTestBase { - @Ignore("IndexNotFoundException should be thrown from OpenSearch API directly") - @Test - public void nonExistingIndexNameShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics1" - ); - } - - @Test - public void useNotExistFieldInIndexPatternShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT abc FROM semant* WHERE def = 1", - "Field [def] cannot be found or used here.", - "Did you mean [address]?" 
- ); - } - - @Test - public void useNotExistFieldInIndexAndIndexPatternShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT abc FROM semantics, semant* WHERE def = 1", - "Field [def] cannot be found or used here.", - "Did you mean [address]?" - ); - } - - /** - * As shown below, there are multiple cases for alias: - * 1. Alias is not present: either use full index name as prefix or not. - * 2. Alias is present: either use alias as prefix or not. Full index name is illegal. - */ - @Test - public void indexNameAliasShouldBeOptional() { - validate("SELECT address FROM semantics"); - validate("SELECT address FROM semantics s"); - validate("SELECT * FROM semantics WHERE semantics.address LIKE 'Seattle'"); - } - - @Test - public void useFullIndexNameShouldFailIfAliasIsPresent() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s WHERE semantics.address LIKE 'Seattle'", - "Field [semantics.address] cannot be found or used here", - "Did you mean [s.manager.address]?" - ); - } - - @Test - public void invalidIndexNameAliasInFromClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s, a.projects p", - "Field [a.projects] cannot be found or used here", - "Did you mean [s.projects]?" - ); - } - - @Test - public void invalidIndexNameAliasInWhereClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s WHERE a.balance = 10000", - "Field [a.balance] cannot be found or used here", - "Did you mean [s.balance]?" - ); - } - - @Test - public void invalidIndexNameAliasInGroupByClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s GROUP BY a.balance", - "Field [a.balance] cannot be found or used here", - "Did you mean [s.balance]?" 
- ); - } - - @Test - public void invalidIndexNameAliasInHavingClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s HAVING COUNT(a.balance) > 5", - "Field [a.balance] cannot be found or used here", - "Did you mean [s.balance]?" - ); - } - - @Test - public void invalidIndexNameAliasInOrderByClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s ORDER BY a.balance", - "Field [a.balance] cannot be found or used here", - "Did you mean [s.balance]?" - ); - } - - @Test - public void invalidIndexNameAliasInOnClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics sem JOIN semantic tic ON sem.age = t.age", - "Field [t.age] cannot be found or used here", - "Did you mean [tic.age]?" - ); - } - - @Test - public void nonNestedFieldInFromClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s, s.manager m", - "Operator [JOIN] cannot work with [INDEX, OBJECT]." - ); - } - - @Test - public void nonExistingNestedFieldInFromClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s, s.project p", - "Field [s.project] cannot be found or used here", - "Did you mean [s.projects]?" 
- ); - } - - @Ignore("Need to figure out a better way to detect naming conflict") - @Test - public void duplicateIndexNameAliasInFromClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s, s.projects s", - "Field [s] is conflicting with field of same name defined by other index" - ); - } - - @Ignore("Need to figure out a better way to detect naming conflict") - @Test - public void duplicateFieldNameFromDifferentIndexShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics INNER JOIN semantics", - "is conflicting with field of same name defined by other index" - ); - } - - @Test - public void validIndexNameAliasShouldPass() { - validate("SELECT * FROM semantics s, s.projects p"); - validate("SELECT * FROM semantics s WHERE s.balance = 10000"); - } - - @Test - public void indexNameWithTypeShouldPass() { - validate("SELECT * FROM semantics/docs WHERE balance = 10000"); - validate("SELECT * FROM semantics/docs s WHERE s.balance = 10000"); - validate("SELECT * FROM semantics/docs s, s.projects p WHERE p.active IS TRUE"); - } - - @Test - public void noIndexAliasShouldPass() { - validate("SELECT * FROM semantics"); - validate("SELECT * FROM semantics, semantics.projects"); - } - - @Test - public void regularJoinShouldPass() { - validate("SELECT * FROM semantics s1, semantics s2"); - validate("SELECT * FROM semantics s1 JOIN semantics s2"); - validate("SELECT * FROM semantics s1 LEFT JOIN semantics s2 ON s1.balance = s2.balance"); - } - - @Test - public void deepNestedFieldInFromClauseShouldPass() { - validate("SELECT * FROM semantics s, s.projects p, p.members m"); - } - - @Test - public void duplicateFieldNameFromDifferentStatementShouldPass() { - validate("SELECT age FROM semantics UNION SELECT age FROM semantic"); - validate("SELECT s.age FROM semantics s UNION SELECT s.age FROM semantic s"); - } - + @Ignore("IndexNotFoundException should be thrown from OpenSearch API directly") + @Test + public void 
nonExistingIndexNameShouldFail() { + expectValidationFailWithErrorMessages("SELECT * FROM semantics1"); + } + + @Test + public void useNotExistFieldInIndexPatternShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT abc FROM semant* WHERE def = 1", + "Field [def] cannot be found or used here.", + "Did you mean [address]?"); + } + + @Test + public void useNotExistFieldInIndexAndIndexPatternShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT abc FROM semantics, semant* WHERE def = 1", + "Field [def] cannot be found or used here.", + "Did you mean [address]?"); + } + + /** + * As shown below, there are multiple cases for alias: 1. Alias is not present: either use full + * index name as prefix or not. 2. Alias is present: either use alias as prefix or not. Full index + * name is illegal. + */ + @Test + public void indexNameAliasShouldBeOptional() { + validate("SELECT address FROM semantics"); + validate("SELECT address FROM semantics s"); + validate("SELECT * FROM semantics WHERE semantics.address LIKE 'Seattle'"); + } + + @Test + public void useFullIndexNameShouldFailIfAliasIsPresent() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s WHERE semantics.address LIKE 'Seattle'", + "Field [semantics.address] cannot be found or used here", + "Did you mean [s.manager.address]?"); + } + + @Test + public void invalidIndexNameAliasInFromClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s, a.projects p", + "Field [a.projects] cannot be found or used here", + "Did you mean [s.projects]?"); + } + + @Test + public void invalidIndexNameAliasInWhereClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s WHERE a.balance = 10000", + "Field [a.balance] cannot be found or used here", + "Did you mean [s.balance]?"); + } + + @Test + public void invalidIndexNameAliasInGroupByClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s GROUP 
BY a.balance", + "Field [a.balance] cannot be found or used here", + "Did you mean [s.balance]?"); + } + + @Test + public void invalidIndexNameAliasInHavingClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s HAVING COUNT(a.balance) > 5", + "Field [a.balance] cannot be found or used here", + "Did you mean [s.balance]?"); + } + + @Test + public void invalidIndexNameAliasInOrderByClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s ORDER BY a.balance", + "Field [a.balance] cannot be found or used here", + "Did you mean [s.balance]?"); + } + + @Test + public void invalidIndexNameAliasInOnClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics sem JOIN semantic tic ON sem.age = t.age", + "Field [t.age] cannot be found or used here", + "Did you mean [tic.age]?"); + } + + @Test + public void nonNestedFieldInFromClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s, s.manager m", + "Operator [JOIN] cannot work with [INDEX, OBJECT]."); + } + + @Test + public void nonExistingNestedFieldInFromClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s, s.project p", + "Field [s.project] cannot be found or used here", + "Did you mean [s.projects]?"); + } + + @Ignore("Need to figure out a better way to detect naming conflict") + @Test + public void duplicateIndexNameAliasInFromClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s, s.projects s", + "Field [s] is conflicting with field of same name defined by other index"); + } + + @Ignore("Need to figure out a better way to detect naming conflict") + @Test + public void duplicateFieldNameFromDifferentIndexShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics INNER JOIN semantics", + "is conflicting with field of same name defined by other index"); + } + + @Test + public void 
validIndexNameAliasShouldPass() { + validate("SELECT * FROM semantics s, s.projects p"); + validate("SELECT * FROM semantics s WHERE s.balance = 10000"); + } + + @Test + public void indexNameWithTypeShouldPass() { + validate("SELECT * FROM semantics/docs WHERE balance = 10000"); + validate("SELECT * FROM semantics/docs s WHERE s.balance = 10000"); + validate("SELECT * FROM semantics/docs s, s.projects p WHERE p.active IS TRUE"); + } + + @Test + public void noIndexAliasShouldPass() { + validate("SELECT * FROM semantics"); + validate("SELECT * FROM semantics, semantics.projects"); + } + + @Test + public void regularJoinShouldPass() { + validate("SELECT * FROM semantics s1, semantics s2"); + validate("SELECT * FROM semantics s1 JOIN semantics s2"); + validate("SELECT * FROM semantics s1 LEFT JOIN semantics s2 ON s1.balance = s2.balance"); + } + + @Test + public void deepNestedFieldInFromClauseShouldPass() { + validate("SELECT * FROM semantics s, s.projects p, p.members m"); + } + + @Test + public void duplicateFieldNameFromDifferentStatementShouldPass() { + validate("SELECT age FROM semantics UNION SELECT age FROM semantic"); + validate("SELECT s.age FROM semantics s UNION SELECT s.age FROM semantic s"); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerIdentifierTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerIdentifierTest.java index 3d9133c937..35bcde3f76 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerIdentifierTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerIdentifierTest.java @@ -3,169 +3,158 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; import org.junit.Ignore; import org.junit.Test; -/** - * Semantic analyzer tests for identifier - */ +/** Semantic analyzer tests for identifier */ public class SemanticAnalyzerIdentifierTest extends 
SemanticAnalyzerTestBase { - @Ignore("To be implemented") - @Test - public void duplicateFieldAliasInSelectClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT age a, COUNT(*) a FROM semantics s, a.projects p", - "Field [a.projects] cannot be found or used here" - ); - } - - @Test - public void fieldWithDifferentCaseInSelectClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT Age a FROM semantics", - "Field [Age] cannot be found or used here", - "Did you mean [age]?" - ); - } - - @Test - public void useHiddenFieldShouldPass() { - validate("SELECT _score FROM semantics WHERE _id = 1 AND _type = '_doc'"); - } - - @Ignore("Need to remove single quote or back ticks") - @Test - public void useFieldNameWithSpaceShouldPass() { - validate("SELECT ['field with spaces'] FROM semantics"); - validate("SELECT `field with spaces` FROM semantics"); - } - - @Test - public void nonExistingFieldNameInSelectClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT age1 FROM semantics s", - "Field [age1] cannot be found or used here.", - "Did you mean [age]?" - ); - } - - @Test - public void invalidIndexAliasInFromClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s, a.projects p", - "Field [a.projects] cannot be found or used here.", - "Did you mean [s.projects]?" - ); - } - - @Test - public void nonExistingFieldNameInWhereClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s WHERE s.balce = 10000", - "Field [s.balce] cannot be found or used here.", - "Did you mean [s.balance]?" - ); - } - - @Test - public void nonExistingFieldNameInGroupByClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s GROUP BY s.balce", - "Field [s.balce] cannot be found or used here.", - "Did you mean [s.balance]?" 
- ); - } - - @Test - public void nonExistingFieldNameInHavingClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s HAVING COUNT(s.balce) > 5", - "Field [s.balce] cannot be found or used here.", - "Did you mean [s.balance]?" - ); - } - - @Test - public void nonExistingFieldNameInOrderByClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s ORDER BY s.balce", - "Field [s.balce] cannot be found or used here.", - "Did you mean [s.balance]?" - ); - } - - @Test - public void nonExistingFieldNameInFunctionShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s WHERE LOG(s.balce) = 1", - "Field [s.balce] cannot be found or used here.", - "Did you mean [s.balance]?" - ); - } - - @Test - public void nonExistingNestedFieldNameInWhereClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s, s.projects p, p.members m WHERE m.nam = 'John'", - "Field [m.nam] cannot be found or used here.", - "Did you mean [m.name]?" - ); - } - - @Test - public void nonExistingNestedFieldNameInFunctionShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics WHERE nested(projects.actives) = TRUE", - "Field [projects.actives] cannot be found or used here.", - "Did you mean [projects.active]?" 
- ); - } - - @Test - public void useKeywordInMultiFieldShouldPass() { - validate("SELECT employer.keyword FROM semantics WHERE employer.keyword LIKE 'AWS' GROUP BY employer.keyword"); - validate("SELECT * FROM semantics s WHERE s.manager.name.keyword LIKE 'John'"); - } - - @Test - public void useDeepNestedFieldNameShouldPass() { - validate("SELECT p.* FROM semantics s, s.projects p WHERE p IS NULL"); - validate("SELECT p.active FROM semantics s, s.projects p WHERE p.active = TRUE"); - validate("SELECT m.name FROM semantics s, s.projects p, p.members m WHERE m.name = 'John'"); - } - - @Test - public void useConstantLiteralInSelectClauseShouldPass() { - validate("SELECT 1 FROM semantics"); - validate("SELECT 2.0 FROM semantics"); - //validate("SELECT 'test' FROM semantics"); TODO: why 'test' goes to fullColumnName that can be string literal - validate("SELECT TRUE FROM semantics"); - } - - @Test - public void queryWithBackticksQuotedIndexShouldPass() { - validate("SELECT age FROM `semantics`"); - } - - @Test - public void queryWithBackticksQuotedIndexAliasShouldPass() { - validate("SELECT `s`.age FROM semantics AS `s`"); - validate("SELECT `s t`.age FROM semantics AS `s t`"); - } - - @Test - public void queryWithBackticksQuotedFieldNameShouldPass() { - validate("SELECT `age` FROM semantics"); - validate("SELECT s.`age` FROM semantics AS s"); - validate("SELECT `s`.`age` FROM semantics AS `s`"); - } - - @Test - public void queryWithBackticksQuotedFieldNameInFunctionShouldPass() { - validate("SELECT SUM(`age`) FROM semantics"); - } + @Ignore("To be implemented") + @Test + public void duplicateFieldAliasInSelectClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT age a, COUNT(*) a FROM semantics s, a.projects p", + "Field [a.projects] cannot be found or used here"); + } + + @Test + public void fieldWithDifferentCaseInSelectClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT Age a FROM semantics", + "Field [Age] cannot be found or 
used here", + "Did you mean [age]?"); + } + + @Test + public void useHiddenFieldShouldPass() { + validate("SELECT _score FROM semantics WHERE _id = 1 AND _type = '_doc'"); + } + + @Ignore("Need to remove single quote or back ticks") + @Test + public void useFieldNameWithSpaceShouldPass() { + validate("SELECT ['field with spaces'] FROM semantics"); + validate("SELECT `field with spaces` FROM semantics"); + } + + @Test + public void nonExistingFieldNameInSelectClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT age1 FROM semantics s", + "Field [age1] cannot be found or used here.", + "Did you mean [age]?"); + } + + @Test + public void invalidIndexAliasInFromClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s, a.projects p", + "Field [a.projects] cannot be found or used here.", + "Did you mean [s.projects]?"); + } + + @Test + public void nonExistingFieldNameInWhereClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s WHERE s.balce = 10000", + "Field [s.balce] cannot be found or used here.", + "Did you mean [s.balance]?"); + } + + @Test + public void nonExistingFieldNameInGroupByClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s GROUP BY s.balce", + "Field [s.balce] cannot be found or used here.", + "Did you mean [s.balance]?"); + } + + @Test + public void nonExistingFieldNameInHavingClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s HAVING COUNT(s.balce) > 5", + "Field [s.balce] cannot be found or used here.", + "Did you mean [s.balance]?"); + } + + @Test + public void nonExistingFieldNameInOrderByClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s ORDER BY s.balce", + "Field [s.balce] cannot be found or used here.", + "Did you mean [s.balance]?"); + } + + @Test + public void nonExistingFieldNameInFunctionShouldFail() { + 
expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s WHERE LOG(s.balce) = 1", + "Field [s.balce] cannot be found or used here.", + "Did you mean [s.balance]?"); + } + + @Test + public void nonExistingNestedFieldNameInWhereClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s, s.projects p, p.members m WHERE m.nam = 'John'", + "Field [m.nam] cannot be found or used here.", + "Did you mean [m.name]?"); + } + + @Test + public void nonExistingNestedFieldNameInFunctionShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics WHERE nested(projects.actives) = TRUE", + "Field [projects.actives] cannot be found or used here.", + "Did you mean [projects.active]?"); + } + + @Test + public void useKeywordInMultiFieldShouldPass() { + validate( + "SELECT employer.keyword FROM semantics WHERE employer.keyword LIKE 'AWS' GROUP BY" + + " employer.keyword"); + validate("SELECT * FROM semantics s WHERE s.manager.name.keyword LIKE 'John'"); + } + + @Test + public void useDeepNestedFieldNameShouldPass() { + validate("SELECT p.* FROM semantics s, s.projects p WHERE p IS NULL"); + validate("SELECT p.active FROM semantics s, s.projects p WHERE p.active = TRUE"); + validate("SELECT m.name FROM semantics s, s.projects p, p.members m WHERE m.name = 'John'"); + } + + @Test + public void useConstantLiteralInSelectClauseShouldPass() { + validate("SELECT 1 FROM semantics"); + validate("SELECT 2.0 FROM semantics"); + // validate("SELECT 'test' FROM semantics"); TODO: why 'test' goes to fullColumnName that can be + // string literal + validate("SELECT TRUE FROM semantics"); + } + + @Test + public void queryWithBackticksQuotedIndexShouldPass() { + validate("SELECT age FROM `semantics`"); + } + + @Test + public void queryWithBackticksQuotedIndexAliasShouldPass() { + validate("SELECT `s`.age FROM semantics AS `s`"); + validate("SELECT `s t`.age FROM semantics AS `s t`"); + } + + @Test + public void 
queryWithBackticksQuotedFieldNameShouldPass() { + validate("SELECT `age` FROM semantics"); + validate("SELECT s.`age` FROM semantics AS s"); + validate("SELECT `s`.`age` FROM semantics AS `s`"); + } + + @Test + public void queryWithBackticksQuotedFieldNameInFunctionShouldPass() { + validate("SELECT SUM(`age`) FROM semantics"); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerMultiQueryTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerMultiQueryTest.java index 3c4c71c6ea..319f6c5cfa 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerMultiQueryTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerMultiQueryTest.java @@ -3,93 +3,87 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; import org.junit.Ignore; import org.junit.Test; -/** - * Semantic analyzer tests for multi query like UNION and MINUS - */ +/** Semantic analyzer tests for multi query like UNION and MINUS */ public class SemanticAnalyzerMultiQueryTest extends SemanticAnalyzerTestBase { - @Test - public void unionDifferentResultTypeOfTwoQueriesShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT balance FROM semantics UNION SELECT address FROM semantics", - "Operator [UNION] cannot work with [DOUBLE, TEXT]." - ); - } - - @Test - public void unionDifferentNumberOfResultTypeOfTwoQueriesShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT balance FROM semantics UNION SELECT balance, age FROM semantics", - "Operator [UNION] cannot work with [DOUBLE, (DOUBLE, INTEGER)]." - ); - } - - @Test - public void minusDifferentResultTypeOfTwoQueriesShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT p.active FROM semantics s, s.projects p MINUS SELECT address FROM semantics", - "Operator [MINUS] cannot work with [BOOLEAN, TEXT]." 
- ); - } - - @Test - public void unionSameResultTypeOfTwoQueriesShouldPass() { - validate("SELECT balance FROM semantics UNION SELECT balance FROM semantics"); - } - - @Test - public void unionCompatibleResultTypeOfTwoQueriesShouldPass() { - validate("SELECT balance FROM semantics UNION SELECT age FROM semantics"); - validate("SELECT address FROM semantics UNION ALL SELECT city FROM semantics"); - } - - @Test - public void minusSameResultTypeOfTwoQueriesShouldPass() { - validate("SELECT s.projects.active FROM semantics s UNION SELECT p.active FROM semantics s, s.projects p"); - } - - @Test - public void minusCompatibleResultTypeOfTwoQueriesShouldPass() { - validate("SELECT address FROM semantics MINUS SELECT manager.name.keyword FROM semantics"); - } - - @Test - public void unionSelectStarWithExtraFieldOfTwoQueriesShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics UNION SELECT *, city FROM semantics", - "Operator [UNION] cannot work with [(*), KEYWORD]." - ); - } - - @Test - public void minusSelectStarWithExtraFieldOfTwoQueriesShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT *, address, balance FROM semantics MINUS SELECT * FROM semantics", - "Operator [MINUS] cannot work with [(TEXT, DOUBLE), (*)]." 
- ); - } - - @Test - public void unionSelectStarOfTwoQueriesShouldPass() { - validate("SELECT * FROM semantics UNION SELECT * FROM semantics"); - validate("SELECT *, age FROM semantics UNION SELECT *, balance FROM semantics"); - } - - @Test - public void unionSelectFunctionCallWithSameReturnTypeOfTwoQueriesShouldPass() { - validate("SELECT LOG(balance) FROM semantics UNION SELECT ABS(age) FROM semantics"); - } - - @Ignore("* is empty and ignored in product of select items for now") - @Test - public void unionSelectFieldWithExtraStarOfTwoQueriesShouldFail() { - expectValidationFailWithErrorMessages("SELECT age FROM semantics UNION SELECT *, age FROM semantics"); - } - + @Test + public void unionDifferentResultTypeOfTwoQueriesShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT balance FROM semantics UNION SELECT address FROM semantics", + "Operator [UNION] cannot work with [DOUBLE, TEXT]."); + } + + @Test + public void unionDifferentNumberOfResultTypeOfTwoQueriesShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT balance FROM semantics UNION SELECT balance, age FROM semantics", + "Operator [UNION] cannot work with [DOUBLE, (DOUBLE, INTEGER)]."); + } + + @Test + public void minusDifferentResultTypeOfTwoQueriesShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT p.active FROM semantics s, s.projects p MINUS SELECT address FROM semantics", + "Operator [MINUS] cannot work with [BOOLEAN, TEXT]."); + } + + @Test + public void unionSameResultTypeOfTwoQueriesShouldPass() { + validate("SELECT balance FROM semantics UNION SELECT balance FROM semantics"); + } + + @Test + public void unionCompatibleResultTypeOfTwoQueriesShouldPass() { + validate("SELECT balance FROM semantics UNION SELECT age FROM semantics"); + validate("SELECT address FROM semantics UNION ALL SELECT city FROM semantics"); + } + + @Test + public void minusSameResultTypeOfTwoQueriesShouldPass() { + validate( + "SELECT s.projects.active FROM semantics s UNION SELECT 
p.active FROM semantics s," + + " s.projects p"); + } + + @Test + public void minusCompatibleResultTypeOfTwoQueriesShouldPass() { + validate("SELECT address FROM semantics MINUS SELECT manager.name.keyword FROM semantics"); + } + + @Test + public void unionSelectStarWithExtraFieldOfTwoQueriesShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics UNION SELECT *, city FROM semantics", + "Operator [UNION] cannot work with [(*), KEYWORD]."); + } + + @Test + public void minusSelectStarWithExtraFieldOfTwoQueriesShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT *, address, balance FROM semantics MINUS SELECT * FROM semantics", + "Operator [MINUS] cannot work with [(TEXT, DOUBLE), (*)]."); + } + + @Test + public void unionSelectStarOfTwoQueriesShouldPass() { + validate("SELECT * FROM semantics UNION SELECT * FROM semantics"); + validate("SELECT *, age FROM semantics UNION SELECT *, balance FROM semantics"); + } + + @Test + public void unionSelectFunctionCallWithSameReturnTypeOfTwoQueriesShouldPass() { + validate("SELECT LOG(balance) FROM semantics UNION SELECT ABS(age) FROM semantics"); + } + + @Ignore("* is empty and ignored in product of select items for now") + @Test + public void unionSelectFieldWithExtraStarOfTwoQueriesShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT age FROM semantics UNION SELECT *, age FROM semantics"); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerOperatorTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerOperatorTest.java index 36046aa0ad..bd5aeba507 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerOperatorTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerOperatorTest.java @@ -3,71 +3,62 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; import org.junit.Test; -/** - * 
Semantic analysis test cases for operator - */ +/** Semantic analysis test cases for operator */ public class SemanticAnalyzerOperatorTest extends SemanticAnalyzerTestBase { - @Test - public void compareNumberIsBooleanShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics WHERE age IS FALSE", - "Operator [IS] cannot work with [INTEGER, BOOLEAN]." - ); - } - - @Test - public void compareTextIsNotBooleanShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics WHERE address IS NOT TRUE", - "Operator [IS] cannot work with [TEXT, BOOLEAN]." - ); - } + @Test + public void compareNumberIsBooleanShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics WHERE age IS FALSE", + "Operator [IS] cannot work with [INTEGER, BOOLEAN]."); + } - @Test - public void compareNumberEqualsToStringShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics WHERE balance = 'test'", - "Operator [=] cannot work with [DOUBLE, STRING]." - ); - } + @Test + public void compareTextIsNotBooleanShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics WHERE address IS NOT TRUE", + "Operator [IS] cannot work with [TEXT, BOOLEAN]."); + } - @Test - public void compareSubstringFunctionCallEqualsToNumberShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics WHERE SUBSTRING(address, 0, 3) = 1", - "Operator [=] cannot work with [TEXT, INTEGER]." - ); - } + @Test + public void compareNumberEqualsToStringShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics WHERE balance = 'test'", + "Operator [=] cannot work with [DOUBLE, STRING]."); + } - @Test - public void compareLogFunctionCallWithIntegerSmallerThanStringShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics WHERE LOG(age) < 'test'", - "Operator [<] cannot work with [DOUBLE, STRING]." 
- ); - } + @Test + public void compareSubstringFunctionCallEqualsToNumberShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics WHERE SUBSTRING(address, 0, 3) = 1", + "Operator [=] cannot work with [TEXT, INTEGER]."); + } - @Test - public void compareDoubleWithIntegerShouldPass() { - validate("SELECT * FROM semantics WHERE balance >= 1000"); - validate("SELECT * FROM semantics WHERE balance <> 1000"); - validate("SELECT * FROM semantics WHERE balance != 1000"); - } + @Test + public void compareLogFunctionCallWithIntegerSmallerThanStringShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics WHERE LOG(age) < 'test'", + "Operator [<] cannot work with [DOUBLE, STRING]."); + } - @Test - public void compareDateWithStringShouldPass() { - validate("SELECT * FROM semantics WHERE birthday = '2019-09-30'"); - } + @Test + public void compareDoubleWithIntegerShouldPass() { + validate("SELECT * FROM semantics WHERE balance >= 1000"); + validate("SELECT * FROM semantics WHERE balance <> 1000"); + validate("SELECT * FROM semantics WHERE balance != 1000"); + } - @Test - public void namedArgumentShouldSkipOperatorTypeCheck() { - validate("SELECT TOPHITS('size'=3, age='desc') FROM semantics GROUP BY city"); - } + @Test + public void compareDateWithStringShouldPass() { + validate("SELECT * FROM semantics WHERE birthday = '2019-09-30'"); + } + @Test + public void namedArgumentShouldSkipOperatorTypeCheck() { + validate("SELECT TOPHITS('size'=3, age='desc') FROM semantics GROUP BY city"); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerScalarFunctionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerScalarFunctionTest.java index 83454b9549..8017c49548 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerScalarFunctionTest.java +++ 
b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerScalarFunctionTest.java @@ -3,270 +3,255 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; import org.junit.Ignore; import org.junit.Test; import org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType; -/** - * Semantic analysis tests for scalar function. - */ +/** Semantic analysis tests for scalar function. */ public class SemanticAnalyzerScalarFunctionTest extends SemanticAnalyzerTestBase { - @Test - public void unsupportedScalarFunctionCallInSelectClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT DAY() FROM semantics", - "Function [DAY] cannot be found or used here." - ); - } + @Test + public void unsupportedScalarFunctionCallInSelectClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT DAY() FROM semantics", "Function [DAY] cannot be found or used here."); + } - @Test - public void unsupportedScalarFunctionCallInWhereClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics WHERE LOG100(balance) = 1", - "Function [LOG100] cannot be found or used here.", - "Did you mean [LOG10]?" 
- ); - } + @Test + public void unsupportedScalarFunctionCallInWhereClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics WHERE LOG100(balance) = 1", + "Function [LOG100] cannot be found or used here.", + "Did you mean [LOG10]?"); + } - @Test - public void scalarFunctionCallWithLessArgumentsInWhereClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics WHERE LOG() = 1", - "Function [LOG] cannot work with [].", - "Usage: LOG(NUMBER T) -> DOUBLE" - ); - } + @Test + public void scalarFunctionCallWithLessArgumentsInWhereClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics WHERE LOG() = 1", + "Function [LOG] cannot work with [].", + "Usage: LOG(NUMBER T) -> DOUBLE"); + } - @Test - public void scalarFunctionCallWithMoreArgumentsInWhereClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics WHERE LOG(age, city) = 1", - "Function [LOG] cannot work with [INTEGER, KEYWORD].", - "Usage: LOG(NUMBER T) -> DOUBLE" - ); - } + @Test + public void scalarFunctionCallWithMoreArgumentsInWhereClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics WHERE LOG(age, city) = 1", + "Function [LOG] cannot work with [INTEGER, KEYWORD].", + "Usage: LOG(NUMBER T) -> DOUBLE"); + } - @Test - public void logFunctionCallWithOneNestedInSelectClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT LOG(projects) FROM semantics", - "Function [LOG] cannot work with [NESTED_FIELD].", - "Usage: LOG(NUMBER T) -> DOUBLE" - ); - } + @Test + public void logFunctionCallWithOneNestedInSelectClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT LOG(projects) FROM semantics", + "Function [LOG] cannot work with [NESTED_FIELD].", + "Usage: LOG(NUMBER T) -> DOUBLE"); + } - @Test - public void logFunctionCallWithOneTextInWhereClauseShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics WHERE 
LOG(city) = 1", - "Function [LOG] cannot work with [KEYWORD].", - "Usage: LOG(NUMBER T) -> DOUBLE" - ); - } + @Test + public void logFunctionCallWithOneTextInWhereClauseShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics WHERE LOG(city) = 1", + "Function [LOG] cannot work with [KEYWORD].", + "Usage: LOG(NUMBER T) -> DOUBLE"); + } - @Test - public void logFunctionCallWithOneNumberShouldPass() { - validate("SELECT LOG(age) FROM semantics"); - validate("SELECT * FROM semantics s WHERE LOG(s.balance) = 1000"); - validate("SELECT LOG(s.manager.salary) FROM semantics s"); - } + @Test + public void logFunctionCallWithOneNumberShouldPass() { + validate("SELECT LOG(age) FROM semantics"); + validate("SELECT * FROM semantics s WHERE LOG(s.balance) = 1000"); + validate("SELECT LOG(s.manager.salary) FROM semantics s"); + } - @Test - public void logFunctionCallInDifferentCaseShouldPass() { - validate("SELECT log(age) FROM semantics"); - validate("SELECT Log(age) FROM semantics"); - validate("SELECT loG(age) FROM semantics"); - } + @Test + public void logFunctionCallInDifferentCaseShouldPass() { + validate("SELECT log(age) FROM semantics"); + validate("SELECT Log(age) FROM semantics"); + validate("SELECT loG(age) FROM semantics"); + } - @Test - public void logFunctionCallWithUnknownFieldShouldPass() { - validate("SELECT LOG(new_field) FROM semantics"); - } + @Test + public void logFunctionCallWithUnknownFieldShouldPass() { + validate("SELECT LOG(new_field) FROM semantics"); + } - @Ignore("Test set to ignore due to nested functions not supported and blocked by throwing SqlFeatureNotImplementedException") - @Test - public void substringWithLogFunctionCallWithUnknownFieldShouldPass() { - expectValidationFailWithErrorMessages( - "SELECT SUBSTRING(LOG(new_field), 0, 1) FROM semantics", - "Function [SUBSTRING] cannot work with [DOUBLE, INTEGER, INTEGER]." 
- ," Usage: SUBSTRING(STRING T, INTEGER, INTEGER) -> T" - ); - } + @Ignore( + "Test set to ignore due to nested functions not supported and blocked by throwing" + + " SqlFeatureNotImplementedException") + @Test + public void substringWithLogFunctionCallWithUnknownFieldShouldPass() { + expectValidationFailWithErrorMessages( + "SELECT SUBSTRING(LOG(new_field), 0, 1) FROM semantics", + "Function [SUBSTRING] cannot work with [DOUBLE, INTEGER, INTEGER].", + " Usage: SUBSTRING(STRING T, INTEGER, INTEGER) -> T"); + } - @Ignore("Test set to ignore due to nested functions not supported and blocked by throwing SqlFeatureNotImplementedException") - @Test - public void logFunctionCallWithResultOfAbsFunctionCallWithOneNumberShouldPass() { - validate("SELECT LOG(ABS(age)) FROM semantics"); - } + @Ignore( + "Test set to ignore due to nested functions not supported and blocked by throwing" + + " SqlFeatureNotImplementedException") + @Test + public void logFunctionCallWithResultOfAbsFunctionCallWithOneNumberShouldPass() { + validate("SELECT LOG(ABS(age)) FROM semantics"); + } - @Ignore("Test set to ignore due to nested functions not supported and blocked by throwing SqlFeatureNotImplementedException") - @Test - public void logFunctionCallWithMoreNestedFunctionCallWithOneNumberShouldPass() { - validate("SELECT LOG(ABS(SQRT(balance))) FROM semantics"); - } + @Ignore( + "Test set to ignore due to nested functions not supported and blocked by throwing" + + " SqlFeatureNotImplementedException") + @Test + public void logFunctionCallWithMoreNestedFunctionCallWithOneNumberShouldPass() { + validate("SELECT LOG(ABS(SQRT(balance))) FROM semantics"); + } - @Ignore("Test set to ignore due to nested functions not supported and blocked by throwing SqlFeatureNotImplementedException") - @Test - public void substringFunctionCallWithResultOfAnotherSubstringAndAbsFunctionCallShouldPass() { - validate("SELECT SUBSTRING(SUBSTRING(city, ABS(age), 1), 2, ABS(1)) FROM semantics"); - } + @Ignore( + "Test 
set to ignore due to nested functions not supported and blocked by throwing" + + " SqlFeatureNotImplementedException") + @Test + public void substringFunctionCallWithResultOfAnotherSubstringAndAbsFunctionCallShouldPass() { + validate("SELECT SUBSTRING(SUBSTRING(city, ABS(age), 1), 2, ABS(1)) FROM semantics"); + } - @Ignore("Test set to ignore due to nested functions not supported and blocked by throwing SqlFeatureNotImplementedException") - @Test - public void substringFunctionCallWithResultOfMathFunctionCallShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT SUBSTRING(LOG(balance), 2, 3) FROM semantics", - "Function [SUBSTRING] cannot work with [DOUBLE, INTEGER, INTEGER].", - "Usage: SUBSTRING(STRING T, INTEGER, INTEGER) -> T" - ); - } + @Ignore( + "Test set to ignore due to nested functions not supported and blocked by throwing" + + " SqlFeatureNotImplementedException") + @Test + public void substringFunctionCallWithResultOfMathFunctionCallShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT SUBSTRING(LOG(balance), 2, 3) FROM semantics", + "Function [SUBSTRING] cannot work with [DOUBLE, INTEGER, INTEGER].", + "Usage: SUBSTRING(STRING T, INTEGER, INTEGER) -> T"); + } - @Ignore("Test set to ignore due to nested functions not supported and blocked by throwing SqlFeatureNotImplementedException") - @Test - public void logFunctionCallWithResultOfSubstringFunctionCallShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT LOG(SUBSTRING(address, 0, 1)) FROM semantics", - "Function [LOG] cannot work with [TEXT].", - "Usage: LOG(NUMBER T) -> DOUBLE or LOG(NUMBER T, NUMBER) -> DOUBLE" - ); - } + @Ignore( + "Test set to ignore due to nested functions not supported and blocked by throwing" + + " SqlFeatureNotImplementedException") + @Test + public void logFunctionCallWithResultOfSubstringFunctionCallShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT LOG(SUBSTRING(address, 0, 1)) FROM semantics", + "Function [LOG] cannot work 
with [TEXT].", + "Usage: LOG(NUMBER T) -> DOUBLE or LOG(NUMBER T, NUMBER) -> DOUBLE"); + } - @Test - public void allSupportedMathFunctionCallInSelectClauseShouldPass() { - validate( - "SELECT" + - " ABS(age), " + - " ASIN(age), " + - " ATAN(age), " + - " ATAN2(age, age), " + - " CBRT(age), " + - " CEIL(age), " + - " COS(age), " + - " COSH(age), " + - " DEGREES(age), " + - " EXP(age), " + - " EXPM1(age), " + - " FLOOR(age), " + - " LOG(age), " + - " LOG2(age), " + - " LOG10(age), " + - " LN(age), " + - " POW(age), " + - " RADIANS(age), " + - " RINT(age), " + - " ROUND(age), " + - " SIN(age), " + - " SINH(age), " + - " SQRT(age), " + - " TAN(age) " + - "FROM semantics" - ); - } + @Test + public void allSupportedMathFunctionCallInSelectClauseShouldPass() { + validate( + "SELECT" + + " ABS(age), " + + " ASIN(age), " + + " ATAN(age), " + + " ATAN2(age, age), " + + " CBRT(age), " + + " CEIL(age), " + + " COS(age), " + + " COSH(age), " + + " DEGREES(age), " + + " EXP(age), " + + " EXPM1(age), " + + " FLOOR(age), " + + " LOG(age), " + + " LOG2(age), " + + " LOG10(age), " + + " LN(age), " + + " POW(age), " + + " RADIANS(age), " + + " RINT(age), " + + " ROUND(age), " + + " SIN(age), " + + " SINH(age), " + + " SQRT(age), " + + " TAN(age) " + + "FROM semantics"); + } - @Test - public void allSupportedMathFunctionCallInWhereClauseShouldPass() { - validate( - "SELECT * FROM semantics WHERE " + - " ABS(age) = 1 AND " + - " ASIN(age) = 1 AND " + - " ATAN(age) = 1 AND " + - " ATAN2(age, age) = 1 AND " + - " CBRT(age) = 1 AND " + - " CEIL(age) = 1 AND " + - " COS(age) = 1 AND " + - " COSH(age) = 1 AND " + - " DEGREES(age) = 1 AND " + - " EXP(age) = 1 AND " + - " EXPM1(age) = 1 AND " + - " FLOOR(age) = 1 AND " + - " LOG(age) = 1 AND " + - " LOG2(age) = 1 AND " + - " LOG10(age) = 1 AND " + - " LN(age) = 1 AND " + - " POW(age) = 1 AND " + - " RADIANS(age) = 1 AND " + - " RINT(age) = 1 AND " + - " ROUND(age) = 1 AND " + - " SIN(age) = 1 AND " + - " SINH(age) = 1 AND " + - " SQRT(age) = 
1 AND " + - " TAN(age) = 1 " - ); - } + @Test + public void allSupportedMathFunctionCallInWhereClauseShouldPass() { + validate( + "SELECT * FROM semantics WHERE " + + " ABS(age) = 1 AND " + + " ASIN(age) = 1 AND " + + " ATAN(age) = 1 AND " + + " ATAN2(age, age) = 1 AND " + + " CBRT(age) = 1 AND " + + " CEIL(age) = 1 AND " + + " COS(age) = 1 AND " + + " COSH(age) = 1 AND " + + " DEGREES(age) = 1 AND " + + " EXP(age) = 1 AND " + + " EXPM1(age) = 1 AND " + + " FLOOR(age) = 1 AND " + + " LOG(age) = 1 AND " + + " LOG2(age) = 1 AND " + + " LOG10(age) = 1 AND " + + " LN(age) = 1 AND " + + " POW(age) = 1 AND " + + " RADIANS(age) = 1 AND " + + " RINT(age) = 1 AND " + + " ROUND(age) = 1 AND " + + " SIN(age) = 1 AND " + + " SINH(age) = 1 AND " + + " SQRT(age) = 1 AND " + + " TAN(age) = 1 "); + } - @Test - public void allSupportedConstantsUseInSelectClauseShouldPass() { - validate( - "SELECT " + - " E(), " + - " PI() " + - "FROM semantics" - ); - } + @Test + public void allSupportedConstantsUseInSelectClauseShouldPass() { + validate("SELECT " + " E(), " + " PI() " + "FROM semantics"); + } - @Test - public void allSupportedConstantsUseInWhereClauseShouldPass() { - validate( - "SELECT * FROM semantics WHERE " + - " E() > 1 OR " + - " PI() > 1" - ); - } + @Test + public void allSupportedConstantsUseInWhereClauseShouldPass() { + validate("SELECT * FROM semantics WHERE " + " E() > 1 OR " + " PI() > 1"); + } - @Test - public void allSupportedStringFunctionCallInSelectClauseShouldPass() { - validate( - "SELECT * FROM semantics WHERE " + - " SUBSTRING(city, 0, 3) = 'Sea' AND " + - " UPPER(city) = 'SEATTLE' AND " + - " LOWER(city) = 'seattle'" - ); - } + @Test + public void allSupportedStringFunctionCallInSelectClauseShouldPass() { + validate( + "SELECT * FROM semantics WHERE " + + " SUBSTRING(city, 0, 3) = 'Sea' AND " + + " UPPER(city) = 'SEATTLE' AND " + + " LOWER(city) = 'seattle'"); + } - @Test - public void allSupportedStringFunctionCallInWhereClauseShouldPass() { - validate( - 
"SELECT" + - " SUBSTRING(city, 0, 3), " + - " UPPER(address), " + - " LOWER(manager.name) " + - "FROM semantics " - ); - } + @Test + public void allSupportedStringFunctionCallInWhereClauseShouldPass() { + validate( + "SELECT" + + " SUBSTRING(city, 0, 3), " + + " UPPER(address), " + + " LOWER(manager.name) " + + "FROM semantics "); + } - @Test - public void dateFormatFunctionCallWithNumberShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT DATE_FORMAT(balance, 'yyyy-MM') FROM semantics", - "Function [DATE_FORMAT] cannot work with [DOUBLE, STRING].", - "Usage: DATE_FORMAT(DATE, STRING) -> STRING or DATE_FORMAT(DATE, STRING, STRING) -> STRING" - ); - } + @Test + public void dateFormatFunctionCallWithNumberShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT DATE_FORMAT(balance, 'yyyy-MM') FROM semantics", + "Function [DATE_FORMAT] cannot work with [DOUBLE, STRING].", + "Usage: DATE_FORMAT(DATE, STRING) -> STRING or DATE_FORMAT(DATE, STRING, STRING) ->" + + " STRING"); + } - @Test - public void allSupportedDateFunctionCallShouldPass() { - validate( - "SELECT date_format(birthday, 'yyyy-MM') " + - "FROM semantics " + - "WHERE date_format(birthday, 'yyyy-MM') > '1980-01' " + - "GROUP BY date_format(birthday, 'yyyy-MM') " + - "ORDER BY date_format(birthday, 'yyyy-MM') DESC" - ); - } + @Test + public void allSupportedDateFunctionCallShouldPass() { + validate( + "SELECT date_format(birthday, 'yyyy-MM') " + + "FROM semantics " + + "WHERE date_format(birthday, 'yyyy-MM') > '1980-01' " + + "GROUP BY date_format(birthday, 'yyyy-MM') " + + "ORDER BY date_format(birthday, 'yyyy-MM') DESC"); + } - @Test - public void concatRequiresVarargSupportShouldPassAnyway() { - validate("SELECT CONCAT('aaa') FROM semantics"); - validate("SELECT CONCAT('aaa', 'bbb') FROM semantics"); - validate("SELECT CONCAT('aaa', 'bbb', 123) FROM semantics"); - } + @Test + public void concatRequiresVarargSupportShouldPassAnyway() { + validate("SELECT CONCAT('aaa') FROM 
semantics"); + validate("SELECT CONCAT('aaa', 'bbb') FROM semantics"); + validate("SELECT CONCAT('aaa', 'bbb', 123) FROM semantics"); + } - @Test - public void castFunctionShouldPass() { - validateWithType("SELECT CAST(age AS DOUBLE) FROM semantics", OpenSearchDataType.DOUBLE); - } + @Test + public void castFunctionShouldPass() { + validateWithType("SELECT CAST(age AS DOUBLE) FROM semantics", OpenSearchDataType.DOUBLE); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerSubqueryTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerSubqueryTest.java index f34af4fe3a..7613806df7 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerSubqueryTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerSubqueryTest.java @@ -3,105 +3,94 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; import org.junit.Test; import org.opensearch.sql.legacy.antlr.visitor.EarlyExitAnalysisException; -/** - * Semantic analysis test for subquery - */ +/** Semantic analysis test for subquery */ public class SemanticAnalyzerSubqueryTest extends SemanticAnalyzerTestBase { - @Test - public void useExistClauseOnNestedFieldShouldPass() { - validate( - "SELECT * FROM semantics AS s WHERE EXISTS " + - " ( SELECT * FROM s.projects AS p WHERE p.active IS TRUE ) " + - " AND s.age > 10" - ); - } - - @Test - public void useNotExistClauseOnNestedFieldShouldPass() { - validate( - "SELECT * FROM semantics AS s WHERE NOT EXISTS " + - " ( SELECT * FROM s.projects AS p WHERE p.active IS TRUE ) " + - " AND s.age > 10" - ); - } - - @Test - public void useInClauseOnAgeWithIntegerLiteralListShouldPass() { - validate("SELECT * FROM semantics WHERE age IN (30, 40)"); - } - - @Test - public void useAliasInSubqueryShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s WHERE EXISTS (SELECT * 
FROM s.projects p) AND p.active IS TRUE", - "Field [p.active] cannot be found or used here.", - "Did you mean [projects.active]?" - ); - } - - @Test - public void useInClauseWithIncompatibleFieldTypesShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s WHERE age IN (SELECT p.active FROM s.projects p)", - "Operator [IN] cannot work with [INTEGER, BOOLEAN]." - ); - } - - @Test - public void useInClauseWithCompatibleFieldTypesShouldPass() { - validate("SELECT * FROM semantics s WHERE address IN (SELECT city FROM s.projects p)"); - } - - @Test - public void useNotInClauseWithCompatibleFieldTypesShouldPass() { - validate("SELECT * FROM semantics s WHERE address NOT IN (SELECT city FROM s.projects p)"); - } - - @Test - public void useInClauseWithCompatibleConstantShouldPass() { - validate("SELECT * FROM semantics WHERE age IN (10, 20, 30)"); - validate("SELECT * FROM semantics WHERE city IN ('Seattle', 'Bellevue')"); - validate("SELECT * FROM semantics WHERE birthday IN ('2000-01-01', '2010-01-01')"); - } - - @Test - public void useInClauseWithIncompatibleConstantShouldPass() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s WHERE age IN ('abc', 'def')", - "Operator [IN] cannot work with [INTEGER, STRING]." 
- ); - } - - @Test - public void useInClauseWithSelectStarShouldFail() { - expectValidationFailWithErrorMessages( - "SELECT * FROM semantics s WHERE address IN (SELECT * FROM s.projects p)", - "Operator [IN] cannot work with [TEXT, (*)]" - ); - } - - @Test - public void useExistsClauseWithSelectStarShouldPass() { - validate("SELECT * FROM semantics s WHERE EXISTS (SELECT * FROM s.projects p)"); - } - - @Test - public void useExistsClauseWithSelectConstantShouldPass() { - validate("SELECT * FROM semantics s WHERE EXISTS (SELECT 1 FROM s.projects p)"); - } - - /** - * Ignore the semantic analyzer by using {@link EarlyExitAnalysisException} - */ - @Test - public void useSubqueryInFromClauseWithSelectConstantShouldPass() { - validate("SELECT t.TEMP as count FROM (SELECT COUNT(*) as TEMP FROM semantics) t"); - } + @Test + public void useExistClauseOnNestedFieldShouldPass() { + validate( + "SELECT * FROM semantics AS s WHERE EXISTS " + + " ( SELECT * FROM s.projects AS p WHERE p.active IS TRUE ) " + + " AND s.age > 10"); + } + + @Test + public void useNotExistClauseOnNestedFieldShouldPass() { + validate( + "SELECT * FROM semantics AS s WHERE NOT EXISTS " + + " ( SELECT * FROM s.projects AS p WHERE p.active IS TRUE ) " + + " AND s.age > 10"); + } + + @Test + public void useInClauseOnAgeWithIntegerLiteralListShouldPass() { + validate("SELECT * FROM semantics WHERE age IN (30, 40)"); + } + + @Test + public void useAliasInSubqueryShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s WHERE EXISTS (SELECT * FROM s.projects p) AND p.active IS TRUE", + "Field [p.active] cannot be found or used here.", + "Did you mean [projects.active]?"); + } + + @Test + public void useInClauseWithIncompatibleFieldTypesShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s WHERE age IN (SELECT p.active FROM s.projects p)", + "Operator [IN] cannot work with [INTEGER, BOOLEAN]."); + } + + @Test + public void 
useInClauseWithCompatibleFieldTypesShouldPass() { + validate("SELECT * FROM semantics s WHERE address IN (SELECT city FROM s.projects p)"); + } + + @Test + public void useNotInClauseWithCompatibleFieldTypesShouldPass() { + validate("SELECT * FROM semantics s WHERE address NOT IN (SELECT city FROM s.projects p)"); + } + + @Test + public void useInClauseWithCompatibleConstantShouldPass() { + validate("SELECT * FROM semantics WHERE age IN (10, 20, 30)"); + validate("SELECT * FROM semantics WHERE city IN ('Seattle', 'Bellevue')"); + validate("SELECT * FROM semantics WHERE birthday IN ('2000-01-01', '2010-01-01')"); + } + + @Test + public void useInClauseWithIncompatibleConstantShouldPass() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s WHERE age IN ('abc', 'def')", + "Operator [IN] cannot work with [INTEGER, STRING]."); + } + + @Test + public void useInClauseWithSelectStarShouldFail() { + expectValidationFailWithErrorMessages( + "SELECT * FROM semantics s WHERE address IN (SELECT * FROM s.projects p)", + "Operator [IN] cannot work with [TEXT, (*)]"); + } + + @Test + public void useExistsClauseWithSelectStarShouldPass() { + validate("SELECT * FROM semantics s WHERE EXISTS (SELECT * FROM s.projects p)"); + } + + @Test + public void useExistsClauseWithSelectConstantShouldPass() { + validate("SELECT * FROM semantics s WHERE EXISTS (SELECT 1 FROM s.projects p)"); + } + + /** Ignore the semantic analyzer by using {@link EarlyExitAnalysisException} */ + @Test + public void useSubqueryInFromClauseWithSelectConstantShouldPass() { + validate("SELECT t.TEMP as count FROM (SELECT COUNT(*) as TEMP FROM semantics) t"); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerTestBase.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerTestBase.java index 7b53619d9c..403c2f49b7 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerTestBase.java +++ 
b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerTestBase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; import static java.util.stream.Collectors.toList; @@ -28,49 +27,45 @@ import org.opensearch.sql.legacy.antlr.semantic.types.Type; import org.opensearch.sql.legacy.esdomain.LocalClusterState; -/** - * Test cases for semantic analysis focused on semantic check which was missing in the past. - */ +/** Test cases for semantic analysis focused on semantic check which was missing in the past. */ public abstract class SemanticAnalyzerTestBase { - private static final String TEST_MAPPING_FILE = "mappings/semantics.json"; + private static final String TEST_MAPPING_FILE = "mappings/semantics.json"; - /** public accessor is required by @Rule annotation */ - @Rule - public ExpectedException exception = ExpectedException.none(); + /** public accessor is required by @Rule annotation */ + @Rule public ExpectedException exception = ExpectedException.none(); - private OpenSearchLegacySqlAnalyzer - analyzer = new OpenSearchLegacySqlAnalyzer(new SqlAnalysisConfig(true, true, 1000)); + private OpenSearchLegacySqlAnalyzer analyzer = + new OpenSearchLegacySqlAnalyzer(new SqlAnalysisConfig(true, true, 1000)); - @SuppressWarnings("UnstableApiUsage") - @BeforeClass - public static void init() throws IOException { - URL url = Resources.getResource(TEST_MAPPING_FILE); - String mappings = Resources.toString(url, Charsets.UTF_8); - LocalClusterState.state(null); - mockLocalClusterState(mappings); - } + @SuppressWarnings("UnstableApiUsage") + @BeforeClass + public static void init() throws IOException { + URL url = Resources.getResource(TEST_MAPPING_FILE); + String mappings = Resources.toString(url, Charsets.UTF_8); + LocalClusterState.state(null); + mockLocalClusterState(mappings); + } - @AfterClass - public static void cleanUp() { - LocalClusterState.state(null); - } + @AfterClass + public 
static void cleanUp() { + LocalClusterState.state(null); + } - protected void expectValidationFailWithErrorMessages(String query, String... messages) { - exception.expect(SemanticAnalysisException.class); - exception.expectMessage(allOf(Arrays.stream(messages). - map(Matchers::containsString). - collect(toList()))); - validate(query); - } + protected void expectValidationFailWithErrorMessages(String query, String... messages) { + exception.expect(SemanticAnalysisException.class); + exception.expectMessage( + allOf(Arrays.stream(messages).map(Matchers::containsString).collect(toList()))); + validate(query); + } - protected void validate(String sql) { - analyzer.analyze(sql, LocalClusterState.state()); - } + protected void validate(String sql) { + analyzer.analyze(sql, LocalClusterState.state()); + } - protected void validateWithType(String sql, Type type) { - Optional analyze = analyzer.analyze(sql, LocalClusterState.state()); - assertTrue(analyze.isPresent()); - assertEquals(type, analyze.get()); - } + protected void validateWithType(String sql, Type type) { + Optional analyze = analyzer.analyze(sql, LocalClusterState.state()); + assertTrue(analyze.isPresent()); + assertEquals(type, analyze.get()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerTests.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerTests.java index 56a27b780f..7585152a4d 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerTests.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/SemanticAnalyzerTests.java @@ -3,29 +3,27 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic; import org.junit.runner.RunWith; import org.junit.runners.Suite; /** - * Semantic analyzer test suite to prepare mapping and avoid load from file every time. - * But Gradle seems not work well with suite. 
So move common logic to test base class - * and keep this for quick testing in IDE. + * Semantic analyzer test suite to prepare mapping and avoid load from file every time. But Gradle + * seems not work well with suite. So move common logic to test base class and keep this for quick + * testing in IDE. */ @RunWith(Suite.class) @Suite.SuiteClasses({ - SemanticAnalyzerBasicTest.class, - SemanticAnalyzerConfigTest.class, - SemanticAnalyzerFromClauseTest.class, - SemanticAnalyzerIdentifierTest.class, - SemanticAnalyzerScalarFunctionTest.class, - SemanticAnalyzerESScalarFunctionTest.class, - SemanticAnalyzerAggregateFunctionTest.class, - SemanticAnalyzerOperatorTest.class, - SemanticAnalyzerSubqueryTest.class, - SemanticAnalyzerMultiQueryTest.class, + SemanticAnalyzerBasicTest.class, + SemanticAnalyzerConfigTest.class, + SemanticAnalyzerFromClauseTest.class, + SemanticAnalyzerIdentifierTest.class, + SemanticAnalyzerScalarFunctionTest.class, + SemanticAnalyzerESScalarFunctionTest.class, + SemanticAnalyzerAggregateFunctionTest.class, + SemanticAnalyzerOperatorTest.class, + SemanticAnalyzerSubqueryTest.class, + SemanticAnalyzerMultiQueryTest.class, }) -public class SemanticAnalyzerTests { -} +public class SemanticAnalyzerTests {} diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/EnvironmentTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/EnvironmentTest.java index d9e9271728..c7639e542e 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/EnvironmentTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/EnvironmentTest.java @@ -3,13 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.scope; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.hasEntry; -import static 
org.hamcrest.MatcherAssert.assertThat; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.BOOLEAN; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.DATE; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.KEYWORD; @@ -23,141 +22,136 @@ import org.opensearch.sql.legacy.antlr.semantic.types.Type; import org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchIndex; -/** - * Test cases for environment - */ +/** Test cases for environment */ public class EnvironmentTest { - /** Use context class for push/pop */ - private final SemanticContext context = new SemanticContext(); - - @Test - public void defineFieldSymbolInDifferentEnvironmentsShouldBeAbleToResolve() { - // Root environment - Symbol birthday = new Symbol(Namespace.FIELD_NAME, "s.birthday"); - environment().define(birthday, DATE); - Assert.assertTrue(environment().resolve(birthday).isPresent()); - - // New environment 1 - context.push(); - Symbol city = new Symbol(Namespace.FIELD_NAME, "s.city"); - environment().define(city, KEYWORD); - Assert.assertTrue(environment().resolve(birthday).isPresent()); - Assert.assertTrue(environment().resolve(city).isPresent()); - - // New environment 2 - context.push(); - Symbol manager = new Symbol(Namespace.FIELD_NAME, "s.manager"); - environment().define(manager, OBJECT); - Assert.assertTrue(environment().resolve(birthday).isPresent()); - Assert.assertTrue(environment().resolve(city).isPresent()); - Assert.assertTrue(environment().resolve(manager).isPresent()); - } - - @Test - public void defineFieldSymbolInDifferentEnvironmentsShouldNotAbleToResolveOncePopped() { - // Root environment - Symbol birthday = new Symbol(Namespace.FIELD_NAME, "s.birthday"); - environment().define(birthday, DATE); - - // New environment - context.push(); - Symbol city = new Symbol(Namespace.FIELD_NAME, "s.city"); - Symbol manager = new Symbol(Namespace.FIELD_NAME, "s.manager"); - 
environment().define(city, OBJECT); - environment().define(manager, OBJECT); - Assert.assertTrue(environment().resolve(birthday).isPresent()); - Assert.assertTrue(environment().resolve(city).isPresent()); - Assert.assertTrue(environment().resolve(manager).isPresent()); - - context.pop(); - Assert.assertFalse(environment().resolve(city).isPresent()); - Assert.assertFalse(environment().resolve(manager).isPresent()); - Assert.assertTrue(environment().resolve(birthday).isPresent()); - } - - @Test - public void defineFieldSymbolInDifferentEnvironmentsShouldBeAbleToResolveByPrefix() { - // Root environment - Symbol birthday = new Symbol(Namespace.FIELD_NAME, "s.birthday"); - environment().define(birthday, DATE); - - // New environment 1 - context.push(); - Symbol city = new Symbol(Namespace.FIELD_NAME, "s.city"); - environment().define(city, KEYWORD); - - // New environment 2 - context.push(); - Symbol manager = new Symbol(Namespace.FIELD_NAME, "s.manager"); - environment().define(manager, OBJECT); - - Map typeByName = environment().resolveByPrefix(new Symbol(Namespace.FIELD_NAME, "s")); - assertThat( - typeByName, - allOf( - aMapWithSize(3), - hasEntry("s.birthday", DATE), - hasEntry("s.city", KEYWORD), - hasEntry("s.manager", OBJECT) - ) - ); - } - - @Test - public void defineFieldSymbolShouldBeAbleToResolveAll() { - environment().define(new Symbol(Namespace.FIELD_NAME, "s.projects"), new OpenSearchIndex("s.projects", NESTED_FIELD)); - environment().define(new Symbol(Namespace.FIELD_NAME, "s.projects.release"), DATE); - environment().define(new Symbol(Namespace.FIELD_NAME, "s.projects.active"), BOOLEAN); - environment().define(new Symbol(Namespace.FIELD_NAME, "s.address"), TEXT); - environment().define(new Symbol(Namespace.FIELD_NAME, "s.city"), KEYWORD); - environment().define(new Symbol(Namespace.FIELD_NAME, "s.manager.name"), TEXT); - - Map typeByName = environment().resolveAll(Namespace.FIELD_NAME); - assertThat( - typeByName, - allOf( - aMapWithSize(6), - 
hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), - hasEntry("s.projects.release", DATE), - hasEntry("s.projects.active", BOOLEAN), - hasEntry("s.address", TEXT), - hasEntry("s.city", KEYWORD), - hasEntry("s.manager.name", TEXT) - ) - ); - } - - @Test - public void defineFieldSymbolInDifferentEnvironmentsShouldBeAbleToResolveAll() { - // Root environment - Symbol birthday = new Symbol(Namespace.FIELD_NAME, "s.birthday"); - environment().define(birthday, DATE); - - // New environment 1 - context.push(); - Symbol city = new Symbol(Namespace.FIELD_NAME, "s.city"); - environment().define(city, KEYWORD); - - // New environment 2 - context.push(); - Symbol manager = new Symbol(Namespace.FIELD_NAME, "s.manager"); - environment().define(manager, OBJECT); - - Map typeByName = environment().resolveAll(Namespace.FIELD_NAME); - assertThat( - typeByName, - allOf( - aMapWithSize(3), - hasEntry("s.birthday", DATE), - hasEntry("s.city", KEYWORD), - hasEntry("s.manager", OBJECT) - ) - ); - } - - private Environment environment() { - return context.peek(); - } - + /** Use context class for push/pop */ + private final SemanticContext context = new SemanticContext(); + + @Test + public void defineFieldSymbolInDifferentEnvironmentsShouldBeAbleToResolve() { + // Root environment + Symbol birthday = new Symbol(Namespace.FIELD_NAME, "s.birthday"); + environment().define(birthday, DATE); + Assert.assertTrue(environment().resolve(birthday).isPresent()); + + // New environment 1 + context.push(); + Symbol city = new Symbol(Namespace.FIELD_NAME, "s.city"); + environment().define(city, KEYWORD); + Assert.assertTrue(environment().resolve(birthday).isPresent()); + Assert.assertTrue(environment().resolve(city).isPresent()); + + // New environment 2 + context.push(); + Symbol manager = new Symbol(Namespace.FIELD_NAME, "s.manager"); + environment().define(manager, OBJECT); + Assert.assertTrue(environment().resolve(birthday).isPresent()); + 
Assert.assertTrue(environment().resolve(city).isPresent()); + Assert.assertTrue(environment().resolve(manager).isPresent()); + } + + @Test + public void defineFieldSymbolInDifferentEnvironmentsShouldNotAbleToResolveOncePopped() { + // Root environment + Symbol birthday = new Symbol(Namespace.FIELD_NAME, "s.birthday"); + environment().define(birthday, DATE); + + // New environment + context.push(); + Symbol city = new Symbol(Namespace.FIELD_NAME, "s.city"); + Symbol manager = new Symbol(Namespace.FIELD_NAME, "s.manager"); + environment().define(city, OBJECT); + environment().define(manager, OBJECT); + Assert.assertTrue(environment().resolve(birthday).isPresent()); + Assert.assertTrue(environment().resolve(city).isPresent()); + Assert.assertTrue(environment().resolve(manager).isPresent()); + + context.pop(); + Assert.assertFalse(environment().resolve(city).isPresent()); + Assert.assertFalse(environment().resolve(manager).isPresent()); + Assert.assertTrue(environment().resolve(birthday).isPresent()); + } + + @Test + public void defineFieldSymbolInDifferentEnvironmentsShouldBeAbleToResolveByPrefix() { + // Root environment + Symbol birthday = new Symbol(Namespace.FIELD_NAME, "s.birthday"); + environment().define(birthday, DATE); + + // New environment 1 + context.push(); + Symbol city = new Symbol(Namespace.FIELD_NAME, "s.city"); + environment().define(city, KEYWORD); + + // New environment 2 + context.push(); + Symbol manager = new Symbol(Namespace.FIELD_NAME, "s.manager"); + environment().define(manager, OBJECT); + + Map typeByName = + environment().resolveByPrefix(new Symbol(Namespace.FIELD_NAME, "s")); + assertThat( + typeByName, + allOf( + aMapWithSize(3), + hasEntry("s.birthday", DATE), + hasEntry("s.city", KEYWORD), + hasEntry("s.manager", OBJECT))); + } + + @Test + public void defineFieldSymbolShouldBeAbleToResolveAll() { + environment() + .define( + new Symbol(Namespace.FIELD_NAME, "s.projects"), + new OpenSearchIndex("s.projects", NESTED_FIELD)); + 
environment().define(new Symbol(Namespace.FIELD_NAME, "s.projects.release"), DATE); + environment().define(new Symbol(Namespace.FIELD_NAME, "s.projects.active"), BOOLEAN); + environment().define(new Symbol(Namespace.FIELD_NAME, "s.address"), TEXT); + environment().define(new Symbol(Namespace.FIELD_NAME, "s.city"), KEYWORD); + environment().define(new Symbol(Namespace.FIELD_NAME, "s.manager.name"), TEXT); + + Map typeByName = environment().resolveAll(Namespace.FIELD_NAME); + assertThat( + typeByName, + allOf( + aMapWithSize(6), + hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), + hasEntry("s.projects.release", DATE), + hasEntry("s.projects.active", BOOLEAN), + hasEntry("s.address", TEXT), + hasEntry("s.city", KEYWORD), + hasEntry("s.manager.name", TEXT))); + } + + @Test + public void defineFieldSymbolInDifferentEnvironmentsShouldBeAbleToResolveAll() { + // Root environment + Symbol birthday = new Symbol(Namespace.FIELD_NAME, "s.birthday"); + environment().define(birthday, DATE); + + // New environment 1 + context.push(); + Symbol city = new Symbol(Namespace.FIELD_NAME, "s.city"); + environment().define(city, KEYWORD); + + // New environment 2 + context.push(); + Symbol manager = new Symbol(Namespace.FIELD_NAME, "s.manager"); + environment().define(manager, OBJECT); + + Map typeByName = environment().resolveAll(Namespace.FIELD_NAME); + assertThat( + typeByName, + allOf( + aMapWithSize(3), + hasEntry("s.birthday", DATE), + hasEntry("s.city", KEYWORD), + hasEntry("s.manager", OBJECT))); + } + + private Environment environment() { + return context.peek(); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/SemanticContextTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/SemanticContextTest.java index 689fdd20f6..e19b48f2a0 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/SemanticContextTest.java +++ 
b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/SemanticContextTest.java @@ -3,42 +3,37 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.scope; import org.junit.Assert; import org.junit.Test; -/** - * Test cases for semantic context - */ +/** Test cases for semantic context */ public class SemanticContextTest { - private final SemanticContext context = new SemanticContext(); - - @Test - public void rootEnvironmentShouldBeThereInitially() { - Assert.assertNotNull( - "Didn't find root environment. Context is NOT supposed to be empty initially", - context.peek() - ); - } - - @Test - public void pushAndPopEnvironmentShouldPass() { - context.push(); - context.pop(); - } - - @Test - public void popRootEnvironmentShouldPass() { - context.pop(); - } - - @Test(expected = NullPointerException.class) - public void popEmptyEnvironmentStackShouldFail() { - context.pop(); - context.pop(); - } - + private final SemanticContext context = new SemanticContext(); + + @Test + public void rootEnvironmentShouldBeThereInitially() { + Assert.assertNotNull( + "Didn't find root environment. 
Context is NOT supposed to be empty initially", + context.peek()); + } + + @Test + public void pushAndPopEnvironmentShouldPass() { + context.push(); + context.pop(); + } + + @Test + public void popRootEnvironmentShouldPass() { + context.pop(); + } + + @Test(expected = NullPointerException.class) + public void popEmptyEnvironmentStackShouldFail() { + context.pop(); + context.pop(); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/SymbolTableTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/SymbolTableTest.java index 3ccc75da62..8fde3bdc3c 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/SymbolTableTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/SymbolTableTest.java @@ -3,13 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.scope; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.hasEntry; -import static org.hamcrest.MatcherAssert.assertThat; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.BOOLEAN; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.DATE; import static org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType.KEYWORD; @@ -25,65 +24,62 @@ import org.opensearch.sql.legacy.antlr.semantic.types.TypeExpression; import org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchIndex; -/** - * Test cases for symbol table - */ +/** Test cases for symbol table */ public class SymbolTableTest { - private final SymbolTable symbolTable = new SymbolTable(); + private final SymbolTable symbolTable = new SymbolTable(); - @Test - public void defineFieldSymbolShouldBeAbleToResolve() { - defineSymbolShouldBeAbleToResolve(new Symbol(Namespace.FIELD_NAME, "birthday"), 
DATE); - } + @Test + public void defineFieldSymbolShouldBeAbleToResolve() { + defineSymbolShouldBeAbleToResolve(new Symbol(Namespace.FIELD_NAME, "birthday"), DATE); + } - @Test - public void defineFunctionSymbolShouldBeAbleToResolve() { - String funcName = "LOG"; - Type expectedType = new TypeExpression() { - @Override - public String getName() { - return "Temp type expression with [NUMBER] -> NUMBER specification"; - } + @Test + public void defineFunctionSymbolShouldBeAbleToResolve() { + String funcName = "LOG"; + Type expectedType = + new TypeExpression() { + @Override + public String getName() { + return "Temp type expression with [NUMBER] -> NUMBER specification"; + } - @Override - public TypeExpressionSpec[] specifications() { - return new TypeExpressionSpec[] { - new TypeExpressionSpec().map(NUMBER).to(NUMBER) - }; - } + @Override + public TypeExpressionSpec[] specifications() { + return new TypeExpressionSpec[] {new TypeExpressionSpec().map(NUMBER).to(NUMBER)}; + } }; - Symbol symbol = new Symbol(Namespace.FUNCTION_NAME, funcName); - defineSymbolShouldBeAbleToResolve(symbol, expectedType); - } - - @Test - public void defineFieldSymbolShouldBeAbleToResolveByPrefix() { - symbolTable.store(new Symbol(Namespace.FIELD_NAME, "s.projects"), new OpenSearchIndex("s.projects", NESTED_FIELD)); - symbolTable.store(new Symbol(Namespace.FIELD_NAME, "s.projects.release"), DATE); - symbolTable.store(new Symbol(Namespace.FIELD_NAME, "s.projects.active"), BOOLEAN); - symbolTable.store(new Symbol(Namespace.FIELD_NAME, "s.address"), TEXT); - symbolTable.store(new Symbol(Namespace.FIELD_NAME, "s.city"), KEYWORD); - symbolTable.store(new Symbol(Namespace.FIELD_NAME, "s.manager.name"), TEXT); + Symbol symbol = new Symbol(Namespace.FUNCTION_NAME, funcName); + defineSymbolShouldBeAbleToResolve(symbol, expectedType); + } - Map typeByName = symbolTable.lookupByPrefix(new Symbol(Namespace.FIELD_NAME, "s.projects")); - assertThat( - typeByName, - allOf( - aMapWithSize(3), - 
hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), - hasEntry("s.projects.release", DATE), - hasEntry("s.projects.active", BOOLEAN) - ) - ); - } + @Test + public void defineFieldSymbolShouldBeAbleToResolveByPrefix() { + symbolTable.store( + new Symbol(Namespace.FIELD_NAME, "s.projects"), + new OpenSearchIndex("s.projects", NESTED_FIELD)); + symbolTable.store(new Symbol(Namespace.FIELD_NAME, "s.projects.release"), DATE); + symbolTable.store(new Symbol(Namespace.FIELD_NAME, "s.projects.active"), BOOLEAN); + symbolTable.store(new Symbol(Namespace.FIELD_NAME, "s.address"), TEXT); + symbolTable.store(new Symbol(Namespace.FIELD_NAME, "s.city"), KEYWORD); + symbolTable.store(new Symbol(Namespace.FIELD_NAME, "s.manager.name"), TEXT); - private void defineSymbolShouldBeAbleToResolve(Symbol symbol, Type expectedType) { - symbolTable.store(symbol, expectedType); + Map typeByName = + symbolTable.lookupByPrefix(new Symbol(Namespace.FIELD_NAME, "s.projects")); + assertThat( + typeByName, + allOf( + aMapWithSize(3), + hasEntry("s.projects", (Type) new OpenSearchIndex("s.projects", NESTED_FIELD)), + hasEntry("s.projects.release", DATE), + hasEntry("s.projects.active", BOOLEAN))); + } - Optional actualType = symbolTable.lookup(symbol); - Assert.assertTrue(actualType.isPresent()); - Assert.assertEquals(expectedType, actualType.get()); - } + private void defineSymbolShouldBeAbleToResolve(Symbol symbol, Type expectedType) { + symbolTable.store(symbol, expectedType); + Optional actualType = symbolTable.lookup(symbol); + Assert.assertTrue(actualType.isPresent()); + Assert.assertEquals(expectedType, actualType.get()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/TypeSupplierTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/TypeSupplierTest.java index a0b60de4be..e6090117c1 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/TypeSupplierTest.java +++ 
b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/scope/TypeSupplierTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.scope; import static org.junit.Assert.assertEquals; @@ -15,31 +14,30 @@ import org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType; public class TypeSupplierTest { - @Rule - public ExpectedException exception = ExpectedException.none(); + @Rule public ExpectedException exception = ExpectedException.none(); - @Test - public void haveOneTypeShouldPass() { - TypeSupplier age = new TypeSupplier("age", OpenSearchDataType.INTEGER); + @Test + public void haveOneTypeShouldPass() { + TypeSupplier age = new TypeSupplier("age", OpenSearchDataType.INTEGER); - assertEquals(OpenSearchDataType.INTEGER, age.get()); - } + assertEquals(OpenSearchDataType.INTEGER, age.get()); + } - @Test - public void addSameTypeShouldPass() { - TypeSupplier age = new TypeSupplier("age", OpenSearchDataType.INTEGER); - age.add(OpenSearchDataType.INTEGER); + @Test + public void addSameTypeShouldPass() { + TypeSupplier age = new TypeSupplier("age", OpenSearchDataType.INTEGER); + age.add(OpenSearchDataType.INTEGER); - assertEquals(OpenSearchDataType.INTEGER, age.get()); - } + assertEquals(OpenSearchDataType.INTEGER, age.get()); + } - @Test - public void haveTwoTypesShouldThrowException() { - TypeSupplier age = new TypeSupplier("age", OpenSearchDataType.INTEGER); - age.add(OpenSearchDataType.TEXT); + @Test + public void haveTwoTypesShouldThrowException() { + TypeSupplier age = new TypeSupplier("age", OpenSearchDataType.INTEGER); + age.add(OpenSearchDataType.TEXT); - exception.expect(SemanticAnalysisException.class); - exception.expectMessage("Field [age] have conflict type"); - age.get(); - } + exception.expect(SemanticAnalysisException.class); + exception.expectMessage("Field [age] have conflict type"); + age.get(); + } } diff --git 
a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/BaseTypeTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/BaseTypeTest.java index a8ddfd43e8..0269c6b01c 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/BaseTypeTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/BaseTypeTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types; import static org.junit.Assert.assertEquals; @@ -30,78 +29,75 @@ import org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchDataType; import org.opensearch.sql.legacy.antlr.semantic.types.base.OpenSearchIndex; -/** - * Test base type compatibility - */ +/** Test base type compatibility */ public class BaseTypeTest { - @Test - public void unknownTypeNameShouldReturnUnknown() { - assertEquals(UNKNOWN, OpenSearchDataType.typeOf("this_is_a_new_es_type_we_arent_aware")); - } - - @Test - public void typeOfShouldIgnoreCase() { - assertEquals(INTEGER, OpenSearchDataType.typeOf("Integer")); - } - - @Test - public void sameBaseTypeShouldBeCompatible() { - assertTrue(INTEGER.isCompatible(INTEGER)); - assertTrue(BOOLEAN.isCompatible(BOOLEAN)); - } - - @Test - public void parentBaseTypeShouldBeCompatibleWithSubBaseType() { - assertTrue(NUMBER.isCompatible(DOUBLE)); - assertTrue(DOUBLE.isCompatible(FLOAT)); - assertTrue(FLOAT.isCompatible(INTEGER)); - assertTrue(INTEGER.isCompatible(SHORT)); - assertTrue(INTEGER.isCompatible(LONG)); - assertTrue(STRING.isCompatible(TEXT)); - assertTrue(STRING.isCompatible(KEYWORD)); - assertTrue(DATE.isCompatible(STRING)); - } - - @Test - public void ancestorBaseTypeShouldBeCompatibleWithSubBaseType() { - assertTrue(NUMBER.isCompatible(LONG)); - assertTrue(NUMBER.isCompatible(DOUBLE)); - assertTrue(DOUBLE.isCompatible(INTEGER)); - assertTrue(INTEGER.isCompatible(SHORT)); - assertTrue(INTEGER.isCompatible(LONG)); - } - - @Ignore("Two 
way compatibility is not necessary") - @Test - public void subBaseTypeShouldBeCompatibleWithParentBaseType() { - assertTrue(KEYWORD.isCompatible(STRING)); - } - - @Test - public void nonRelatedBaseTypeShouldNotBeCompatible() { - assertFalse(SHORT.isCompatible(TEXT)); - assertFalse(DATE.isCompatible(BOOLEAN)); - } - - @Test - public void unknownBaseTypeShouldBeCompatibleWithAnyBaseType() { - assertTrue(UNKNOWN.isCompatible(INTEGER)); - assertTrue(UNKNOWN.isCompatible(KEYWORD)); - assertTrue(UNKNOWN.isCompatible(BOOLEAN)); - } - - @Test - public void anyBaseTypeShouldBeCompatibleWithUnknownBaseType() { - assertTrue(LONG.isCompatible(UNKNOWN)); - assertTrue(TEXT.isCompatible(UNKNOWN)); - assertTrue(DATE.isCompatible(UNKNOWN)); - } - - @Test - public void nestedIndexTypeShouldBeCompatibleWithNestedDataType() { - assertTrue(NESTED.isCompatible(new OpenSearchIndex("test", NESTED_FIELD))); - assertTrue(OPENSEARCH_TYPE.isCompatible(new OpenSearchIndex("test", NESTED_FIELD))); - } - + @Test + public void unknownTypeNameShouldReturnUnknown() { + assertEquals(UNKNOWN, OpenSearchDataType.typeOf("this_is_a_new_es_type_we_arent_aware")); + } + + @Test + public void typeOfShouldIgnoreCase() { + assertEquals(INTEGER, OpenSearchDataType.typeOf("Integer")); + } + + @Test + public void sameBaseTypeShouldBeCompatible() { + assertTrue(INTEGER.isCompatible(INTEGER)); + assertTrue(BOOLEAN.isCompatible(BOOLEAN)); + } + + @Test + public void parentBaseTypeShouldBeCompatibleWithSubBaseType() { + assertTrue(NUMBER.isCompatible(DOUBLE)); + assertTrue(DOUBLE.isCompatible(FLOAT)); + assertTrue(FLOAT.isCompatible(INTEGER)); + assertTrue(INTEGER.isCompatible(SHORT)); + assertTrue(INTEGER.isCompatible(LONG)); + assertTrue(STRING.isCompatible(TEXT)); + assertTrue(STRING.isCompatible(KEYWORD)); + assertTrue(DATE.isCompatible(STRING)); + } + + @Test + public void ancestorBaseTypeShouldBeCompatibleWithSubBaseType() { + assertTrue(NUMBER.isCompatible(LONG)); + assertTrue(NUMBER.isCompatible(DOUBLE)); + 
assertTrue(DOUBLE.isCompatible(INTEGER)); + assertTrue(INTEGER.isCompatible(SHORT)); + assertTrue(INTEGER.isCompatible(LONG)); + } + + @Ignore("Two way compatibility is not necessary") + @Test + public void subBaseTypeShouldBeCompatibleWithParentBaseType() { + assertTrue(KEYWORD.isCompatible(STRING)); + } + + @Test + public void nonRelatedBaseTypeShouldNotBeCompatible() { + assertFalse(SHORT.isCompatible(TEXT)); + assertFalse(DATE.isCompatible(BOOLEAN)); + } + + @Test + public void unknownBaseTypeShouldBeCompatibleWithAnyBaseType() { + assertTrue(UNKNOWN.isCompatible(INTEGER)); + assertTrue(UNKNOWN.isCompatible(KEYWORD)); + assertTrue(UNKNOWN.isCompatible(BOOLEAN)); + } + + @Test + public void anyBaseTypeShouldBeCompatibleWithUnknownBaseType() { + assertTrue(LONG.isCompatible(UNKNOWN)); + assertTrue(TEXT.isCompatible(UNKNOWN)); + assertTrue(DATE.isCompatible(UNKNOWN)); + } + + @Test + public void nestedIndexTypeShouldBeCompatibleWithNestedDataType() { + assertTrue(NESTED.isCompatible(new OpenSearchIndex("test", NESTED_FIELD))); + assertTrue(OPENSEARCH_TYPE.isCompatible(new OpenSearchIndex("test", NESTED_FIELD))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/GenericTypeTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/GenericTypeTest.java index db76c01947..0bd8b526bb 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/GenericTypeTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/GenericTypeTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types; import static java.util.Collections.singletonList; @@ -19,34 +18,31 @@ import org.junit.Test; -/** - * Generic type test - */ +/** Generic type test */ public class GenericTypeTest { - @Test - public void passNumberArgToLogShouldReturnNumber() { - assertEquals(DOUBLE, LOG.construct(singletonList(NUMBER))); - } - - @Test - 
public void passIntegerArgToLogShouldReturnDouble() { - assertEquals(DOUBLE, LOG.construct(singletonList(INTEGER))); - } - - @Test - public void passLongArgToLogShouldReturnDouble() { - assertEquals(DOUBLE, LOG.construct(singletonList(LONG))); - } - - @Test - public void passTextArgToLogShouldReturnTypeError() { - assertEquals(TYPE_ERROR, LOG.construct(singletonList(TEXT))); - } - - @Test - public void passKeywordArgToLogShouldReturnTypeError() { - assertEquals(TYPE_ERROR, LOG.construct(singletonList(KEYWORD))); - } - + @Test + public void passNumberArgToLogShouldReturnNumber() { + assertEquals(DOUBLE, LOG.construct(singletonList(NUMBER))); + } + + @Test + public void passIntegerArgToLogShouldReturnDouble() { + assertEquals(DOUBLE, LOG.construct(singletonList(INTEGER))); + } + + @Test + public void passLongArgToLogShouldReturnDouble() { + assertEquals(DOUBLE, LOG.construct(singletonList(LONG))); + } + + @Test + public void passTextArgToLogShouldReturnTypeError() { + assertEquals(TYPE_ERROR, LOG.construct(singletonList(TEXT))); + } + + @Test + public void passKeywordArgToLogShouldReturnTypeError() { + assertEquals(TYPE_ERROR, LOG.construct(singletonList(KEYWORD))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/ProductTypeTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/ProductTypeTest.java index 326dd6ce06..5c87aabdee 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/ProductTypeTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/ProductTypeTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types; import static java.util.Collections.singletonList; @@ -18,56 +17,53 @@ import org.junit.Test; import org.opensearch.sql.legacy.antlr.semantic.types.special.Product; -/** - * Test cases fro product type - */ +/** Test cases fro product type */ public class ProductTypeTest { - 
@Test - public void singleSameTypeInTwoProductsShouldPass() { - Product product1 = new Product(singletonList(INTEGER)); - Product product2 = new Product(singletonList(INTEGER)); - Assert.assertTrue(product1.isCompatible(product2)); - Assert.assertTrue(product2.isCompatible(product1)); - } - - @Test - public void singleCompatibleTypeInTwoProductsShouldPass() { - Product product1 = new Product(singletonList(NUMBER)); - Product product2 = new Product(singletonList(INTEGER)); - Assert.assertTrue(product1.isCompatible(product2)); - Assert.assertTrue(product2.isCompatible(product1)); - } + @Test + public void singleSameTypeInTwoProductsShouldPass() { + Product product1 = new Product(singletonList(INTEGER)); + Product product2 = new Product(singletonList(INTEGER)); + Assert.assertTrue(product1.isCompatible(product2)); + Assert.assertTrue(product2.isCompatible(product1)); + } - @Test - public void twoCompatibleTypesInTwoProductsShouldPass() { - Product product1 = new Product(Arrays.asList(NUMBER, KEYWORD)); - Product product2 = new Product(Arrays.asList(INTEGER, STRING)); - Assert.assertTrue(product1.isCompatible(product2)); - Assert.assertTrue(product2.isCompatible(product1)); - } + @Test + public void singleCompatibleTypeInTwoProductsShouldPass() { + Product product1 = new Product(singletonList(NUMBER)); + Product product2 = new Product(singletonList(INTEGER)); + Assert.assertTrue(product1.isCompatible(product2)); + Assert.assertTrue(product2.isCompatible(product1)); + } - @Test - public void incompatibleTypesInTwoProductsShouldFail() { - Product product1 = new Product(singletonList(BOOLEAN)); - Product product2 = new Product(singletonList(STRING)); - Assert.assertFalse(product1.isCompatible(product2)); - Assert.assertFalse(product2.isCompatible(product1)); - } + @Test + public void twoCompatibleTypesInTwoProductsShouldPass() { + Product product1 = new Product(Arrays.asList(NUMBER, KEYWORD)); + Product product2 = new Product(Arrays.asList(INTEGER, STRING)); + 
Assert.assertTrue(product1.isCompatible(product2)); + Assert.assertTrue(product2.isCompatible(product1)); + } - @Test - public void compatibleButDifferentTypeNumberInTwoProductsShouldFail() { - Product product1 = new Product(Arrays.asList(KEYWORD, INTEGER)); - Product product2 = new Product(singletonList(STRING)); - Assert.assertFalse(product1.isCompatible(product2)); - Assert.assertFalse(product2.isCompatible(product1)); - } + @Test + public void incompatibleTypesInTwoProductsShouldFail() { + Product product1 = new Product(singletonList(BOOLEAN)); + Product product2 = new Product(singletonList(STRING)); + Assert.assertFalse(product1.isCompatible(product2)); + Assert.assertFalse(product2.isCompatible(product1)); + } - @Test - public void baseTypeShouldBeIncompatibleWithProductType() { - Product product = new Product(singletonList(INTEGER)); - Assert.assertFalse(INTEGER.isCompatible(product)); - Assert.assertFalse(product.isCompatible(INTEGER)); - } + @Test + public void compatibleButDifferentTypeNumberInTwoProductsShouldFail() { + Product product1 = new Product(Arrays.asList(KEYWORD, INTEGER)); + Product product2 = new Product(singletonList(STRING)); + Assert.assertFalse(product1.isCompatible(product2)); + Assert.assertFalse(product2.isCompatible(product1)); + } + @Test + public void baseTypeShouldBeIncompatibleWithProductType() { + Product product = new Product(singletonList(INTEGER)); + Assert.assertFalse(INTEGER.isCompatible(product)); + Assert.assertFalse(product.isCompatible(INTEGER)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/TypeExpressionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/TypeExpressionTest.java index d1d1d7799b..55c184bcaa 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/TypeExpressionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/semantic/types/TypeExpressionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: 
Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.semantic.types; import static org.junit.Assert.assertEquals; @@ -21,59 +20,58 @@ import java.util.Arrays; import org.junit.Test; -/** - * Test cases for default implementation methods in interface TypeExpression - */ +/** Test cases for default implementation methods in interface TypeExpression */ public class TypeExpressionTest { - private final TypeExpression test123 = new TypeExpression() { + private final TypeExpression test123 = + new TypeExpression() { @Override public String getName() { - return "TEST123"; + return "TEST123"; } @Override public TypeExpressionSpec[] specifications() { - return new TypeExpressionSpec[] { - new TypeExpressionSpec().map(T(NUMBER)).to(T), - new TypeExpressionSpec().map(STRING, BOOLEAN).to(DATE) - }; + return new TypeExpressionSpec[] { + new TypeExpressionSpec().map(T(NUMBER)).to(T), + new TypeExpressionSpec().map(STRING, BOOLEAN).to(DATE) + }; } - }; + }; - @Test - public void emptySpecificationShouldAlwaysReturnUnknown() { - TypeExpression expr = new TypeExpression() { - @Override - public TypeExpressionSpec[] specifications() { - return new TypeExpressionSpec[0]; - } + @Test + public void emptySpecificationShouldAlwaysReturnUnknown() { + TypeExpression expr = + new TypeExpression() { + @Override + public TypeExpressionSpec[] specifications() { + return new TypeExpressionSpec[0]; + } - @Override - public String getName() { - return "Temp type expression with empty specification"; - } + @Override + public String getName() { + return "Temp type expression with empty specification"; + } }; - assertEquals(UNKNOWN, expr.construct(Arrays.asList(NUMBER))); - assertEquals(UNKNOWN, expr.construct(Arrays.asList(STRING, BOOLEAN))); - assertEquals(UNKNOWN, expr.construct(Arrays.asList(INTEGER, DOUBLE, GEO_POINT))); - } - - @Test - public void compatibilityCheckShouldPassIfAnySpecificationCompatible() { - assertEquals(DOUBLE, test123.construct(Arrays.asList(DOUBLE))); - 
assertEquals(DATE, test123.construct(Arrays.asList(STRING, BOOLEAN))); - } + assertEquals(UNKNOWN, expr.construct(Arrays.asList(NUMBER))); + assertEquals(UNKNOWN, expr.construct(Arrays.asList(STRING, BOOLEAN))); + assertEquals(UNKNOWN, expr.construct(Arrays.asList(INTEGER, DOUBLE, GEO_POINT))); + } - @Test - public void compatibilityCheckShouldFailIfNoSpecificationCompatible() { - assertEquals(TYPE_ERROR, test123.construct(Arrays.asList(BOOLEAN))); - } + @Test + public void compatibilityCheckShouldPassIfAnySpecificationCompatible() { + assertEquals(DOUBLE, test123.construct(Arrays.asList(DOUBLE))); + assertEquals(DATE, test123.construct(Arrays.asList(STRING, BOOLEAN))); + } - @Test - public void usageShouldPrintAllSpecifications() { - assertEquals("TEST123(NUMBER T) -> T or TEST123(STRING, BOOLEAN) -> DATE", test123.usage()); - } + @Test + public void compatibilityCheckShouldFailIfNoSpecificationCompatible() { + assertEquals(TYPE_ERROR, test123.construct(Arrays.asList(BOOLEAN))); + } + @Test + public void usageShouldPrintAllSpecifications() { + assertEquals("TEST123(NUMBER T) -> T or TEST123(STRING, BOOLEAN) -> DATE", test123.usage()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/visitor/AntlrSqlParseTreeVisitorTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/visitor/AntlrSqlParseTreeVisitorTest.java index c4e7a7e725..be4b5a5197 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/antlr/visitor/AntlrSqlParseTreeVisitorTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/antlr/visitor/AntlrSqlParseTreeVisitorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.antlr.visitor; import static java.util.Collections.emptyList; @@ -25,95 +24,97 @@ import org.opensearch.sql.legacy.antlr.semantic.visitor.TypeChecker; import org.opensearch.sql.legacy.exception.SqlFeatureNotImplementedException; -/** - * Test cases for AntlrSqlParseTreeVisitor - */ +/** Test cases for 
AntlrSqlParseTreeVisitor */ public class AntlrSqlParseTreeVisitorTest { - private TypeChecker analyzer = new TypeChecker(new SemanticContext()) { + private TypeChecker analyzer = + new TypeChecker(new SemanticContext()) { @Override public Type visitIndexName(String indexName) { - return null; // avoid querying mapping on null LocalClusterState + return null; // avoid querying mapping on null LocalClusterState } @Override public Type visitFieldName(String fieldName) { - switch (fieldName) { - case "age": return INTEGER; - case "birthday": return DATE; - default: return UNKNOWN; - } + switch (fieldName) { + case "age": + return INTEGER; + case "birthday": + return DATE; + default: + return UNKNOWN; + } } - }; - - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); - - @Test - public void selectNumberShouldReturnNumberAsQueryVisitingResult() { - Type result = visit("SELECT age FROM test"); - Assert.assertSame(result, INTEGER); - } - - @Test - public void selectNumberAndDateShouldReturnProductOfThemAsQueryVisitingResult() { - Type result = visit("SELECT age, birthday FROM test"); - Assert.assertTrue(result instanceof Product ); - Assert.assertTrue(result.isCompatible(new Product(Arrays.asList(INTEGER, DATE)))); - } - - @Test - public void selectStarShouldReturnEmptyProductAsQueryVisitingResult() { - Type result = visit("SELECT * FROM test"); - Assert.assertTrue(result instanceof Product); - Assert.assertTrue(result.isCompatible(new Product(emptyList()))); - } - - @Test - public void visitSelectNestedFunctionShouldThrowException() { - exceptionRule.expect(SqlFeatureNotImplementedException.class); - exceptionRule.expectMessage("Nested function calls like [abs(log(age))] are not supported yet"); - visit("SELECT abs(log(age)) FROM test"); - } - - @Test - public void visitWhereNestedFunctionShouldThrowException() { - exceptionRule.expect(SqlFeatureNotImplementedException.class); - exceptionRule.expectMessage("Nested function calls like [abs(log(age))] 
are not supported yet"); - visit("SELECT age FROM test WHERE abs(log(age)) = 1"); - } - - @Test - public void visitMathConstantAsNestedFunctionShouldPass() { - visit("SELECT abs(pi()) FROM test"); - } - - @Test - public void visitSupportedNestedFunctionShouldPass() { - visit("SELECT sum(nested(name.balance)) FROM test"); - } - - @Test - public void visitFunctionAsAggregatorShouldThrowException() { - exceptionRule.expect(SqlFeatureNotImplementedException.class); - exceptionRule.expectMessage("Aggregation calls with function aggregator like [max(abs(age))] are not supported yet"); - visit("SELECT max(abs(age)) FROM test"); - } - - @Test - public void visitUnsupportedOperatorShouldThrowException() { - exceptionRule.expect(SqlFeatureNotImplementedException.class); - exceptionRule.expectMessage("Operator [DIV] is not supported yet"); - visit("SELECT balance DIV age FROM test"); - } - - private ParseTree createParseTree(String sql) { - return new OpenSearchLegacySqlAnalyzer(new SqlAnalysisConfig(true, true, 1000)).analyzeSyntax(sql); - } - - private Type visit(String sql) { - ParseTree parseTree = createParseTree(sql); - return parseTree.accept(new AntlrSqlParseTreeVisitor<>(analyzer)); - } - + }; + + @Rule public ExpectedException exceptionRule = ExpectedException.none(); + + @Test + public void selectNumberShouldReturnNumberAsQueryVisitingResult() { + Type result = visit("SELECT age FROM test"); + Assert.assertSame(result, INTEGER); + } + + @Test + public void selectNumberAndDateShouldReturnProductOfThemAsQueryVisitingResult() { + Type result = visit("SELECT age, birthday FROM test"); + Assert.assertTrue(result instanceof Product); + Assert.assertTrue(result.isCompatible(new Product(Arrays.asList(INTEGER, DATE)))); + } + + @Test + public void selectStarShouldReturnEmptyProductAsQueryVisitingResult() { + Type result = visit("SELECT * FROM test"); + Assert.assertTrue(result instanceof Product); + Assert.assertTrue(result.isCompatible(new Product(emptyList()))); + } + + 
@Test + public void visitSelectNestedFunctionShouldThrowException() { + exceptionRule.expect(SqlFeatureNotImplementedException.class); + exceptionRule.expectMessage("Nested function calls like [abs(log(age))] are not supported yet"); + visit("SELECT abs(log(age)) FROM test"); + } + + @Test + public void visitWhereNestedFunctionShouldThrowException() { + exceptionRule.expect(SqlFeatureNotImplementedException.class); + exceptionRule.expectMessage("Nested function calls like [abs(log(age))] are not supported yet"); + visit("SELECT age FROM test WHERE abs(log(age)) = 1"); + } + + @Test + public void visitMathConstantAsNestedFunctionShouldPass() { + visit("SELECT abs(pi()) FROM test"); + } + + @Test + public void visitSupportedNestedFunctionShouldPass() { + visit("SELECT sum(nested(name.balance)) FROM test"); + } + + @Test + public void visitFunctionAsAggregatorShouldThrowException() { + exceptionRule.expect(SqlFeatureNotImplementedException.class); + exceptionRule.expectMessage( + "Aggregation calls with function aggregator like [max(abs(age))] are not supported yet"); + visit("SELECT max(abs(age)) FROM test"); + } + + @Test + public void visitUnsupportedOperatorShouldThrowException() { + exceptionRule.expect(SqlFeatureNotImplementedException.class); + exceptionRule.expectMessage("Operator [DIV] is not supported yet"); + visit("SELECT balance DIV age FROM test"); + } + + private ParseTree createParseTree(String sql) { + return new OpenSearchLegacySqlAnalyzer(new SqlAnalysisConfig(true, true, 1000)) + .analyzeSyntax(sql); + } + + private Type visit(String sql) { + ParseTree parseTree = createParseTree(sql); + return parseTree.accept(new AntlrSqlParseTreeVisitor<>(analyzer)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappingTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappingTest.java index fe8b25ed1c..ed2611786a 100644 --- 
a/legacy/src/test/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappingTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappingTest.java @@ -3,12 +3,11 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.esdomain.mapping; import static java.util.Collections.emptyMap; -import static org.hamcrest.Matchers.is; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; import static org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetadata; import com.google.common.collect.ImmutableMap; @@ -22,109 +21,83 @@ import org.opensearch.sql.legacy.util.MatcherUtils; /** - * Unit test for {@code FieldMapping} with trivial methods ignored such as isSpecified, isMetaField etc. + * Unit test for {@code FieldMapping} with trivial methods ignored such as isSpecified, isMetaField + * etc. */ public class FieldMappingTest { - @Test - public void testFieldMatchesWildcardPatternSpecifiedInQuery() { - assertThat( - new FieldMapping("employee.first", emptyMap(), fieldsSpecifiedInQuery("employee.*")), - isWildcardSpecified(true) - ); - } - - @Test - public void testFieldMismatchesWildcardPatternSpecifiedInQuery() { - assertThat( - new FieldMapping("employee.first", emptyMap(), fieldsSpecifiedInQuery("manager.*")), - isWildcardSpecified(false) - ); - } - - @Test - public void testFieldIsProperty() { - assertThat( - new FieldMapping("employee.first"), - isPropertyField(true) - ); - } - - @Test - public void testNestedMultiFieldIsProperty() { - assertThat( - new FieldMapping("employee.first.keyword"), - isPropertyField(true) - ); - } - - @Test - public void testFieldIsNotProperty() { - assertThat( - new FieldMapping("employee"), - isPropertyField(false) - ); - } - - @Test - public void testMultiFieldIsNotProperty() { - assertThat( - new FieldMapping("employee.keyword"), - isPropertyField(false) - ); - } - - @Test - public void 
testUnknownFieldTreatedAsObject() { - assertThat( - new FieldMapping("employee"), - hasType("object") - ); - } - - @Test - public void testDeepNestedField() { - assertThat( - new FieldMapping( + @Test + public void testFieldMatchesWildcardPatternSpecifiedInQuery() { + assertThat( + new FieldMapping("employee.first", emptyMap(), fieldsSpecifiedInQuery("employee.*")), + isWildcardSpecified(true)); + } + + @Test + public void testFieldMismatchesWildcardPatternSpecifiedInQuery() { + assertThat( + new FieldMapping("employee.first", emptyMap(), fieldsSpecifiedInQuery("manager.*")), + isWildcardSpecified(false)); + } + + @Test + public void testFieldIsProperty() { + assertThat(new FieldMapping("employee.first"), isPropertyField(true)); + } + + @Test + public void testNestedMultiFieldIsProperty() { + assertThat(new FieldMapping("employee.first.keyword"), isPropertyField(true)); + } + + @Test + public void testFieldIsNotProperty() { + assertThat(new FieldMapping("employee"), isPropertyField(false)); + } + + @Test + public void testMultiFieldIsNotProperty() { + assertThat(new FieldMapping("employee.keyword"), isPropertyField(false)); + } + + @Test + public void testUnknownFieldTreatedAsObject() { + assertThat(new FieldMapping("employee"), hasType("object")); + } + + @Test + public void testDeepNestedField() { + assertThat( + new FieldMapping( + "employee.location.city", + ImmutableMap.of( "employee.location.city", - ImmutableMap.of( + new FieldMappingMetadata( "employee.location.city", - new FieldMappingMetadata("employee.location.city", new BytesArray( - "{\n" + - " \"city\" : {\n" + - " \"type\" : \"text\"\n" + - " }\n" + - "}") - ) - ), - emptyMap() - ), - hasType("text") - ); - } - - private Matcher isWildcardSpecified(boolean isMatched) { - return MatcherUtils.featureValueOf("is field match wildcard specified in query", - is(isMatched), - FieldMapping::isWildcardSpecified); - } - - private Matcher isPropertyField(boolean isProperty) { - return 
MatcherUtils.featureValueOf("isPropertyField", - is(isProperty), - FieldMapping::isPropertyField); - } - - private Matcher hasType(String expected) { - return MatcherUtils.featureValueOf("type", - is(expected), - FieldMapping::type); - } - - private Map fieldsSpecifiedInQuery(String...fieldNames) { - return Arrays.stream(fieldNames). - collect(Collectors.toMap(name -> name, - name -> new Field(name, ""))); - } - + new BytesArray( + "{\n" + " \"city\" : {\n" + " \"type\" : \"text\"\n" + " }\n" + "}"))), + emptyMap()), + hasType("text")); + } + + private Matcher isWildcardSpecified(boolean isMatched) { + return MatcherUtils.featureValueOf( + "is field match wildcard specified in query", + is(isMatched), + FieldMapping::isWildcardSpecified); + } + + private Matcher isPropertyField(boolean isProperty) { + return MatcherUtils.featureValueOf( + "isPropertyField", is(isProperty), FieldMapping::isPropertyField); + } + + private Matcher hasType(String expected) { + return MatcherUtils.featureValueOf("type", is(expected), FieldMapping::type); + } + + private Map fieldsSpecifiedInQuery(String... 
fieldNames) { + return Arrays.stream(fieldNames) + .collect(Collectors.toMap(name -> name, name -> new Field(name, ""))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappingsTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappingsTest.java index 412c351c56..f6de8a98e6 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappingsTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/esdomain/mapping/FieldMappingsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.esdomain.mapping; import static org.hamcrest.MatcherAssert.assertThat; @@ -23,51 +22,47 @@ import org.junit.Test; import org.opensearch.sql.legacy.esdomain.LocalClusterState; -/** - * Test for FieldMappings class - */ +/** Test for FieldMappings class */ public class FieldMappingsTest { - private static final String TEST_MAPPING_FILE = "mappings/field_mappings.json"; - - @Before - public void setUp() throws IOException { - URL url = Resources.getResource(TEST_MAPPING_FILE); - String mappings = Resources.toString(url, Charsets.UTF_8); - mockLocalClusterState(mappings); - } + private static final String TEST_MAPPING_FILE = "mappings/field_mappings.json"; - @After - public void cleanUp() { - LocalClusterState.state(null); - } + @Before + public void setUp() throws IOException { + URL url = Resources.getResource(TEST_MAPPING_FILE); + String mappings = Resources.toString(url, Charsets.UTF_8); + mockLocalClusterState(mappings); + } - @Test - public void flatFieldMappingsShouldIncludeFieldsOnAllLevels() { - IndexMappings indexMappings = LocalClusterState.state().getFieldMappings(new String[]{"field_mappings"}); - FieldMappings fieldMappings = indexMappings.firstMapping(); + @After + public void cleanUp() { + LocalClusterState.state(null); + } - Map typeByFieldName = new HashMap<>(); - fieldMappings.flat(typeByFieldName::put); - assertThat( - 
typeByFieldName, - allOf( - aMapWithSize(13), - hasEntry("address", "text"), - hasEntry("age", "integer"), - hasEntry("employer", "text"), - hasEntry("employer.raw", "text"), - hasEntry("employer.keyword", "keyword"), - hasEntry("projects", "nested"), - hasEntry("projects.active", "boolean"), - hasEntry("projects.members", "nested"), - hasEntry("projects.members.name", "text"), - hasEntry("manager", "object"), - hasEntry("manager.name", "text"), - hasEntry("manager.name.keyword", "keyword"), - hasEntry("manager.address", "keyword") - ) - ); - } + @Test + public void flatFieldMappingsShouldIncludeFieldsOnAllLevels() { + IndexMappings indexMappings = + LocalClusterState.state().getFieldMappings(new String[] {"field_mappings"}); + FieldMappings fieldMappings = indexMappings.firstMapping(); + Map typeByFieldName = new HashMap<>(); + fieldMappings.flat(typeByFieldName::put); + assertThat( + typeByFieldName, + allOf( + aMapWithSize(13), + hasEntry("address", "text"), + hasEntry("age", "integer"), + hasEntry("employer", "text"), + hasEntry("employer.raw", "text"), + hasEntry("employer.keyword", "keyword"), + hasEntry("projects", "nested"), + hasEntry("projects.active", "boolean"), + hasEntry("projects.members", "nested"), + hasEntry("projects.members.name", "text"), + hasEntry("manager", "object"), + hasEntry("manager.name", "text"), + hasEntry("manager.name.keyword", "keyword"), + hasEntry("manager.address", "keyword"))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/executor/AsyncRestExecutorTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/executor/AsyncRestExecutorTest.java index b26e171ce7..9be2517c4a 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/executor/AsyncRestExecutorTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/executor/AsyncRestExecutorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor; import static java.util.Collections.emptyList; @@ 
-34,77 +33,69 @@ import org.opensearch.sql.opensearch.setting.OpenSearchSettings; import org.opensearch.threadpool.ThreadPool; -/** - * Test AsyncRestExecutor behavior. - */ +/** Test AsyncRestExecutor behavior. */ @RunWith(MockitoJUnitRunner.Silent.class) public class AsyncRestExecutorTest { - private static final boolean NON_BLOCKING = false; - - @Mock - private RestExecutor executor; + private static final boolean NON_BLOCKING = false; - @Mock - private Client client; + @Mock private RestExecutor executor; - private Map params = emptyMap(); + @Mock private Client client; - @Mock - private QueryAction action; + private Map params = emptyMap(); - @Mock - private RestChannel channel; + @Mock private QueryAction action; - @Mock - private ClusterSettings clusterSettings; + @Mock private RestChannel channel; - @Before - public void setUp() { - when(client.threadPool()).thenReturn(mock(ThreadPool.class)); - when(action.getSqlRequest()).thenReturn(SqlRequest.NULL); - when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); + @Mock private ClusterSettings clusterSettings; - OpenSearchSettings settings = spy(new OpenSearchSettings(clusterSettings)); - doReturn(emptyList()).when(settings).getSettings(); - LocalClusterState.state().setPluginSettings(settings); - } + @Before + public void setUp() { + when(client.threadPool()).thenReturn(mock(ThreadPool.class)); + when(action.getSqlRequest()).thenReturn(SqlRequest.NULL); + when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); - @Test - public void executeBlockingQuery() throws Exception { - Thread.currentThread().setName(TRANSPORT_WORKER_THREAD_NAME_PREFIX); - execute(); - verifyRunInWorkerThread(); - } + OpenSearchSettings settings = spy(new OpenSearchSettings(clusterSettings)); + doReturn(emptyList()).when(settings).getSettings(); + LocalClusterState.state().setPluginSettings(settings); + } - @Test - public void 
executeBlockingQueryButNotInTransport() throws Exception { - execute(); - verifyRunInCurrentThread(); - } + @Test + public void executeBlockingQuery() throws Exception { + Thread.currentThread().setName(TRANSPORT_WORKER_THREAD_NAME_PREFIX); + execute(); + verifyRunInWorkerThread(); + } - @Test - public void executeNonBlockingQuery() throws Exception { - execute(anyAction -> NON_BLOCKING); - verifyRunInCurrentThread(); - } + @Test + public void executeBlockingQueryButNotInTransport() throws Exception { + execute(); + verifyRunInCurrentThread(); + } - private void execute() throws Exception { - AsyncRestExecutor asyncExecutor = new AsyncRestExecutor(executor); - asyncExecutor.execute(client, params, action, channel); - } + @Test + public void executeNonBlockingQuery() throws Exception { + execute(anyAction -> NON_BLOCKING); + verifyRunInCurrentThread(); + } - private void execute(Predicate isBlocking) throws Exception { - AsyncRestExecutor asyncExecutor = new AsyncRestExecutor(executor, isBlocking); - asyncExecutor.execute(client, params, action, channel); - } + private void execute() throws Exception { + AsyncRestExecutor asyncExecutor = new AsyncRestExecutor(executor); + asyncExecutor.execute(client, params, action, channel); + } - private void verifyRunInCurrentThread() { - verify(client, never()).threadPool(); - } + private void execute(Predicate isBlocking) throws Exception { + AsyncRestExecutor asyncExecutor = new AsyncRestExecutor(executor, isBlocking); + asyncExecutor.execute(client, params, action, channel); + } - private void verifyRunInWorkerThread() { - verify(client, times(1)).threadPool(); - } + private void verifyRunInCurrentThread() { + verify(client, never()).threadPool(); + } + private void verifyRunInWorkerThread() { + verify(client, times(1)).threadPool(); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/executor/csv/CSVResultTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/executor/csv/CSVResultTest.java index 
1a24045881..c877095d8f 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/executor/csv/CSVResultTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/executor/csv/CSVResultTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.csv; import static org.junit.Assert.assertEquals; @@ -13,25 +12,21 @@ import java.util.stream.Collectors; import org.junit.Test; -/** - * Unit tests for {@link CSVResult} - */ +/** Unit tests for {@link CSVResult} */ public class CSVResultTest { - private static final String SEPARATOR = ","; + private static final String SEPARATOR = ","; - @Test - public void getHeadersShouldReturnHeadersSanitized() { - CSVResult csv = csv(headers("name", "=age"), lines(line("John", "30"))); - assertEquals( - headers("name", "'=age"), - csv.getHeaders() - ); - } + @Test + public void getHeadersShouldReturnHeadersSanitized() { + CSVResult csv = csv(headers("name", "=age"), lines(line("John", "30"))); + assertEquals(headers("name", "'=age"), csv.getHeaders()); + } - @Test - public void getLinesShouldReturnLinesSanitized() { - CSVResult csv = csv( + @Test + public void getLinesShouldReturnLinesSanitized() { + CSVResult csv = + csv( headers("name", "city"), lines( line("John", "Seattle"), @@ -39,53 +34,42 @@ public void getLinesShouldReturnLinesSanitized() { line("John", "+Seattle"), line("-John", "Seattle"), line("@John", "Seattle"), - line("John", "Seattle=") - ) - ); - - assertEquals( - line( - "John,Seattle", - "John,'=Seattle", - "John,'+Seattle", - "'-John,Seattle", - "'@John,Seattle", - "John,Seattle=" - ), - csv.getLines() - ); - } - - @Test - public void getHeadersShouldReturnHeadersQuotedIfRequired() { - CSVResult csv = csv(headers("na,me", ",,age"), lines(line("John", "30"))); - assertEquals( - headers("\"na,me\"", "\",,age\""), - csv.getHeaders() - ); - } - - @Test - public void getLinesShouldReturnLinesQuotedIfRequired() { - CSVResult csv = csv(headers("name", "age"), 
lines(line("John,Smith", "30,,,"))); - assertEquals( - line("\"John,Smith\",\"30,,,\""), - csv.getLines() - ); - } - - @Test - public void getHeadersShouldReturnHeadersBothSanitizedAndQuotedIfRequired() { - CSVResult csv = csv(headers("na,+me", ",,,=age", "=city,"), lines(line("John", "30", "Seattle"))); - assertEquals( - headers("\"na,+me\"", "\",,,=age\"", "\"'=city,\""), - csv.getHeaders() - ); - } - - @Test - public void getLinesShouldReturnLinesBothSanitizedAndQuotedIfRequired() { - CSVResult csv = csv( + line("John", "Seattle="))); + + assertEquals( + line( + "John,Seattle", + "John,'=Seattle", + "John,'+Seattle", + "'-John,Seattle", + "'@John,Seattle", + "John,Seattle="), + csv.getLines()); + } + + @Test + public void getHeadersShouldReturnHeadersQuotedIfRequired() { + CSVResult csv = csv(headers("na,me", ",,age"), lines(line("John", "30"))); + assertEquals(headers("\"na,me\"", "\",,age\""), csv.getHeaders()); + } + + @Test + public void getLinesShouldReturnLinesQuotedIfRequired() { + CSVResult csv = csv(headers("name", "age"), lines(line("John,Smith", "30,,,"))); + assertEquals(line("\"John,Smith\",\"30,,,\""), csv.getLines()); + } + + @Test + public void getHeadersShouldReturnHeadersBothSanitizedAndQuotedIfRequired() { + CSVResult csv = + csv(headers("na,+me", ",,,=age", "=city,"), lines(line("John", "30", "Seattle"))); + assertEquals(headers("\"na,+me\"", "\",,,=age\"", "\"'=city,\""), csv.getHeaders()); + } + + @Test + public void getLinesShouldReturnLinesBothSanitizedAndQuotedIfRequired() { + CSVResult csv = + csv( headers("name", "city"), lines( line("John", "Seattle"), @@ -93,38 +77,33 @@ public void getLinesShouldReturnLinesBothSanitizedAndQuotedIfRequired() { line("John", "+Sea,ttle"), line(",-John", "Seattle"), line(",,,@John", "Seattle"), - line("John", "Seattle=") - ) - ); - - assertEquals( - line( - "John,Seattle", - "John,'=Seattle", - "John,\"'+Sea,ttle\"", - "\",-John\",Seattle", - "\",,,@John\",Seattle", - "John,Seattle=" - ), - 
csv.getLines() - ); - } - - private CSVResult csv(List headers, List> lines) { - return new CSVResult(SEPARATOR, headers, lines); - } - - private List headers(String... headers) { - return Arrays.stream(headers).collect(Collectors.toList()); - } - - private List line(String... line) { - return Arrays.stream(line).collect(Collectors.toList()); - } - - @SafeVarargs - private final List> lines(List... lines) { - return Arrays.stream(lines).collect(Collectors.toList()); - } - + line("John", "Seattle="))); + + assertEquals( + line( + "John,Seattle", + "John,'=Seattle", + "John,\"'+Sea,ttle\"", + "\",-John\",Seattle", + "\",,,@John\",Seattle", + "John,Seattle="), + csv.getLines()); + } + + private CSVResult csv(List headers, List> lines) { + return new CSVResult(SEPARATOR, headers, lines); + } + + private List headers(String... headers) { + return Arrays.stream(headers).collect(Collectors.toList()); + } + + private List line(String... line) { + return Arrays.stream(line).collect(Collectors.toList()); + } + + @SafeVarargs + private final List> lines(List... 
lines) { + return Arrays.stream(lines).collect(Collectors.toList()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/executor/format/DateFieldFormatterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/executor/format/DateFieldFormatterTest.java index 5807ee2c44..1c2d1bae62 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/executor/format/DateFieldFormatterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/executor/format/DateFieldFormatterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import static org.junit.Assert.assertEquals; @@ -18,685 +17,634 @@ public class DateFieldFormatterTest { - @Test - public void testOpenSearchDashboardsSampleDataEcommerceOrderDateField() - { - String columnName = "order_date"; - String dateFormat = "date_optional_time"; - String originalDateValue = "2020-02-24T09:28:48+00:00"; - String expectedDateValue = "2020-02-24 09:28:48.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testOpenSearchDashboardsSampleDataFlightsTimestampField() - { - String columnName = "timestamp"; - String dateFormat = "date_optional_time"; - String originalDateValue = "2020-02-03T00:00:00"; - String expectedDateValue = "2020-02-03 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testOpenSearchDashboardsSampleDataFlightsTimestampFieldNoTime() - { - String columnName = "timestamp"; - String dateFormat = "date_optional_time"; - String originalDateValue = "2020-02-03T"; - String expectedDateValue = "2020-02-03 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testOpenSearchDashboardsSampleDataLogsUtcDateField() - { - String columnName = "utc_date"; - String dateFormat = "date_optional_time"; - String originalDateValue = 
"2020-02-02T00:39:02.912Z"; - String expectedDateValue = "2020-02-02 00:39:02.912"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testEpochMillis() - { - String columnName = "date_field"; - String dateFormat = "epoch_millis"; - String originalDateValue = "727430805000"; - String expectedDateValue = "1993-01-19 08:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testEpochSecond() - { - String columnName = "date_field"; - String dateFormat = "epoch_second"; - String originalDateValue = "727430805"; - String expectedDateValue = "1993-01-19 08:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testDateOptionalTimeDateOnly() - { - String columnName = "date_field"; - String dateFormat = "date_optional_time"; - String originalDateValue = "1993-01-19"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testDateOptionalTimeDateAndTime() - { - String columnName = "date_field"; - String dateFormat = "date_optional_time"; - String originalDateValue = "1993-01-19T00:06:45.123-0800"; - String expectedDateValue = "1993-01-19 08:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicDate() - { - String columnName = "date_field"; - String dateFormat = "basic_date"; - String originalDateValue = "19930119"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicDateTime() - { - String columnName = "date_field"; - String dateFormat = "basic_date_time"; - String originalDateValue = "19930119T120645.123-0800"; - String expectedDateValue = "1993-01-19 20:06:45.123"; 
- - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicDateTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "basic_date_time_no_millis"; - String originalDateValue = "19930119T120645-0800"; - String expectedDateValue = "1993-01-19 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicOrdinalDate() - { - String columnName = "date_field"; - String dateFormat = "basic_ordinal_date"; - String originalDateValue = "1993019"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicOrdinalDateTime() - { - String columnName = "date_field"; - String dateFormat = "basic_ordinal_date_time"; - String originalDateValue = "1993019T120645.123-0800"; - String expectedDateValue = "1993-01-19 20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicOrdinalDateTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "basic_ordinal_date_time_no_millis"; - String originalDateValue = "1993019T120645-0800"; - String expectedDateValue = "1993-01-19 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicTime() - { - String columnName = "date_field"; - String dateFormat = "basic_time"; - String originalDateValue = "120645.123-0800"; - String expectedDateValue = "1970-01-01 20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "basic_time_no_millis"; - String originalDateValue = "120645-0800"; - String expectedDateValue = "1970-01-01 20:06:45.000"; - - 
verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicTTime() - { - String columnName = "date_field"; - String dateFormat = "basic_t_time"; - String originalDateValue = "T120645.123-0800"; - String expectedDateValue = "1970-01-01 20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicTTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "basic_t_time_no_millis"; - String originalDateValue = "T120645-0800"; - String expectedDateValue = "1970-01-01 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicWeekDate() - { - String columnName = "date_field"; - String dateFormat = "basic_week_date"; - String originalDateValue = "1993W042"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicWeekDateTime() - { - String columnName = "date_field"; - String dateFormat = "basic_week_date_time"; - String originalDateValue = "1993W042T120645.123-0800"; - String expectedDateValue = "1993-01-19 20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testBasicWeekDateTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "basic_week_date_time_no_millis"; - String originalDateValue = "1993W042T120645-0800"; - String expectedDateValue = "1993-01-19 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testDate() - { - String columnName = "date_field"; - String dateFormat = "date"; - String originalDateValue = "1993-01-19"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, 
expectedDateValue); - } - - @Test - public void testDateHour() - { - String columnName = "date_field"; - String dateFormat = "date_hour"; - String originalDateValue = "1993-01-19T12"; - String expectedDateValue = "1993-01-19 12:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testDateHourMinute() - { - String columnName = "date_field"; - String dateFormat = "date_hour_minute"; - String originalDateValue = "1993-01-19T12:06"; - String expectedDateValue = "1993-01-19 12:06:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testDateHourMinuteSecond() - { - String columnName = "date_field"; - String dateFormat = "date_hour_minute_second"; - String originalDateValue = "1993-01-19T12:06:45"; - String expectedDateValue = "1993-01-19 12:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testDateHourMinuteSecondFraction() - { - String columnName = "date_field"; - String dateFormat = "date_hour_minute_second_fraction"; - String originalDateValue = "1993-01-19T12:06:45.123"; - String expectedDateValue = "1993-01-19 12:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testDateHourMinuteSecondMillis() - { - String columnName = "date_field"; - String dateFormat = "date_hour_minute_second_millis"; - String originalDateValue = "1993-01-19T12:06:45.123"; - String expectedDateValue = "1993-01-19 12:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testDateTime() - { - String columnName = "date_field"; - String dateFormat = "date_time"; - String originalDateValue = "1993-01-19T12:06:45.123-0800"; - String expectedDateValue = "1993-01-19 20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, 
expectedDateValue); - } - - @Test - public void testDateTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "date_time_no_millis"; - String originalDateValue = "1993-01-19T12:06:45-0800"; - String expectedDateValue = "1993-01-19 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testHour() - { - String columnName = "date_field"; - String dateFormat = "hour"; - String originalDateValue = "12"; - String expectedDateValue = "1970-01-01 12:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testHourMinute() - { - String columnName = "date_field"; - String dateFormat = "hour_minute"; - String originalDateValue = "12:06"; - String expectedDateValue = "1970-01-01 12:06:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testHourMinuteSecond() - { - String columnName = "date_field"; - String dateFormat = "hour_minute_second"; - String originalDateValue = "12:06:45"; - String expectedDateValue = "1970-01-01 12:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testHourMinuteSecondFraction() - { - String columnName = "date_field"; - String dateFormat = "hour_minute_second_fraction"; - String originalDateValue = "12:06:45.123"; - String expectedDateValue = "1970-01-01 12:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testHourMinuteSecondMillis() - { - String columnName = "date_field"; - String dateFormat = "hour_minute_second_millis"; - String originalDateValue = "12:06:45.123"; - String expectedDateValue = "1970-01-01 12:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testOrdinalDate() - { - String columnName = 
"date_field"; - String dateFormat = "ordinal_date"; - String originalDateValue = "1993-019"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testOrdinalDateTime() - { - String columnName = "date_field"; - String dateFormat = "ordinal_date_time"; - String originalDateValue = "1993-019T12:06:45.123-0800"; - String expectedDateValue = "1993-01-19 20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testOrdinalDateTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "ordinal_date_time_no_millis"; - String originalDateValue = "1993-019T12:06:45-0800"; - String expectedDateValue = "1993-01-19 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testTime() - { - String columnName = "date_field"; - String dateFormat = "time"; - String originalDateValue = "12:06:45.123-0800"; - String expectedDateValue = "1970-01-01 20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "time_no_millis"; - String originalDateValue = "12:06:45-0800"; - String expectedDateValue = "1970-01-01 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testTTime() - { - String columnName = "date_field"; - String dateFormat = "t_time"; - String originalDateValue = "T12:06:45.123-0800"; - String expectedDateValue = "1970-01-01 20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testTTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "t_time_no_millis"; - String originalDateValue = 
"T12:06:45-0800"; - String expectedDateValue = "1970-01-01 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testWeekDate() - { - String columnName = "date_field"; - String dateFormat = "week_date"; - String originalDateValue = "1993-W04-2"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testWeekDateTime() - { - String columnName = "date_field"; - String dateFormat = "week_date_time"; - String originalDateValue = "1993-W04-2T12:06:45.123-0800"; - String expectedDateValue = "1993-01-19 20:06:45.123"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testWeekDateTimeNoMillis() - { - String columnName = "date_field"; - String dateFormat = "week_date_time_no_millis"; - String originalDateValue = "1993-W04-2T12:06:45-0800"; - String expectedDateValue = "1993-01-19 20:06:45.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testWeekyear() - { - String columnName = "date_field"; - String dateFormat = "week_year"; - String originalDateValue = "1993"; - String expectedDateValue = "1993-01-01 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testWeekyearWeek() - { - String columnName = "date_field"; - String dateFormat = "weekyear_week"; - String originalDateValue = "1993-W04"; - String expectedDateValue = "1993-01-17 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testWeekyearWeekDay() - { - String columnName = "date_field"; - String dateFormat = "weekyear_week_day"; - String originalDateValue = "1993-W04-2"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - - verifyFormatting(columnName, 
dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testYear() - { - String columnName = "date_field"; - String dateFormat = "year"; - String originalDateValue = "1993"; - String expectedDateValue = "1993-01-01 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testYearMonth() - { - String columnName = "date_field"; - String dateFormat = "year_month"; - String originalDateValue = "1993-01"; - String expectedDateValue = "1993-01-01 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testYearMonthDay() - { - String columnName = "date_field"; - String dateFormat = "year_month_day"; - String originalDateValue = "1993-01-19"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testCustomFormat() - { - String columnName = "date_field"; - String dateFormat = "EEE, MMM d, ''yy"; - - String originalDateValue = "Tue, Jan 19, '93"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testMultipleFormats() - { - String columnName = "date_field"; - String dateFormat = "date_optional_time||epoch_millis"; - - String originalDateValue = "1993-01-19"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - - originalDateValue = "727401600000"; - expectedDateValue = "1993-01-19 00:00:00.000"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testMultipleCustomFormats() - { - String columnName = "date_field"; - String dateFormat = "EEE, MMM d, ''yy||yyMMddHHmmssZ"; - - String originalDateValue = "Tue, Jan 19, '93"; - String expectedDateValue = 
"1993-01-19 00:00:00.000"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - - originalDateValue = "930119000000-0000"; - expectedDateValue = "1993-01-19 00:00:00.000"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testNamedAndCustomFormats() - { - String columnName = "date_field"; - String dateFormat = "EEE, MMM d, ''yy||hour_minute_second"; - - String originalDateValue = "Tue, Jan 19, '93"; - String expectedDateValue = "1993-01-19 00:00:00.000"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - - originalDateValue = "12:06:45"; - expectedDateValue = "1970-01-01 12:06:45.000"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testIncorrectFormat() - { - String columnName = "date_field"; - String dateFormat = "date_optional_time"; - String originalDateValue = "1581724085"; - // Invalid format for date value; should return original value - String expectedDateValue = "1581724085"; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testNullDateData() - { - String columnName = "date_field"; - String dateFormat = "date_optional_time"; - String originalDateValue = null; - // Nulls should be preserved - String expectedDateValue = null; - - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - @Test - public void testStrictDateOptionalTimeOrEpochMillsShouldPass() - { - String columnName = "date_field"; - String dateFormat = "strict_date_optional_time||epoch_millis"; - - String originalDateValue = "2015-01-01"; - String expectedDateValue = "2015-01-01 00:00:00.000"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - - originalDateValue = "2015-01-01T12:10:30Z"; - expectedDateValue = "2015-01-01 12:10:30.000"; - verifyFormatting(columnName, dateFormat, 
originalDateValue, expectedDateValue); - - originalDateValue = "1420070400001"; - expectedDateValue = "2015-01-01 00:00:00.001"; - verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); - } - - private void verifyFormatting(String columnName, String dateFormatProperty, String originalDateValue, String expectedDateValue) - { - List columns = buildColumnList(columnName); - Map> dateFieldFormatMap = buildDateFieldFormatMap(columnName, dateFormatProperty); - - Map rowSource = new HashMap<>(); - rowSource.put(columnName, originalDateValue); - - DateFieldFormatter dateFieldFormatter = new DateFieldFormatter(dateFieldFormatMap, columns, new HashMap<>()); - executeFormattingAndCompare(dateFieldFormatter, rowSource, columnName, expectedDateValue); - } - - private void executeFormattingAndCompare( - DateFieldFormatter formatter, - Map rowSource, - String columnToCheck, - String expectedDateValue) { - formatter.applyJDBCDateFormat(rowSource); - assertEquals(expectedDateValue, rowSource.get(columnToCheck)); - } - - private List buildColumnList(String columnName) { - return ImmutableList.builder() - .add(new Schema.Column(columnName, null, Schema.Type.DATE)) - .build(); - } - - private Map> buildDateFieldFormatMap(String columnName, String dateFormatProperty) { - return ImmutableMap.>builder() - .put(columnName, Arrays.asList(dateFormatProperty.split("\\|\\|"))) - .build(); - } + @Test + public void testOpenSearchDashboardsSampleDataEcommerceOrderDateField() { + String columnName = "order_date"; + String dateFormat = "date_optional_time"; + String originalDateValue = "2020-02-24T09:28:48+00:00"; + String expectedDateValue = "2020-02-24 09:28:48.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testOpenSearchDashboardsSampleDataFlightsTimestampField() { + String columnName = "timestamp"; + String dateFormat = "date_optional_time"; + String originalDateValue = "2020-02-03T00:00:00"; + 
String expectedDateValue = "2020-02-03 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testOpenSearchDashboardsSampleDataFlightsTimestampFieldNoTime() { + String columnName = "timestamp"; + String dateFormat = "date_optional_time"; + String originalDateValue = "2020-02-03T"; + String expectedDateValue = "2020-02-03 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testOpenSearchDashboardsSampleDataLogsUtcDateField() { + String columnName = "utc_date"; + String dateFormat = "date_optional_time"; + String originalDateValue = "2020-02-02T00:39:02.912Z"; + String expectedDateValue = "2020-02-02 00:39:02.912"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testEpochMillis() { + String columnName = "date_field"; + String dateFormat = "epoch_millis"; + String originalDateValue = "727430805000"; + String expectedDateValue = "1993-01-19 08:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testEpochSecond() { + String columnName = "date_field"; + String dateFormat = "epoch_second"; + String originalDateValue = "727430805"; + String expectedDateValue = "1993-01-19 08:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDateOptionalTimeDateOnly() { + String columnName = "date_field"; + String dateFormat = "date_optional_time"; + String originalDateValue = "1993-01-19"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDateOptionalTimeDateAndTime() { + String columnName = "date_field"; + String dateFormat = "date_optional_time"; + String originalDateValue = "1993-01-19T00:06:45.123-0800"; + 
String expectedDateValue = "1993-01-19 08:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicDate() { + String columnName = "date_field"; + String dateFormat = "basic_date"; + String originalDateValue = "19930119"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicDateTime() { + String columnName = "date_field"; + String dateFormat = "basic_date_time"; + String originalDateValue = "19930119T120645.123-0800"; + String expectedDateValue = "1993-01-19 20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicDateTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "basic_date_time_no_millis"; + String originalDateValue = "19930119T120645-0800"; + String expectedDateValue = "1993-01-19 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicOrdinalDate() { + String columnName = "date_field"; + String dateFormat = "basic_ordinal_date"; + String originalDateValue = "1993019"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicOrdinalDateTime() { + String columnName = "date_field"; + String dateFormat = "basic_ordinal_date_time"; + String originalDateValue = "1993019T120645.123-0800"; + String expectedDateValue = "1993-01-19 20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicOrdinalDateTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "basic_ordinal_date_time_no_millis"; + String originalDateValue = "1993019T120645-0800"; + String expectedDateValue = 
"1993-01-19 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicTime() { + String columnName = "date_field"; + String dateFormat = "basic_time"; + String originalDateValue = "120645.123-0800"; + String expectedDateValue = "1970-01-01 20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "basic_time_no_millis"; + String originalDateValue = "120645-0800"; + String expectedDateValue = "1970-01-01 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicTTime() { + String columnName = "date_field"; + String dateFormat = "basic_t_time"; + String originalDateValue = "T120645.123-0800"; + String expectedDateValue = "1970-01-01 20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicTTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "basic_t_time_no_millis"; + String originalDateValue = "T120645-0800"; + String expectedDateValue = "1970-01-01 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicWeekDate() { + String columnName = "date_field"; + String dateFormat = "basic_week_date"; + String originalDateValue = "1993W042"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testBasicWeekDateTime() { + String columnName = "date_field"; + String dateFormat = "basic_week_date_time"; + String originalDateValue = "1993W042T120645.123-0800"; + String expectedDateValue = "1993-01-19 20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, 
expectedDateValue); + } + + @Test + public void testBasicWeekDateTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "basic_week_date_time_no_millis"; + String originalDateValue = "1993W042T120645-0800"; + String expectedDateValue = "1993-01-19 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDate() { + String columnName = "date_field"; + String dateFormat = "date"; + String originalDateValue = "1993-01-19"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDateHour() { + String columnName = "date_field"; + String dateFormat = "date_hour"; + String originalDateValue = "1993-01-19T12"; + String expectedDateValue = "1993-01-19 12:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDateHourMinute() { + String columnName = "date_field"; + String dateFormat = "date_hour_minute"; + String originalDateValue = "1993-01-19T12:06"; + String expectedDateValue = "1993-01-19 12:06:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDateHourMinuteSecond() { + String columnName = "date_field"; + String dateFormat = "date_hour_minute_second"; + String originalDateValue = "1993-01-19T12:06:45"; + String expectedDateValue = "1993-01-19 12:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDateHourMinuteSecondFraction() { + String columnName = "date_field"; + String dateFormat = "date_hour_minute_second_fraction"; + String originalDateValue = "1993-01-19T12:06:45.123"; + String expectedDateValue = "1993-01-19 12:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void 
testDateHourMinuteSecondMillis() { + String columnName = "date_field"; + String dateFormat = "date_hour_minute_second_millis"; + String originalDateValue = "1993-01-19T12:06:45.123"; + String expectedDateValue = "1993-01-19 12:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDateTime() { + String columnName = "date_field"; + String dateFormat = "date_time"; + String originalDateValue = "1993-01-19T12:06:45.123-0800"; + String expectedDateValue = "1993-01-19 20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testDateTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "date_time_no_millis"; + String originalDateValue = "1993-01-19T12:06:45-0800"; + String expectedDateValue = "1993-01-19 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testHour() { + String columnName = "date_field"; + String dateFormat = "hour"; + String originalDateValue = "12"; + String expectedDateValue = "1970-01-01 12:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testHourMinute() { + String columnName = "date_field"; + String dateFormat = "hour_minute"; + String originalDateValue = "12:06"; + String expectedDateValue = "1970-01-01 12:06:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testHourMinuteSecond() { + String columnName = "date_field"; + String dateFormat = "hour_minute_second"; + String originalDateValue = "12:06:45"; + String expectedDateValue = "1970-01-01 12:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testHourMinuteSecondFraction() { + String columnName = "date_field"; + String dateFormat = 
"hour_minute_second_fraction"; + String originalDateValue = "12:06:45.123"; + String expectedDateValue = "1970-01-01 12:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testHourMinuteSecondMillis() { + String columnName = "date_field"; + String dateFormat = "hour_minute_second_millis"; + String originalDateValue = "12:06:45.123"; + String expectedDateValue = "1970-01-01 12:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testOrdinalDate() { + String columnName = "date_field"; + String dateFormat = "ordinal_date"; + String originalDateValue = "1993-019"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testOrdinalDateTime() { + String columnName = "date_field"; + String dateFormat = "ordinal_date_time"; + String originalDateValue = "1993-019T12:06:45.123-0800"; + String expectedDateValue = "1993-01-19 20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testOrdinalDateTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "ordinal_date_time_no_millis"; + String originalDateValue = "1993-019T12:06:45-0800"; + String expectedDateValue = "1993-01-19 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testTime() { + String columnName = "date_field"; + String dateFormat = "time"; + String originalDateValue = "12:06:45.123-0800"; + String expectedDateValue = "1970-01-01 20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "time_no_millis"; + String originalDateValue = "12:06:45-0800"; + 
String expectedDateValue = "1970-01-01 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testTTime() { + String columnName = "date_field"; + String dateFormat = "t_time"; + String originalDateValue = "T12:06:45.123-0800"; + String expectedDateValue = "1970-01-01 20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testTTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "t_time_no_millis"; + String originalDateValue = "T12:06:45-0800"; + String expectedDateValue = "1970-01-01 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testWeekDate() { + String columnName = "date_field"; + String dateFormat = "week_date"; + String originalDateValue = "1993-W04-2"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testWeekDateTime() { + String columnName = "date_field"; + String dateFormat = "week_date_time"; + String originalDateValue = "1993-W04-2T12:06:45.123-0800"; + String expectedDateValue = "1993-01-19 20:06:45.123"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testWeekDateTimeNoMillis() { + String columnName = "date_field"; + String dateFormat = "week_date_time_no_millis"; + String originalDateValue = "1993-W04-2T12:06:45-0800"; + String expectedDateValue = "1993-01-19 20:06:45.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testWeekyear() { + String columnName = "date_field"; + String dateFormat = "week_year"; + String originalDateValue = "1993"; + String expectedDateValue = "1993-01-01 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, 
expectedDateValue); + } + + @Test + public void testWeekyearWeek() { + String columnName = "date_field"; + String dateFormat = "weekyear_week"; + String originalDateValue = "1993-W04"; + String expectedDateValue = "1993-01-17 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testWeekyearWeekDay() { + String columnName = "date_field"; + String dateFormat = "weekyear_week_day"; + String originalDateValue = "1993-W04-2"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testYear() { + String columnName = "date_field"; + String dateFormat = "year"; + String originalDateValue = "1993"; + String expectedDateValue = "1993-01-01 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testYearMonth() { + String columnName = "date_field"; + String dateFormat = "year_month"; + String originalDateValue = "1993-01"; + String expectedDateValue = "1993-01-01 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testYearMonthDay() { + String columnName = "date_field"; + String dateFormat = "year_month_day"; + String originalDateValue = "1993-01-19"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testCustomFormat() { + String columnName = "date_field"; + String dateFormat = "EEE, MMM d, ''yy"; + + String originalDateValue = "Tue, Jan 19, '93"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testMultipleFormats() { + String columnName = "date_field"; + String dateFormat = "date_optional_time||epoch_millis"; + + 
String originalDateValue = "1993-01-19"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + + originalDateValue = "727401600000"; + expectedDateValue = "1993-01-19 00:00:00.000"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testMultipleCustomFormats() { + String columnName = "date_field"; + String dateFormat = "EEE, MMM d, ''yy||yyMMddHHmmssZ"; + + String originalDateValue = "Tue, Jan 19, '93"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + + originalDateValue = "930119000000-0000"; + expectedDateValue = "1993-01-19 00:00:00.000"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testNamedAndCustomFormats() { + String columnName = "date_field"; + String dateFormat = "EEE, MMM d, ''yy||hour_minute_second"; + + String originalDateValue = "Tue, Jan 19, '93"; + String expectedDateValue = "1993-01-19 00:00:00.000"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + + originalDateValue = "12:06:45"; + expectedDateValue = "1970-01-01 12:06:45.000"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testIncorrectFormat() { + String columnName = "date_field"; + String dateFormat = "date_optional_time"; + String originalDateValue = "1581724085"; + // Invalid format for date value; should return original value + String expectedDateValue = "1581724085"; + + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testNullDateData() { + String columnName = "date_field"; + String dateFormat = "date_optional_time"; + String originalDateValue = null; + // Nulls should be preserved + String expectedDateValue = null; + + 
verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + @Test + public void testStrictDateOptionalTimeOrEpochMillsShouldPass() { + String columnName = "date_field"; + String dateFormat = "strict_date_optional_time||epoch_millis"; + + String originalDateValue = "2015-01-01"; + String expectedDateValue = "2015-01-01 00:00:00.000"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + + originalDateValue = "2015-01-01T12:10:30Z"; + expectedDateValue = "2015-01-01 12:10:30.000"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + + originalDateValue = "1420070400001"; + expectedDateValue = "2015-01-01 00:00:00.001"; + verifyFormatting(columnName, dateFormat, originalDateValue, expectedDateValue); + } + + private void verifyFormatting( + String columnName, + String dateFormatProperty, + String originalDateValue, + String expectedDateValue) { + List columns = buildColumnList(columnName); + Map> dateFieldFormatMap = + buildDateFieldFormatMap(columnName, dateFormatProperty); + + Map rowSource = new HashMap<>(); + rowSource.put(columnName, originalDateValue); + + DateFieldFormatter dateFieldFormatter = + new DateFieldFormatter(dateFieldFormatMap, columns, new HashMap<>()); + executeFormattingAndCompare(dateFieldFormatter, rowSource, columnName, expectedDateValue); + } + + private void executeFormattingAndCompare( + DateFieldFormatter formatter, + Map rowSource, + String columnToCheck, + String expectedDateValue) { + formatter.applyJDBCDateFormat(rowSource); + assertEquals(expectedDateValue, rowSource.get(columnToCheck)); + } + + private List buildColumnList(String columnName) { + return ImmutableList.builder() + .add(new Schema.Column(columnName, null, Schema.Type.DATE)) + .build(); + } + + private Map> buildDateFieldFormatMap( + String columnName, String dateFormatProperty) { + return ImmutableMap.>builder() + .put(columnName, Arrays.asList(dateFormatProperty.split("\\|\\|"))) + 
.build(); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/executor/format/ResultSetTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/executor/format/ResultSetTest.java index 69da4ca475..c3d7e21704 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/executor/format/ResultSetTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/executor/format/ResultSetTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.executor.format; import static org.junit.Assert.assertFalse; @@ -13,19 +12,17 @@ public class ResultSetTest { - private final ResultSet resultSet = new ResultSet() { - @Override - public Schema getSchema() { - return super.getSchema(); - } - }; + private final ResultSet resultSet = + new ResultSet() { + @Override + public Schema getSchema() { + return super.getSchema(); + } + }; /** - * Case #1: - * LIKE 'test%' is converted to: - * 1. Regex pattern: test.* - * 2. OpenSearch search pattern: test* - * In this case, what OpenSearch returns is the final result. + * Case #1: LIKE 'test%' is converted to: 1. Regex pattern: test.* 2. OpenSearch search pattern: + * test* In this case, what OpenSearch returns is the final result. */ @Test public void testWildcardForZeroOrMoreCharacters() { @@ -33,13 +30,10 @@ public void testWildcardForZeroOrMoreCharacters() { } /** - * Case #2: - * LIKE 'test_123' is converted to: - * 1. Regex pattern: test.123 - * 2. OpenSearch search pattern: (all) - * Because OpenSearch doesn't support single wildcard character, in this case, none is passed - * as OpenSearch search pattern. So all index names are returned and need to be filtered by - * regex pattern again. + * Case #2: LIKE 'test_123' is converted to: 1. Regex pattern: test.123 2. OpenSearch search + * pattern: (all) Because OpenSearch doesn't support single wildcard character, in this case, none + * is passed as OpenSearch search pattern. 
So all index names are returned and need to be filtered + * by regex pattern again. */ @Test public void testWildcardForSingleCharacter() { @@ -49,12 +43,10 @@ public void testWildcardForSingleCharacter() { } /** - * Case #3: - * LIKE 'acc' has same regex and OpenSearch pattern. - * In this case, only index name(s) aliased by 'acc' is returned. - * So regex match is skipped to avoid wrong empty result. - * The assumption here is OpenSearch won't return unrelated index names if - * LIKE pattern doesn't include any wildcard. + * Case #3: LIKE 'acc' has same regex and OpenSearch pattern. In this case, only index name(s) + * aliased by 'acc' is returned. So regex match is skipped to avoid wrong empty result. The + * assumption here is OpenSearch won't return unrelated index names if LIKE pattern doesn't + * include any wildcard. */ @Test public void testIndexAlias() { @@ -62,11 +54,9 @@ public void testIndexAlias() { } /** - * Case #4: - * LIKE 'test.2020.10' has same regex pattern. Because it includes dot (wildcard), - * OpenSearch search pattern is all. - * In this case, all index names are returned. Because the pattern includes dot, - * it's treated as regex and regex match won't be skipped. + * Case #4: LIKE 'test.2020.10' has same regex pattern. Because it includes dot (wildcard), + * OpenSearch search pattern is all. In this case, all index names are returned. Because the + * pattern includes dot, it's treated as regex and regex match won't be skipped. 
*/ @Test public void testIndexNameWithDot() { @@ -74,5 +64,4 @@ public void testIndexNameWithDot() { assertFalse(resultSet.matchesPatternIfRegex(".opensearch_dashboards", "test.2020.10")); assertTrue(resultSet.matchesPatternIfRegex("test.2020.10", "test.2020.10")); } - } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionCursorFallbackTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionCursorFallbackTest.java index c885ffae6a..30d8c9d27d 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionCursorFallbackTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionCursorFallbackTest.java @@ -6,8 +6,6 @@ package org.opensearch.sql.legacy.plugin; import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; import static org.opensearch.sql.legacy.plugin.RestSqlAction.QUERY_API_ENDPOINT; import java.io.IOException; @@ -29,7 +27,6 @@ import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestChannel; import org.opensearch.rest.RestRequest; -import org.opensearch.sql.common.antlr.SyntaxCheckException; import org.opensearch.sql.executor.QueryManager; import org.opensearch.sql.executor.execution.QueryPlanFactory; import org.opensearch.sql.sql.SQLService; @@ -37,25 +34,19 @@ import org.opensearch.sql.sql.domain.SQLQueryRequest; import org.opensearch.threadpool.ThreadPool; -/** - * A test suite that verifies fallback behaviour of cursor queries. - */ +/** A test suite that verifies fallback behaviour of cursor queries. 
*/ @RunWith(MockitoJUnitRunner.class) public class RestSQLQueryActionCursorFallbackTest extends BaseRestHandler { private NodeClient nodeClient; - @Mock - private ThreadPool threadPool; + @Mock private ThreadPool threadPool; - @Mock - private QueryManager queryManager; + @Mock private QueryManager queryManager; - @Mock - private QueryPlanFactory factory; + @Mock private QueryPlanFactory factory; - @Mock - private RestChannel restChannel; + @Mock private RestChannel restChannel; private Injector injector; @@ -63,11 +54,14 @@ public class RestSQLQueryActionCursorFallbackTest extends BaseRestHandler { public void setup() { nodeClient = new NodeClient(org.opensearch.common.settings.Settings.EMPTY, threadPool); ModulesBuilder modules = new ModulesBuilder(); - modules.add(b -> { - b.bind(SQLService.class).toInstance(new SQLService(new SQLSyntaxParser(), queryManager, factory)); - }); + modules.add( + b -> { + b.bind(SQLService.class) + .toInstance(new SQLService(new SQLSyntaxParser(), queryManager, factory)); + }); injector = modules.createInjector(); - Mockito.lenient().when(threadPool.getThreadContext()) + Mockito.lenient() + .when(threadPool.getThreadContext()) .thenReturn(new ThreadContext(org.opensearch.common.settings.Settings.EMPTY)); } @@ -76,17 +70,14 @@ public void setup() { @Test public void no_fallback_with_column_reference() throws Exception { String query = "SELECT name FROM test1"; - SQLQueryRequest request = createSqlQueryRequest(query, Optional.empty(), - Optional.of(5)); + SQLQueryRequest request = createSqlQueryRequest(query, Optional.empty(), Optional.of(5)); assertFalse(doesQueryFallback(request)); } - private static SQLQueryRequest createSqlQueryRequest(String query, Optional cursorId, - Optional fetchSize) throws IOException { - var builder = XContentFactory.jsonBuilder() - .startObject() - .field("query").value(query); + private static SQLQueryRequest createSqlQueryRequest( + String query, Optional cursorId, Optional fetchSize) throws IOException 
{ + var builder = XContentFactory.jsonBuilder().startObject().field("query").value(query); if (cursorId.isPresent()) { builder.field("cursor").value(cursorId.get()); } @@ -97,17 +88,21 @@ private static SQLQueryRequest createSqlQueryRequest(String query, Optional { - fallback.set(true); - }, (channel, exception) -> { - }).accept(restChannel); + queryAction + .prepareRequest( + request, + (channel, exception) -> { + fallback.set(true); + }, + (channel, exception) -> {}) + .accept(restChannel); return fallback.get(); } @@ -118,8 +113,8 @@ public String getName() { } @Override - protected BaseRestHandler.RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient nodeClient) - { + protected BaseRestHandler.RestChannelConsumer prepareRequest( + RestRequest restRequest, NodeClient nodeClient) { // do nothing, RestChannelConsumer is protected which required to extend BaseRestHandler return null; } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionTest.java index be572f3dfb..b14b2c09cb 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.plugin; import static org.junit.Assert.assertTrue; @@ -42,17 +41,13 @@ public class RestSQLQueryActionTest extends BaseRestHandler { private NodeClient nodeClient; - @Mock - private ThreadPool threadPool; + @Mock private ThreadPool threadPool; - @Mock - private QueryManager queryManager; + @Mock private QueryManager queryManager; - @Mock - private QueryPlanFactory factory; + @Mock private QueryPlanFactory factory; - @Mock - private RestChannel restChannel; + @Mock private RestChannel restChannel; private Injector injector; @@ -60,88 +55,112 @@ public class RestSQLQueryActionTest extends 
BaseRestHandler { public void setup() { nodeClient = new NodeClient(org.opensearch.common.settings.Settings.EMPTY, threadPool); ModulesBuilder modules = new ModulesBuilder(); - modules.add(b -> { - b.bind(SQLService.class).toInstance(new SQLService(new SQLSyntaxParser(), queryManager, factory)); - }); + modules.add( + b -> { + b.bind(SQLService.class) + .toInstance(new SQLService(new SQLSyntaxParser(), queryManager, factory)); + }); injector = modules.createInjector(); - Mockito.lenient().when(threadPool.getThreadContext()) + Mockito.lenient() + .when(threadPool.getThreadContext()) .thenReturn(new ThreadContext(org.opensearch.common.settings.Settings.EMPTY)); } @Test public void handleQueryThatCanSupport() throws Exception { - SQLQueryRequest request = new SQLQueryRequest( - new JSONObject("{\"query\": \"SELECT -123\"}"), - "SELECT -123", - QUERY_API_ENDPOINT, - "jdbc"); + SQLQueryRequest request = + new SQLQueryRequest( + new JSONObject("{\"query\": \"SELECT -123\"}"), + "SELECT -123", + QUERY_API_ENDPOINT, + "jdbc"); RestSQLQueryAction queryAction = new RestSQLQueryAction(injector); - queryAction.prepareRequest(request, (channel, exception) -> { - fail(); - }, (channel, exception) -> { - fail(); - }).accept(restChannel); + queryAction + .prepareRequest( + request, + (channel, exception) -> { + fail(); + }, + (channel, exception) -> { + fail(); + }) + .accept(restChannel); } @Test public void handleExplainThatCanSupport() throws Exception { - SQLQueryRequest request = new SQLQueryRequest( - new JSONObject("{\"query\": \"SELECT -123\"}"), - "SELECT -123", - EXPLAIN_API_ENDPOINT, - "jdbc"); + SQLQueryRequest request = + new SQLQueryRequest( + new JSONObject("{\"query\": \"SELECT -123\"}"), + "SELECT -123", + EXPLAIN_API_ENDPOINT, + "jdbc"); RestSQLQueryAction queryAction = new RestSQLQueryAction(injector); - queryAction.prepareRequest(request, (channel, exception) -> { - fail(); - }, (channel, exception) -> { - fail(); - }).accept(restChannel); + queryAction + 
.prepareRequest( + request, + (channel, exception) -> { + fail(); + }, + (channel, exception) -> { + fail(); + }) + .accept(restChannel); } @Test public void queryThatNotSupportIsHandledByFallbackHandler() throws Exception { - SQLQueryRequest request = new SQLQueryRequest( - new JSONObject( - "{\"query\": \"SELECT name FROM test1 JOIN test2 ON test1.name = test2.name\"}"), - "SELECT name FROM test1 JOIN test2 ON test1.name = test2.name", - QUERY_API_ENDPOINT, - "jdbc"); + SQLQueryRequest request = + new SQLQueryRequest( + new JSONObject( + "{\"query\": \"SELECT name FROM test1 JOIN test2 ON test1.name = test2.name\"}"), + "SELECT name FROM test1 JOIN test2 ON test1.name = test2.name", + QUERY_API_ENDPOINT, + "jdbc"); AtomicBoolean fallback = new AtomicBoolean(false); RestSQLQueryAction queryAction = new RestSQLQueryAction(injector); - queryAction.prepareRequest(request, (channel, exception) -> { - fallback.set(true); - assertTrue(exception instanceof SyntaxCheckException); - }, (channel, exception) -> { - fail(); - }).accept(restChannel); + queryAction + .prepareRequest( + request, + (channel, exception) -> { + fallback.set(true); + assertTrue(exception instanceof SyntaxCheckException); + }, + (channel, exception) -> { + fail(); + }) + .accept(restChannel); assertTrue(fallback.get()); } @Test public void queryExecutionFailedIsHandledByExecutionErrorHandler() throws Exception { - SQLQueryRequest request = new SQLQueryRequest( - new JSONObject( - "{\"query\": \"SELECT -123\"}"), - "SELECT -123", - QUERY_API_ENDPOINT, - "jdbc"); + SQLQueryRequest request = + new SQLQueryRequest( + new JSONObject("{\"query\": \"SELECT -123\"}"), + "SELECT -123", + QUERY_API_ENDPOINT, + "jdbc"); - doThrow(new IllegalStateException("execution exception")) - .when(queryManager) - .submit(any()); + doThrow(new IllegalStateException("execution exception")).when(queryManager).submit(any()); AtomicBoolean executionErrorHandler = new AtomicBoolean(false); RestSQLQueryAction queryAction = new 
RestSQLQueryAction(injector); - queryAction.prepareRequest(request, (channel, exception) -> { - assertTrue(exception instanceof SyntaxCheckException); - }, (channel, exception) -> { - executionErrorHandler.set(true); - assertTrue(exception instanceof IllegalStateException); - }).accept(restChannel); + queryAction + .prepareRequest( + request, + (channel, exception) -> { + assertTrue(exception instanceof SyntaxCheckException); + }, + (channel, exception) -> { + executionErrorHandler.set(true); + assertTrue(exception instanceof IllegalStateException); + }) + .accept(restChannel); assertTrue(executionErrorHandler.get()); } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/SearchHitRowTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/SearchHitRowTest.java index fe5c641009..dd0fc626c0 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/SearchHitRowTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/query/planner/physical/node/scroll/SearchHitRowTest.java @@ -20,7 +20,7 @@ public void testKeyWithObjectField() { SearchHit hit = new SearchHit(1); hit.sourceRef(new BytesArray("{\"id\": {\"serial\": 3}}")); SearchHitRow row = new SearchHitRow(hit, "a"); - RowKey key = row.key(new String[]{"id.serial"}); + RowKey key = row.key(new String[] {"id.serial"}); Object[] data = key.keys(); assertEquals(1, data.length); @@ -32,7 +32,7 @@ public void testKeyWithUnexpandedObjectField() { SearchHit hit = new SearchHit(1); hit.sourceRef(new BytesArray("{\"attributes.hardware.correlate_id\": 10}")); SearchHitRow row = new SearchHitRow(hit, "a"); - RowKey key = row.key(new String[]{"attributes.hardware.correlate_id"}); + RowKey key = row.key(new String[] {"attributes.hardware.correlate_id"}); Object[] data = key.keys(); assertEquals(1, data.length); diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/rewriter/alias/IdentifierTest.java 
b/legacy/src/test/java/org/opensearch/sql/legacy/rewriter/alias/IdentifierTest.java index b9c4935f50..5a6bc4541e 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/rewriter/alias/IdentifierTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/rewriter/alias/IdentifierTest.java @@ -3,46 +3,43 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.alias; import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr; import org.junit.Assert; import org.junit.Test; -/** - * Test cases for util class {@link Identifier}. - */ +/** Test cases for util class {@link Identifier}. */ public class IdentifierTest { - @Test - public void identifierWithWordBeforeFirstDotShouldBeConsideredHavePrefix() { - Assert.assertTrue(identifier("accounts.age").hasPrefix()); - } - - @Test - public void identifierWithoutDotShouldNotBeConsideredHavePrefix() { - Assert.assertFalse(identifier("age").hasPrefix()); - } - - @Test - public void identifierStartingWithDotShouldNotBeConsideredHavePrefix() { - Assert.assertFalse(identifier(".age").hasPrefix()); - } - - @Test - public void prefixOfIdentifierShouldBeWordBeforeFirstDot() { - Assert.assertEquals("accounts", identifier("accounts.age").prefix()); - } - - @Test - public void removePrefixShouldRemoveFirstWordAndDot() { - Identifier identifier = identifier("accounts.age"); - identifier.removePrefix(); - Assert.assertEquals("age", identifier.name()); - } - - private Identifier identifier(String name) { - return new Identifier(new SQLIdentifierExpr(name)); - } + @Test + public void identifierWithWordBeforeFirstDotShouldBeConsideredHavePrefix() { + Assert.assertTrue(identifier("accounts.age").hasPrefix()); + } + + @Test + public void identifierWithoutDotShouldNotBeConsideredHavePrefix() { + Assert.assertFalse(identifier("age").hasPrefix()); + } + + @Test + public void identifierStartingWithDotShouldNotBeConsideredHavePrefix() { + Assert.assertFalse(identifier(".age").hasPrefix()); + } + + @Test + public 
void prefixOfIdentifierShouldBeWordBeforeFirstDot() { + Assert.assertEquals("accounts", identifier("accounts.age").prefix()); + } + + @Test + public void removePrefixShouldRemoveFirstWordAndDot() { + Identifier identifier = identifier("accounts.age"); + identifier.removePrefix(); + Assert.assertEquals("age", identifier.name()); + } + + private Identifier identifier(String name) { + return new Identifier(new SQLIdentifierExpr(name)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/rewriter/alias/TableAliasPrefixRemoveRuleTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/rewriter/alias/TableAliasPrefixRemoveRuleTest.java index b59bd218e0..4a4161a585 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/rewriter/alias/TableAliasPrefixRemoveRuleTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/rewriter/alias/TableAliasPrefixRemoveRuleTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.alias; import com.alibaba.druid.sql.SQLUtils; @@ -12,122 +11,116 @@ import org.junit.Test; import org.opensearch.sql.legacy.util.SqlParserUtils; -/** - * Test cases for field name prefix remove rule. - */ +/** Test cases for field name prefix remove rule. 
*/ public class TableAliasPrefixRemoveRuleTest { - @Test - public void queryWithUnAliasedTableNameShouldMatch() { - query("SELECT account.age FROM accounts").shouldMatchRule(); - } - - @Test - public void queryWithUnAliasedTableNameInSubQueryShouldNotMatch() { - query("SELECT * FROM test t WHERE t.name IN (SELECT accounts.name FROM accounts)").shouldNotMatchRule(); - } - - @Test - public void queryWithoutUnAliasedTableNameShouldMatch() { - query("SELECT a.age FROM accounts a WHERE a.balance > 1000").shouldMatchRule(); - } - - @Test - public void joinQueryWithoutUnAliasedTableNameShouldNotMatch() { - query("SELECT * FROM accounts a1 JOIN accounts a2 ON a1.city = a2.city").shouldNotMatchRule(); - } - - @Test - public void nestedFieldQueryWithoutUnAliasedTableNameShouldNotMatch() { - query("SELECT * FROM accounts a, a.project p").shouldNotMatchRule(); - } - - @Test - public void selectedFieldNamePrefixedByUnAliasedTableNameShouldRemoveTableNamePrefix() { - query("SELECT accounts.age FROM accounts").shouldBeAfterRewrite("SELECT age FROM accounts"); - query("SELECT accounts.age FROM accounts/temp").shouldBeAfterRewrite("SELECT age FROM accounts/temp"); - query("SELECT age FROM accounts/temp a").shouldBeAfterRewrite("SELECT age FROM accounts/temp"); - } - - @Test - public void allFieldNamePrefixedByUnAliasedTableNameEverywhereShouldRemoveTableNamePrefix() { - query( - "SELECT accounts.age, AVG(accounts.salary) FROM accounts WHERE accounts.age > 10 " + - "GROUP BY accounts.age HAVING AVG(accounts.balance) > 1000 ORDER BY accounts.age" - ).shouldBeAfterRewrite( - "SELECT age, AVG(salary) FROM accounts WHERE age > 10 " + - "GROUP BY age HAVING AVG(balance) > 1000 ORDER BY age" - ); - } - - @Test - public void selectedFieldNamePrefixedByTableAliasShouldRemoveTableAliasPrefix() { - query("SELECT a.age FROM accounts a").shouldBeAfterRewrite("SELECT age FROM accounts"); - query("SELECT a.age FROM accounts/temp a").shouldBeAfterRewrite("SELECT age FROM accounts/temp"); + @Test + 
public void queryWithUnAliasedTableNameShouldMatch() { + query("SELECT account.age FROM accounts").shouldMatchRule(); + } + + @Test + public void queryWithUnAliasedTableNameInSubQueryShouldNotMatch() { + query("SELECT * FROM test t WHERE t.name IN (SELECT accounts.name FROM accounts)") + .shouldNotMatchRule(); + } + + @Test + public void queryWithoutUnAliasedTableNameShouldMatch() { + query("SELECT a.age FROM accounts a WHERE a.balance > 1000").shouldMatchRule(); + } + + @Test + public void joinQueryWithoutUnAliasedTableNameShouldNotMatch() { + query("SELECT * FROM accounts a1 JOIN accounts a2 ON a1.city = a2.city").shouldNotMatchRule(); + } + + @Test + public void nestedFieldQueryWithoutUnAliasedTableNameShouldNotMatch() { + query("SELECT * FROM accounts a, a.project p").shouldNotMatchRule(); + } + + @Test + public void selectedFieldNamePrefixedByUnAliasedTableNameShouldRemoveTableNamePrefix() { + query("SELECT accounts.age FROM accounts").shouldBeAfterRewrite("SELECT age FROM accounts"); + query("SELECT accounts.age FROM accounts/temp") + .shouldBeAfterRewrite("SELECT age FROM accounts/temp"); + query("SELECT age FROM accounts/temp a").shouldBeAfterRewrite("SELECT age FROM accounts/temp"); + } + + @Test + public void allFieldNamePrefixedByUnAliasedTableNameEverywhereShouldRemoveTableNamePrefix() { + query( + "SELECT accounts.age, AVG(accounts.salary) FROM accounts WHERE accounts.age > 10 " + + "GROUP BY accounts.age HAVING AVG(accounts.balance) > 1000 ORDER BY accounts.age") + .shouldBeAfterRewrite( + "SELECT age, AVG(salary) FROM accounts WHERE age > 10 " + + "GROUP BY age HAVING AVG(balance) > 1000 ORDER BY age"); + } + + @Test + public void selectedFieldNamePrefixedByTableAliasShouldRemoveTableAliasPrefix() { + query("SELECT a.age FROM accounts a").shouldBeAfterRewrite("SELECT age FROM accounts"); + query("SELECT a.age FROM accounts/temp a") + .shouldBeAfterRewrite("SELECT age FROM accounts/temp"); + } + + @Test + public void 
allFieldNamePrefixedByTableAliasShouldRemoveTableAliasPrefix() { + query( + "SELECT a.age, AVG(a.salary) FROM accounts a WHERE a.age > 10 " + + "GROUP BY a.age HAVING AVG(a.balance) > 1000 ORDER BY a.age") + .shouldBeAfterRewrite( + "SELECT age, AVG(salary) FROM accounts WHERE age > 10 " + + "GROUP BY age HAVING AVG(balance) > 1000 ORDER BY age"); + } + + @Test + public void allFieldNamePrefixedByTableAliasInMultiQueryShouldRemoveTableAliasPrefix() { + query("SELECT t.name FROM test t UNION SELECT a.age FROM accounts a WHERE a.age > 10") + .shouldBeAfterRewrite( + "SELECT name FROM test UNION SELECT age FROM accounts WHERE age > 10"); + } + + @Test + public void unAliasedFieldNameShouldNotBeChanged() { + query("SELECT a.age, name FROM accounts a WHERE balance > 1000") + .shouldBeAfterRewrite("SELECT age, name FROM accounts WHERE balance > 1000"); + query("SELECT accounts.age, name FROM accounts WHERE balance > 1000") + .shouldBeAfterRewrite("SELECT age, name FROM accounts WHERE balance > 1000"); + } + + private QueryAssertion query(String sql) { + return new QueryAssertion(sql); + } + + private static class QueryAssertion { + + private final TableAliasPrefixRemoveRule rule = new TableAliasPrefixRemoveRule(); + + private final SQLQueryExpr expr; + + QueryAssertion(String sql) { + this.expr = SqlParserUtils.parse(sql); } - @Test - public void allFieldNamePrefixedByTableAliasShouldRemoveTableAliasPrefix() { - query( - "SELECT a.age, AVG(a.salary) FROM accounts a WHERE a.age > 10 " + - "GROUP BY a.age HAVING AVG(a.balance) > 1000 ORDER BY a.age" - ).shouldBeAfterRewrite( - "SELECT age, AVG(salary) FROM accounts WHERE age > 10 " + - "GROUP BY age HAVING AVG(balance) > 1000 ORDER BY age" - ); + void shouldMatchRule() { + Assert.assertTrue(match()); } - @Test - public void allFieldNamePrefixedByTableAliasInMultiQueryShouldRemoveTableAliasPrefix() { - query( - "SELECT t.name FROM test t UNION SELECT a.age FROM accounts a WHERE a.age > 10" - ).shouldBeAfterRewrite( - 
"SELECT name FROM test UNION SELECT age FROM accounts WHERE age > 10" - ); + void shouldNotMatchRule() { + Assert.assertFalse(match()); } - @Test - public void unAliasedFieldNameShouldNotBeChanged() { - query("SELECT a.age, name FROM accounts a WHERE balance > 1000"). - shouldBeAfterRewrite("SELECT age, name FROM accounts WHERE balance > 1000"); - query("SELECT accounts.age, name FROM accounts WHERE balance > 1000"). - shouldBeAfterRewrite("SELECT age, name FROM accounts WHERE balance > 1000"); + void shouldBeAfterRewrite(String expected) { + shouldMatchRule(); + rule.rewrite(expr); + Assert.assertEquals( + SQLUtils.toMySqlString(SqlParserUtils.parse(expected)), SQLUtils.toMySqlString(expr)); } - private QueryAssertion query(String sql) { - return new QueryAssertion(sql); + private boolean match() { + return rule.match(expr); } - - private static class QueryAssertion { - - private final TableAliasPrefixRemoveRule rule = new TableAliasPrefixRemoveRule(); - - private final SQLQueryExpr expr; - - QueryAssertion(String sql) { - this.expr = SqlParserUtils.parse(sql); - } - - void shouldMatchRule() { - Assert.assertTrue(match()); - } - - void shouldNotMatchRule() { - Assert.assertFalse(match()); - } - - void shouldBeAfterRewrite(String expected) { - shouldMatchRule(); - rule.rewrite(expr); - Assert.assertEquals( - SQLUtils.toMySqlString(SqlParserUtils.parse(expected)), - SQLUtils.toMySqlString(expr) - ); - } - - private boolean match() { - return rule.match(expr); - } - } - + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/rewriter/alias/TableTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/rewriter/alias/TableTest.java index 5fc677785d..ab5c6b3d10 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/rewriter/alias/TableTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/rewriter/alias/TableTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.rewriter.alias; import 
com.alibaba.druid.sql.ast.expr.SQLBinaryOpExpr; @@ -13,27 +12,24 @@ import org.junit.Assert; import org.junit.Test; -/** - * Test cases for util class {@link Table}. - */ +/** Test cases for util class {@link Table}. */ public class TableTest { - @Test - public void identifierOfTableNameShouldReturnTheTableName() { - Table table = new Table(new SQLExprTableSource(new SQLIdentifierExpr("accounts"))); - Assert.assertEquals("accounts", table.name()); - } - - @Test - public void identifierOfTableAndTypeNameShouldReturnTheTableNameOnly() { - Table table = new Table(new SQLExprTableSource( - new SQLBinaryOpExpr( - new SQLIdentifierExpr("accounts"), - SQLBinaryOperator.Divide, - new SQLIdentifierExpr("test") - ) - )); - Assert.assertEquals("accounts", table.name()); - } + @Test + public void identifierOfTableNameShouldReturnTheTableName() { + Table table = new Table(new SQLExprTableSource(new SQLIdentifierExpr("accounts"))); + Assert.assertEquals("accounts", table.name()); + } + @Test + public void identifierOfTableAndTypeNameShouldReturnTheTableNameOnly() { + Table table = + new Table( + new SQLExprTableSource( + new SQLBinaryOpExpr( + new SQLIdentifierExpr("accounts"), + SQLBinaryOperator.Divide, + new SQLIdentifierExpr("test")))); + Assert.assertEquals("accounts", table.name()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/AggregationOptionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/AggregationOptionTest.java index e5f44eacf0..526642e8ea 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/AggregationOptionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/AggregationOptionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import com.alibaba.druid.sql.ast.expr.SQLAggregateOption; @@ -17,55 +16,53 @@ import org.opensearch.sql.legacy.parser.SqlParser; import org.opensearch.sql.legacy.util.SqlParserUtils; -/** - * Unit 
test class for feature of aggregation options: DISTINCT, ALL, UNIQUE, DEDUPLICATION - */ +/** Unit test class for feature of aggregation options: DISTINCT, ALL, UNIQUE, DEDUPLICATION */ public class AggregationOptionTest { - @Test - public void selectDistinctFieldsShouldHaveAggregationOption() { - List fields = getSelectFields("SELECT DISTINCT gender, city FROM accounts"); - for (Field field: fields) { - Assert.assertEquals(field.getOption(), SQLAggregateOption.DISTINCT); - } + @Test + public void selectDistinctFieldsShouldHaveAggregationOption() { + List fields = getSelectFields("SELECT DISTINCT gender, city FROM accounts"); + for (Field field : fields) { + Assert.assertEquals(field.getOption(), SQLAggregateOption.DISTINCT); } + } - @Test - public void selectWithoutDistinctFieldsShouldNotHaveAggregationOption() { - List fields = getSelectFields("SELECT gender, city FROM accounts"); - for (Field field: fields) { - Assert.assertNull(field.getOption()); - } + @Test + public void selectWithoutDistinctFieldsShouldNotHaveAggregationOption() { + List fields = getSelectFields("SELECT gender, city FROM accounts"); + for (Field field : fields) { + Assert.assertNull(field.getOption()); } + } - @Test - public void selectDistinctWithoutGroupByShouldHaveGroupByItems() { - List> groupBys = getGroupBys("SELECT DISTINCT gender, city FROM accounts"); - Assert.assertFalse(groupBys.isEmpty()); - } + @Test + public void selectDistinctWithoutGroupByShouldHaveGroupByItems() { + List> groupBys = getGroupBys("SELECT DISTINCT gender, city FROM accounts"); + Assert.assertFalse(groupBys.isEmpty()); + } - @Test - public void selectWithoutDistinctWithoutGroupByShouldNotHaveGroupByItems() { - List> groupBys = getGroupBys("SELECT gender, city FROM accounts"); - Assert.assertTrue(groupBys.isEmpty()); - } + @Test + public void selectWithoutDistinctWithoutGroupByShouldNotHaveGroupByItems() { + List> groupBys = getGroupBys("SELECT gender, city FROM accounts"); + 
Assert.assertTrue(groupBys.isEmpty()); + } - private List> getGroupBys(String query) { - return getSelect(query).getGroupBys(); - } + private List> getGroupBys(String query) { + return getSelect(query).getGroupBys(); + } - private List getSelectFields(String query) { - return getSelect(query).getFields(); - } + private List getSelectFields(String query) { + return getSelect(query).getFields(); + } - private Select getSelect(String query) { - SQLQueryExpr queryExpr = SqlParserUtils.parse(query); - Select select = null; - try { - select = new SqlParser().parseSelect(queryExpr); - } catch (SqlParseException e) { - e.printStackTrace(); - } - return select; + private Select getSelect(String query) { + SQLQueryExpr queryExpr = SqlParserUtils.parse(query); + Select select = null; + try { + select = new SqlParser().parseSelect(queryExpr); + } catch (SqlParseException e) { + e.printStackTrace(); } + return select; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/DateFormatTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/DateFormatTest.java index 89ac8b4563..3bb7b4a2b6 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/DateFormatTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/DateFormatTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.hamcrest.MatcherAssert.assertThat; @@ -40,196 +39,238 @@ public class DateFormatTest { - private static final String SELECT_CNT_FROM_DATE = "SELECT COUNT(*) AS c FROM dates "; - - @Test - public void simpleFormatCondition() { - List q = query(SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY') < '2018'"); - - assertThat(q, hasQueryWithValue("fieldName", equalTo("creationDate"))); - assertThat(q, hasQueryWithValueGetter(MatcherUtils.featureValueOf("has format", equalTo("YYYY"), f->((RangeQueryBuilder)f).format()))); - } - - @Test - public void equalCondition() { - List 
q = query(SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY-MM-dd') = '2018-04-02'"); - - assertThat(q, hasQueryWithValueGetter(MatcherUtils.featureValueOf("has format", equalTo("YYYY-MM-dd"), f->((RangeQueryBuilder)f).format()))); - - // Equality query for date_format is created with a rangeQuery where the 'from' and 'to' values are equal to the value we are equating to - assertThat(q, hasQueryWithValue("from", equalTo(BytesRefs.toBytesRef("2018-04-02")))); // converting string to bytes ref as RangeQueryBuilder stores it this way - assertThat(q, hasQueryWithValue("to", equalTo(BytesRefs.toBytesRef("2018-04-02")))); - } - - @Test - public void orderByTest() { - String query = "SELECT agent, ip, date_format(utc_time, 'dd-MM-YYYY') date " + - "FROM opensearch_dashboards_sample_data_logs " + - "ORDER BY date_format(utc_time, 'dd-MM-YYYY') desc, ip"; - - Select select = getSelect(query); - - List orderBys = select.getOrderBys(); - assertThat(orderBys.size(), equalTo(2)); - - Order formula = orderBys.get(0); - - assertThat(formula.isScript(), is(true)); - assertThat(formula.getType(), is("DESC")); - assertThat(formula.getName(), containsString("DateTimeFormatter.ofPattern")); - - Order ip = orderBys.get(1); - - assertThat(ip.isScript(), is(false)); - assertThat(ip.getName(), is("ip")); - assertThat(ip.getType(), is("ASC")); - } - - @Test - public void groupByWithDescOrder() throws SqlParseException { - String query = "SELECT date_format(utc_time, 'dd-MM-YYYY'), count(*) " + - "FROM opensearch_dashboards_sample_data_logs " + - "GROUP BY date_format(utc_time, 'dd-MM-YYYY') " + - "ORDER BY date_format(utc_time, 'dd-MM-YYYY') DESC"; - - JSONObject aggregation = getAggregation(query); - assertThat(aggregation.getInt("size"), is(getSelect(query).getRowCount())); - assertThat(aggregation.getJSONObject("order").getString("_key"), is("desc")); - } - - @Test - public void groupByWithAscOrder() throws SqlParseException { - String query = "SELECT date_format(utc_time, 
'dd-MM-YYYY'), count(*) " + - "FROM opensearch_dashboards_sample_data_logs " + - "GROUP BY date_format(utc_time, 'dd-MM-YYYY') " + - "ORDER BY date_format(utc_time, 'dd-MM-YYYY')"; - - JSONObject aggregation = getAggregation(query); - - assertThat(aggregation.getJSONObject("order").getString("_key"), is("asc")); - } - - @Test - @Ignore("https://github.com/opendistro-for-elasticsearch/sql/issues/158") - public void groupByWithAndAlias() throws SqlParseException { - String query = "SELECT date_format(utc_time, 'dd-MM-YYYY') x, count(*) " + - "FROM opensearch_dashboards_sample_data_logs " + - "GROUP BY x " + - "ORDER BY x"; - - JSONObject aggregation = getAggregation(query); - assertThat(aggregation.getJSONObject("order").getString("_key"), is("asc")); - } - - public JSONObject getAggregation(String query) throws SqlParseException { - Select select = getSelect(query); - - Client client = mock(Client.class); - AggregationQueryAction queryAction = new AggregationQueryAction(client, select); - - String elasticDsl = queryAction.explain().explain(); - JSONObject elasticQuery = new JSONObject(elasticDsl); - - JSONObject aggregations = elasticQuery.getJSONObject("aggregations"); - String dateFormatAggregationKey = getScriptAggregationKey(aggregations, "date_format"); - - return aggregations.getJSONObject(dateFormatAggregationKey).getJSONObject("terms"); - } - - public static String getScriptAggregationKey(JSONObject aggregation, String prefix) { - return aggregation.keySet() - .stream() - .filter(x -> x.startsWith(prefix)) - .findFirst() - .orElseThrow(()-> new RuntimeException("Can't find key" + prefix + " in aggregation " + aggregation)); - } - - @Test - public void notEqualCondition() { - List q = query(SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY-MM-dd') <> '2018-04-02'"); - - assertThat(q, hasNotQueryWithValue("from", equalTo(BytesRefs.toBytesRef("2018-04-02")))); - assertThat(q, hasNotQueryWithValue("to", 
equalTo(BytesRefs.toBytesRef("2018-04-02")))); - } - - @Test - public void greaterThanCondition() { - List q = query(SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY-MM-dd') > '2018-04-02'"); - - assertThat(q, hasQueryWithValue("from", equalTo(BytesRefs.toBytesRef("2018-04-02")))); - assertThat(q, hasQueryWithValue("includeLower", equalTo(false))); - assertThat(q, hasQueryWithValue("includeUpper", equalTo(true))); - } - - @Test - public void greaterThanOrEqualToCondition() { - List q = query(SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY-MM-dd') >= '2018-04-02'"); - - assertThat(q, hasQueryWithValue("from", equalTo(BytesRefs.toBytesRef("2018-04-02")))); - assertThat(q, hasQueryWithValue("to", equalTo(null))); - assertThat(q, hasQueryWithValue("includeLower", equalTo(true))); - assertThat(q, hasQueryWithValue("includeUpper", equalTo(true))); + private static final String SELECT_CNT_FROM_DATE = "SELECT COUNT(*) AS c FROM dates "; + + @Test + public void simpleFormatCondition() { + List q = + query(SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY') < '2018'"); + + assertThat(q, hasQueryWithValue("fieldName", equalTo("creationDate"))); + assertThat( + q, + hasQueryWithValueGetter( + MatcherUtils.featureValueOf( + "has format", equalTo("YYYY"), f -> ((RangeQueryBuilder) f).format()))); + } + + @Test + public void equalCondition() { + List q = + query( + SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY-MM-dd') = '2018-04-02'"); + + assertThat( + q, + hasQueryWithValueGetter( + MatcherUtils.featureValueOf( + "has format", equalTo("YYYY-MM-dd"), f -> ((RangeQueryBuilder) f).format()))); + + // Equality query for date_format is created with a rangeQuery where the 'from' and 'to' values + // are equal to the value we are equating to + assertThat( + q, + hasQueryWithValue( + "from", + equalTo( + BytesRefs.toBytesRef( + "2018-04-02")))); // converting string to bytes ref as RangeQueryBuilder stores + // it this way + 
assertThat(q, hasQueryWithValue("to", equalTo(BytesRefs.toBytesRef("2018-04-02")))); + } + + @Test + public void orderByTest() { + String query = + "SELECT agent, ip, date_format(utc_time, 'dd-MM-YYYY') date " + + "FROM opensearch_dashboards_sample_data_logs " + + "ORDER BY date_format(utc_time, 'dd-MM-YYYY') desc, ip"; + + Select select = getSelect(query); + + List orderBys = select.getOrderBys(); + assertThat(orderBys.size(), equalTo(2)); + + Order formula = orderBys.get(0); + + assertThat(formula.isScript(), is(true)); + assertThat(formula.getType(), is("DESC")); + assertThat(formula.getName(), containsString("DateTimeFormatter.ofPattern")); + + Order ip = orderBys.get(1); + + assertThat(ip.isScript(), is(false)); + assertThat(ip.getName(), is("ip")); + assertThat(ip.getType(), is("ASC")); + } + + @Test + public void groupByWithDescOrder() throws SqlParseException { + String query = + "SELECT date_format(utc_time, 'dd-MM-YYYY'), count(*) " + + "FROM opensearch_dashboards_sample_data_logs " + + "GROUP BY date_format(utc_time, 'dd-MM-YYYY') " + + "ORDER BY date_format(utc_time, 'dd-MM-YYYY') DESC"; + + JSONObject aggregation = getAggregation(query); + assertThat(aggregation.getInt("size"), is(getSelect(query).getRowCount())); + assertThat(aggregation.getJSONObject("order").getString("_key"), is("desc")); + } + + @Test + public void groupByWithAscOrder() throws SqlParseException { + String query = + "SELECT date_format(utc_time, 'dd-MM-YYYY'), count(*) " + + "FROM opensearch_dashboards_sample_data_logs " + + "GROUP BY date_format(utc_time, 'dd-MM-YYYY') " + + "ORDER BY date_format(utc_time, 'dd-MM-YYYY')"; + + JSONObject aggregation = getAggregation(query); + + assertThat(aggregation.getJSONObject("order").getString("_key"), is("asc")); + } + + @Test + @Ignore("https://github.com/opendistro-for-elasticsearch/sql/issues/158") + public void groupByWithAndAlias() throws SqlParseException { + String query = + "SELECT date_format(utc_time, 'dd-MM-YYYY') x, 
count(*) " + + "FROM opensearch_dashboards_sample_data_logs " + + "GROUP BY x " + + "ORDER BY x"; + + JSONObject aggregation = getAggregation(query); + assertThat(aggregation.getJSONObject("order").getString("_key"), is("asc")); + } + + public JSONObject getAggregation(String query) throws SqlParseException { + Select select = getSelect(query); + + Client client = mock(Client.class); + AggregationQueryAction queryAction = new AggregationQueryAction(client, select); + + String elasticDsl = queryAction.explain().explain(); + JSONObject elasticQuery = new JSONObject(elasticDsl); + + JSONObject aggregations = elasticQuery.getJSONObject("aggregations"); + String dateFormatAggregationKey = getScriptAggregationKey(aggregations, "date_format"); + + return aggregations.getJSONObject(dateFormatAggregationKey).getJSONObject("terms"); + } + + public static String getScriptAggregationKey(JSONObject aggregation, String prefix) { + return aggregation.keySet().stream() + .filter(x -> x.startsWith(prefix)) + .findFirst() + .orElseThrow( + () -> + new RuntimeException("Can't find key" + prefix + " in aggregation " + aggregation)); + } + + @Test + public void notEqualCondition() { + List q = + query( + SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY-MM-dd') <> '2018-04-02'"); + + assertThat(q, hasNotQueryWithValue("from", equalTo(BytesRefs.toBytesRef("2018-04-02")))); + assertThat(q, hasNotQueryWithValue("to", equalTo(BytesRefs.toBytesRef("2018-04-02")))); + } + + @Test + public void greaterThanCondition() { + List q = + query( + SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY-MM-dd') > '2018-04-02'"); + + assertThat(q, hasQueryWithValue("from", equalTo(BytesRefs.toBytesRef("2018-04-02")))); + assertThat(q, hasQueryWithValue("includeLower", equalTo(false))); + assertThat(q, hasQueryWithValue("includeUpper", equalTo(true))); + } + + @Test + public void greaterThanOrEqualToCondition() { + List q = + query( + SELECT_CNT_FROM_DATE + "WHERE 
date_format(creationDate, 'YYYY-MM-dd') >= '2018-04-02'"); + + assertThat(q, hasQueryWithValue("from", equalTo(BytesRefs.toBytesRef("2018-04-02")))); + assertThat(q, hasQueryWithValue("to", equalTo(null))); + assertThat(q, hasQueryWithValue("includeLower", equalTo(true))); + assertThat(q, hasQueryWithValue("includeUpper", equalTo(true))); + } + + @Test + public void timeZoneCondition() { + List q = + query( + SELECT_CNT_FROM_DATE + + "WHERE date_format(creationDate, 'YYYY-MM-dd', 'America/Phoenix') >" + + " '2018-04-02'"); + + // Used hasProperty here as getter followed convention for obtaining ID and Feature Matcher was + // having issues with generic type to obtain value + assertThat(q, hasQueryWithValue("timeZone", hasProperty("id", equalTo("America/Phoenix")))); + } + + private List query(String sql) { + return translate(parseSql(sql)); + } + + private List translate(SQLQueryExpr expr) { + try { + Select select = new SqlParser().parseSelect(expr); + QueryBuilder whereQuery = QueryMaker.explain(select.getWhere(), select.isQuery); + return ((BoolQueryBuilder) whereQuery).filter(); + } catch (SqlParseException e) { + throw new ParserException("Illegal sql expr: " + expr.toString()); } + } - @Test - public void timeZoneCondition() { - List q = query(SELECT_CNT_FROM_DATE + "WHERE date_format(creationDate, 'YYYY-MM-dd', 'America/Phoenix') > '2018-04-02'"); - - // Used hasProperty here as getter followed convention for obtaining ID and Feature Matcher was having issues with generic type to obtain value - assertThat(q, hasQueryWithValue("timeZone", hasProperty("id", equalTo("America/Phoenix")))); - } - - private List query(String sql) { - return translate(parseSql(sql)); - } - - private List translate(SQLQueryExpr expr) { - try { - Select select = new SqlParser().parseSelect(expr); - QueryBuilder whereQuery = QueryMaker.explain(select.getWhere(), select.isQuery); - return ((BoolQueryBuilder) whereQuery).filter(); - } catch (SqlParseException e) { - throw new 
ParserException("Illegal sql expr: " + expr.toString()); - } - } - - private SQLQueryExpr parseSql(String sql) { - ElasticSqlExprParser parser = new ElasticSqlExprParser(sql); - SQLExpr expr = parser.expr(); - if (parser.getLexer().token() != Token.EOF) { - throw new ParserException("Illegal sql: " + sql); - } - return (SQLQueryExpr) expr; + private SQLQueryExpr parseSql(String sql) { + ElasticSqlExprParser parser = new ElasticSqlExprParser(sql); + SQLExpr expr = parser.expr(); + if (parser.getLexer().token() != Token.EOF) { + throw new ParserException("Illegal sql: " + sql); } - - private Select getSelect(String query) { - try { - Select select = new SqlParser().parseSelect(parseSql(query)); - if (select.getRowCount() == null){ - select.setRowCount(Select.DEFAULT_LIMIT); - } - return select; - } catch (SqlParseException e) { - throw new RuntimeException(e); - } - } - - private Matcher> hasQueryWithValue(String name, Matcher matcher) { - return hasItem( - hasFieldWithValue("mustClauses", "has mustClauses", - hasItem(hasFieldWithValue(name, "has " + name, matcher)))); - } - - private Matcher> hasNotQueryWithValue(String name, Matcher matcher) { - return hasItem( - hasFieldWithValue("mustClauses", "has mustClauses", - hasItem(hasFieldWithValue("mustNotClauses", "has mustNotClauses", - hasItem(hasFieldWithValue(name, "has " + name, matcher)))))); - } - - private Matcher> hasQueryWithValueGetter(Matcher matcher) { - return hasItem( - hasFieldWithValue("mustClauses", "has mustClauses", - hasItem(matcher))); + return (SQLQueryExpr) expr; + } + + private Select getSelect(String query) { + try { + Select select = new SqlParser().parseSelect(parseSql(query)); + if (select.getRowCount() == null) { + select.setRowCount(Select.DEFAULT_LIMIT); + } + return select; + } catch (SqlParseException e) { + throw new RuntimeException(e); } + } + + private Matcher> hasQueryWithValue( + String name, Matcher matcher) { + return hasItem( + hasFieldWithValue( + "mustClauses", + "has 
mustClauses", + hasItem(hasFieldWithValue(name, "has " + name, matcher)))); + } + + private Matcher> hasNotQueryWithValue( + String name, Matcher matcher) { + return hasItem( + hasFieldWithValue( + "mustClauses", + "has mustClauses", + hasItem( + hasFieldWithValue( + "mustNotClauses", + "has mustNotClauses", + hasItem(hasFieldWithValue(name, "has " + name, matcher)))))); + } + + private Matcher> hasQueryWithValueGetter(Matcher matcher) { + return hasItem(hasFieldWithValue("mustClauses", "has mustClauses", hasItem(matcher))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/DateFunctionsTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/DateFunctionsTest.java index 771b0ce1bf..cf1be90665 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/DateFunctionsTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/DateFunctionsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.junit.Assert.assertTrue; @@ -20,181 +19,132 @@ public class DateFunctionsTest { - private static SqlParser parser; - - @BeforeClass - public static void init() { parser = new SqlParser(); } - - /** - * The following unit tests will only cover a subset of the available date functions as the painless script is - * generated from the same template. More thorough testing will be done in integration tests since output will - * differ for each function. 
- */ - - @Test - public void yearInSelect() { - String query = "SELECT YEAR(creationDate) " + - "FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "doc['creationDate'].value.year")); - } - - @Test - public void yearInWhere() { - String query = "SELECT * " + - "FROM dates " + - "WHERE YEAR(creationDate) > 2012"; - ScriptFilter scriptFilter = getScriptFilterFromQuery(query, parser); - assertTrue( - scriptContainsString( - scriptFilter, - "doc['creationDate'].value.year")); - assertTrue( - scriptHasPattern( - scriptFilter, - "year_\\d+ > 2012")); - } - - @Test - public void weekOfYearInSelect() { - String query = "SELECT WEEK_OF_YEAR(creationDate) " + - "FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "doc['creationDate'].value.get(WeekFields.ISO.weekOfWeekBasedYear())")); - } - - @Test - public void weekOfYearInWhere() { - String query = "SELECT * " + - "FROM dates " + - "WHERE WEEK_OF_YEAR(creationDate) > 15"; - ScriptFilter scriptFilter = getScriptFilterFromQuery(query, parser); - assertTrue( - scriptContainsString( - scriptFilter, - "doc['creationDate'].value.get(WeekFields.ISO.weekOfWeekBasedYear())")); - assertTrue( - scriptHasPattern( - scriptFilter, - "weekOfWeekyear_\\d+ > 15")); - } - - @Test - public void dayOfMonth() { - String query = "SELECT DAY_OF_MONTH(creationDate) " + - "FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "doc['creationDate'].value.dayOfMonth")); - } - - @Test - public void hourOfDay() { - String query = "SELECT HOUR_OF_DAY(creationDate) " + - "FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "doc['creationDate'].value.hour")); - } - - @Test - public void secondOfMinute() { - String query = "SELECT 
SECOND_OF_MINUTE(creationDate) " + - "FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "doc['creationDate'].value.second")); - } - - @Test - public void month() { - String query = "SELECT MONTH(creationDate) FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "doc['creationDate'].value.monthValue")); - } - - @Test - public void dayofmonth() { - String query = "SELECT DAY_OF_MONTH(creationDate) FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "doc['creationDate'].value.dayOfMonth")); - } - - @Test - public void date() { - String query = "SELECT DATE(creationDate) FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "LocalDate.parse(doc['creationDate'].value.toString(),DateTimeFormatter.ISO_DATE_TIME)")); - } - - @Test - public void monthname() { - String query = "SELECT MONTHNAME(creationDate) FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "doc['creationDate'].value.month")); - } - - @Test - public void timestamp() { - String query = "SELECT TIMESTAMP(creationDate) FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "DateTimeFormatter.ofPattern('yyyy-MM-dd HH:mm:ss')")); - } - - @Test - public void maketime() { - String query = "SELECT MAKETIME(1, 1, 1) FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "LocalTime.of(1, 1, 1).format(DateTimeFormatter.ofPattern('HH:mm:ss'))")); - } - - @Test - public void now() { - String query = "SELECT NOW() FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - 
scriptContainsString( - scriptField, - "System.currentTimeMillis()")); - } - - @Test - public void curdate() { - String query = "SELECT CURDATE() FROM dates"; - ScriptField scriptField = getScriptFieldFromQuery(query); - assertTrue( - scriptContainsString( - scriptField, - "System.currentTimeMillis()")); - } + private static SqlParser parser; + + @BeforeClass + public static void init() { + parser = new SqlParser(); + } + + /** + * The following unit tests will only cover a subset of the available date functions as the + * painless script is generated from the same template. More thorough testing will be done in + * integration tests since output will differ for each function. + */ + @Test + public void yearInSelect() { + String query = "SELECT YEAR(creationDate) " + "FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue(scriptContainsString(scriptField, "doc['creationDate'].value.year")); + } + + @Test + public void yearInWhere() { + String query = "SELECT * " + "FROM dates " + "WHERE YEAR(creationDate) > 2012"; + ScriptFilter scriptFilter = getScriptFilterFromQuery(query, parser); + assertTrue(scriptContainsString(scriptFilter, "doc['creationDate'].value.year")); + assertTrue(scriptHasPattern(scriptFilter, "year_\\d+ > 2012")); + } + + @Test + public void weekOfYearInSelect() { + String query = "SELECT WEEK_OF_YEAR(creationDate) " + "FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue( + scriptContainsString( + scriptField, "doc['creationDate'].value.get(WeekFields.ISO.weekOfWeekBasedYear())")); + } + + @Test + public void weekOfYearInWhere() { + String query = "SELECT * " + "FROM dates " + "WHERE WEEK_OF_YEAR(creationDate) > 15"; + ScriptFilter scriptFilter = getScriptFilterFromQuery(query, parser); + assertTrue( + scriptContainsString( + scriptFilter, "doc['creationDate'].value.get(WeekFields.ISO.weekOfWeekBasedYear())")); + assertTrue(scriptHasPattern(scriptFilter, "weekOfWeekyear_\\d+ > 15")); 
+ } + + @Test + public void dayOfMonth() { + String query = "SELECT DAY_OF_MONTH(creationDate) " + "FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue(scriptContainsString(scriptField, "doc['creationDate'].value.dayOfMonth")); + } + + @Test + public void hourOfDay() { + String query = "SELECT HOUR_OF_DAY(creationDate) " + "FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue(scriptContainsString(scriptField, "doc['creationDate'].value.hour")); + } + + @Test + public void secondOfMinute() { + String query = "SELECT SECOND_OF_MINUTE(creationDate) " + "FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue(scriptContainsString(scriptField, "doc['creationDate'].value.second")); + } + + @Test + public void month() { + String query = "SELECT MONTH(creationDate) FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue(scriptContainsString(scriptField, "doc['creationDate'].value.monthValue")); + } + + @Test + public void dayofmonth() { + String query = "SELECT DAY_OF_MONTH(creationDate) FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue(scriptContainsString(scriptField, "doc['creationDate'].value.dayOfMonth")); + } + + @Test + public void date() { + String query = "SELECT DATE(creationDate) FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue( + scriptContainsString( + scriptField, + "LocalDate.parse(doc['creationDate'].value.toString(),DateTimeFormatter.ISO_DATE_TIME)")); + } + + @Test + public void monthname() { + String query = "SELECT MONTHNAME(creationDate) FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue(scriptContainsString(scriptField, "doc['creationDate'].value.month")); + } + + @Test + public void timestamp() { + String query = "SELECT TIMESTAMP(creationDate) FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + 
assertTrue( + scriptContainsString(scriptField, "DateTimeFormatter.ofPattern('yyyy-MM-dd HH:mm:ss')")); + } + + @Test + public void maketime() { + String query = "SELECT MAKETIME(1, 1, 1) FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue( + scriptContainsString( + scriptField, "LocalTime.of(1, 1, 1).format(DateTimeFormatter.ofPattern('HH:mm:ss'))")); + } + + @Test + public void now() { + String query = "SELECT NOW() FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue(scriptContainsString(scriptField, "System.currentTimeMillis()")); + } + + @Test + public void curdate() { + String query = "SELECT CURDATE() FROM dates"; + ScriptField scriptField = getScriptFieldFromQuery(query); + assertTrue(scriptContainsString(scriptField, "System.currentTimeMillis()")); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/ErrorMessageFactoryTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/ErrorMessageFactoryTest.java index c4c9504486..09cd9e9efc 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/ErrorMessageFactoryTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/ErrorMessageFactoryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import org.junit.Assert; @@ -16,35 +15,39 @@ public class ErrorMessageFactoryTest { - private Throwable nonOpenSearchThrowable = new Throwable(); - private Throwable openSearchThrowable = new OpenSearchException(nonOpenSearchThrowable); - - @Test - public void openSearchExceptionShouldCreateEsErrorMessage() { - Exception exception = new OpenSearchException(nonOpenSearchThrowable); - ErrorMessage msg = ErrorMessageFactory.createErrorMessage(exception, RestStatus.BAD_REQUEST.getStatus()); - Assert.assertTrue(msg instanceof OpenSearchErrorMessage); - } - - @Test - public void nonOpenSearchExceptionShouldCreateGenericErrorMessage() { - Exception 
exception = new Exception(nonOpenSearchThrowable); - ErrorMessage msg = ErrorMessageFactory.createErrorMessage(exception, RestStatus.BAD_REQUEST.getStatus()); - Assert.assertFalse(msg instanceof OpenSearchErrorMessage); - } - - @Test - public void nonOpenSearchExceptionWithWrappedEsExceptionCauseShouldCreateEsErrorMessage() { - Exception exception = (Exception) openSearchThrowable; - ErrorMessage msg = ErrorMessageFactory.createErrorMessage(exception, RestStatus.BAD_REQUEST.getStatus()); - Assert.assertTrue(msg instanceof OpenSearchErrorMessage); - } - - @Test - public void nonOpenSearchExceptionWithMultiLayerWrappedEsExceptionCauseShouldCreateEsErrorMessage() { - Exception exception = new Exception(new Throwable(new Throwable(openSearchThrowable))); - ErrorMessage msg = ErrorMessageFactory.createErrorMessage(exception, RestStatus.BAD_REQUEST.getStatus()); - Assert.assertTrue(msg instanceof OpenSearchErrorMessage); - } - + private Throwable nonOpenSearchThrowable = new Throwable(); + private Throwable openSearchThrowable = new OpenSearchException(nonOpenSearchThrowable); + + @Test + public void openSearchExceptionShouldCreateEsErrorMessage() { + Exception exception = new OpenSearchException(nonOpenSearchThrowable); + ErrorMessage msg = + ErrorMessageFactory.createErrorMessage(exception, RestStatus.BAD_REQUEST.getStatus()); + Assert.assertTrue(msg instanceof OpenSearchErrorMessage); + } + + @Test + public void nonOpenSearchExceptionShouldCreateGenericErrorMessage() { + Exception exception = new Exception(nonOpenSearchThrowable); + ErrorMessage msg = + ErrorMessageFactory.createErrorMessage(exception, RestStatus.BAD_REQUEST.getStatus()); + Assert.assertFalse(msg instanceof OpenSearchErrorMessage); + } + + @Test + public void nonOpenSearchExceptionWithWrappedEsExceptionCauseShouldCreateEsErrorMessage() { + Exception exception = (Exception) openSearchThrowable; + ErrorMessage msg = + ErrorMessageFactory.createErrorMessage(exception, RestStatus.BAD_REQUEST.getStatus()); 
+ Assert.assertTrue(msg instanceof OpenSearchErrorMessage); + } + + @Test + public void + nonOpenSearchExceptionWithMultiLayerWrappedEsExceptionCauseShouldCreateEsErrorMessage() { + Exception exception = new Exception(new Throwable(new Throwable(openSearchThrowable))); + ErrorMessage msg = + ErrorMessageFactory.createErrorMessage(exception, RestStatus.BAD_REQUEST.getStatus()); + Assert.assertTrue(msg instanceof OpenSearchErrorMessage); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/FormatTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/FormatTest.java index 5a13125013..deb7b5f600 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/FormatTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/FormatTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.junit.Assert.assertEquals; @@ -16,15 +15,15 @@ public class FormatTest { - @Test - public void ofJdbcShouldReturnJDBCFormat() { - Optional format = Format.of(Format.JDBC.getFormatName()); - assertTrue(format.isPresent()); - assertEquals(Format.JDBC, format.get()); - } + @Test + public void ofJdbcShouldReturnJDBCFormat() { + Optional format = Format.of(Format.JDBC.getFormatName()); + assertTrue(format.isPresent()); + assertEquals(Format.JDBC, format.get()); + } - @Test - public void ofUnknownFormatShouldReturnEmpty() { - assertFalse(Format.of("xml").isPresent()); - } + @Test + public void ofUnknownFormatShouldReturnEmpty() { + assertFalse(Format.of("xml").isPresent()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/HavingTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/HavingTest.java index fee440c3e9..8863af0463 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/HavingTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/HavingTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: 
Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static java.util.stream.Collectors.toMap; @@ -35,360 +34,277 @@ import org.opensearch.sql.legacy.query.maker.AggMaker; import org.opensearch.sql.legacy.util.SqlParserUtils; - public class HavingTest { - private static final String SELECT_CNT = "SELECT COUNT(*) as c "; - private static final String SELECT_CNT_AVG = "SELECT COUNT(*) as c, AVG(age) as a "; - private static final String SELECT_CNT_AVG_SUM = "SELECT COUNT(*) as c, AVG(age) as a, SUM(income) as i "; - private static final String FROM_BANK = "FROM bank "; - private static final String GROUP_BY_AGE = "GROUP BY age "; - private static final String SELECT_CNT_FROM_BANK_GROUP_BY_AGE = SELECT_CNT + FROM_BANK + GROUP_BY_AGE; - private static final String SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE = SELECT_CNT_AVG + FROM_BANK + GROUP_BY_AGE; - private static final String SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE = SELECT_CNT_AVG_SUM + FROM_BANK + GROUP_BY_AGE; - private static final String NESTED_SELECT_COUNT = "SELECT COUNT(nested(income, 'income')) as c "; - private static final String NESTED_SELECT_CNT_FROM_BANK_GROUP_BY_AGE = NESTED_SELECT_COUNT + FROM_BANK + GROUP_BY_AGE; - - @Test - public void singleCondition() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30"), - contains( - bucketSelector( - hasBucketPath("c: c", "a: a"), - hasScript("params.a > 30") - ) - )); - } - - @Ignore - @Test - public void singleConditionWithTwoAggExpr() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a > c"), - contains( - bucketSelector( - hasBucketPath("c: c", "a: a"), - hasScript("params.a > params.c") - ) - )); - } - - @Test - public void singleConditionWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(age) > 30"), - contains( - bucketSelector( - hasBucketPath("c: c", "a: a", "avg_0: avg_0"), - hasScript("params.avg_0 > 30") - ) - )); - } - - @Ignore - @Test - public 
void singleConditionWithHavingTwoAggExpr() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(age) > COUNT(*)"), - contains( - bucketSelector( - hasBucketPath("c: c", "a: a", "avg_0: avg_0", "count_0: count_0"), - hasScript("params.avg_0 > count_0") - ) - )); - } - - @Test - public void nestedSingleCondition() { - assertThat( - query(NESTED_SELECT_CNT_FROM_BANK_GROUP_BY_AGE + "HAVING c > 30"), - contains( - bucketSelector( - hasBucketPath("c: income@NESTED.c"), - hasScript("params.c > 30") - ) - )); - } - - @Test - public void singleConditionWithOneFieldInSelect() { - assertThat( - query(SELECT_CNT_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30"), - contains( - bucketSelector( - hasBucketPath("c: c") - ) - )); - } - - @Test - public void singleConditionWithOneFieldInSelectWithHavingAgg() { - assertThat( - query(SELECT_CNT_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) > 30"), - contains( - bucketSelector( - hasBucketPath("c: c", "avg_0: avg_0"), - hasScript("params.avg_0 > 30") - ) - )); - } - - @Test - public void singleConditionWithThreeFieldsInSelect() { - assertThat( - query(SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30"), - contains( - bucketSelector( - hasBucketPath("c: c", "a: a", "i: i") - ) - )); - } - - @Test - public void singleConditionWithThreeFieldsInSelectWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) > 30"), - contains( - bucketSelector( - hasBucketPath("c: c", "a: a", "i: i", "avg_0: avg_0"), - hasScript("params.avg_0 > 30") - ) - )); - } - - @Test - public void notEqualCondition() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a <> 30"), - contains( - bucketSelector( - hasScript("params.a != 30") - ) - )); - } - - @Test - public void notEqualConditionWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) <> 30"), - contains( - bucketSelector( - hasScript("params.avg_0 != 30") - ) - )); - } - - @Test - public 
void notCondition() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING NOT (a > 30)"), - contains( - bucketSelector( - hasScript("params.a <= 30") - ) - )); - } - - @Test - public void notConditionWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING NOT (AVG(a) > 30)"), - contains( - bucketSelector( - hasScript("params.avg_0 <= 30") - ) - )); - } - - @Test - public void andConditions() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30 AND c <= 10"), - contains( - bucketSelector( - hasScript("params.a > 30 && params.c <= 10") - ) - )); - } - - @Test - public void andConditionsWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) > 30 AND SUM(c) <= 10"), - contains( - bucketSelector( - hasScript("params.avg_0 > 30 && params.sum_1 <= 10") - ) - )); - } - - @Test - public void orConditions() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30 OR c <= 10"), - contains( - bucketSelector( - hasScript("params.a > 30 || params.c <= 10") - ) - )); - } - - @Test - public void orConditionsWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) > 30 OR SUM(c) <= 10"), - contains( - bucketSelector( - hasScript("params.avg_0 > 30 || params.sum_1 <= 10") - ) - )); - } - - @Test - public void betweenCondition() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a BETWEEN 30 AND 50"), - contains( - bucketSelector( - hasScript("params.a >= 30 && params.a <= 50") - ) - )); - } - - @Test - public void betweenConditionWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) BETWEEN 30 AND 50"), - contains( - bucketSelector( - hasScript("params.avg_0 >= 30 && params.avg_0 <= 50") - ) - )); - } - - @Test - public void notBetweenCondition() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a NOT BETWEEN 30 AND 50"), - 
contains( - bucketSelector( - hasScript("params.a < 30 || params.a > 50") - ) - )); - } - - @Test - public void notBetweenConditionWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) NOT BETWEEN 30 AND 50"), - contains( - bucketSelector( - hasScript("params.avg_0 < 30 || params.avg_0 > 50") - ) - )); - } - - @Test - public void inCondition() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a IN (30, 40, 50)"), - contains( - bucketSelector( - hasScript("params.a == 30 || params.a == 40 || params.a == 50") - ) - )); - } - - @Test - public void inConditionWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) IN (30, 40, 50)"), - contains( - bucketSelector( - hasScript("params.avg_0 == 30 || params.avg_0 == 40 || params.avg_0 == 50") - ) - )); - } - - @Test - public void notInCondition() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a NOT IN (30, 40, 50)"), - contains( - bucketSelector( - hasScript("params.a != 30 && params.a != 40 && params.a != 50") - ) - )); - } - - @Test - public void notInConditionWithHavingAgg() { - assertThat( - query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) NOT IN (30, 40, 50)"), - contains( - bucketSelector( - hasScript("params.avg_0 != 30 && params.avg_0 != 40 && params.avg_0 != 50") - ) - )); - } - - @Test - public void nestedConditions() { - assertThat( - query(SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE + "HAVING i <= 10000 OR NOT (a < 10 OR a > 30) AND c <= 10"), - contains( - bucketSelector( - hasScript("params.i <= 10000 || ((params.a >= 10 && params.a <= 30) && params.c <= 10)") - ) - )); - } - - @Test(expected = ParserException.class) - public void aggregationFunctionOnTheRight() { - query(SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE + "HAVING 10 < a"); - } - - private Collection query(String sql) { - return translate(SqlParserUtils.parse(sql)); - } - - private Collection translate(SQLQueryExpr 
expr) { - try { - Select select = new SqlParser().parseSelect(expr); - select.getFields().forEach(field -> { + private static final String SELECT_CNT = "SELECT COUNT(*) as c "; + private static final String SELECT_CNT_AVG = "SELECT COUNT(*) as c, AVG(age) as a "; + private static final String SELECT_CNT_AVG_SUM = + "SELECT COUNT(*) as c, AVG(age) as a, SUM(income) as i "; + private static final String FROM_BANK = "FROM bank "; + private static final String GROUP_BY_AGE = "GROUP BY age "; + private static final String SELECT_CNT_FROM_BANK_GROUP_BY_AGE = + SELECT_CNT + FROM_BANK + GROUP_BY_AGE; + private static final String SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE = + SELECT_CNT_AVG + FROM_BANK + GROUP_BY_AGE; + private static final String SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE = + SELECT_CNT_AVG_SUM + FROM_BANK + GROUP_BY_AGE; + private static final String NESTED_SELECT_COUNT = "SELECT COUNT(nested(income, 'income')) as c "; + private static final String NESTED_SELECT_CNT_FROM_BANK_GROUP_BY_AGE = + NESTED_SELECT_COUNT + FROM_BANK + GROUP_BY_AGE; + + @Test + public void singleCondition() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30"), + contains(bucketSelector(hasBucketPath("c: c", "a: a"), hasScript("params.a > 30")))); + } + + @Ignore + @Test + public void singleConditionWithTwoAggExpr() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a > c"), + contains(bucketSelector(hasBucketPath("c: c", "a: a"), hasScript("params.a > params.c")))); + } + + @Test + public void singleConditionWithHavingAgg() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(age) > 30"), + contains( + bucketSelector( + hasBucketPath("c: c", "a: a", "avg_0: avg_0"), hasScript("params.avg_0 > 30")))); + } + + @Ignore + @Test + public void singleConditionWithHavingTwoAggExpr() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(age) > COUNT(*)"), + contains( + bucketSelector( + 
hasBucketPath("c: c", "a: a", "avg_0: avg_0", "count_0: count_0"), + hasScript("params.avg_0 > count_0")))); + } + + @Test + public void nestedSingleCondition() { + assertThat( + query(NESTED_SELECT_CNT_FROM_BANK_GROUP_BY_AGE + "HAVING c > 30"), + contains(bucketSelector(hasBucketPath("c: income@NESTED.c"), hasScript("params.c > 30")))); + } + + @Test + public void singleConditionWithOneFieldInSelect() { + assertThat( + query(SELECT_CNT_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30"), + contains(bucketSelector(hasBucketPath("c: c")))); + } + + @Test + public void singleConditionWithOneFieldInSelectWithHavingAgg() { + assertThat( + query(SELECT_CNT_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) > 30"), + contains( + bucketSelector(hasBucketPath("c: c", "avg_0: avg_0"), hasScript("params.avg_0 > 30")))); + } + + @Test + public void singleConditionWithThreeFieldsInSelect() { + assertThat( + query(SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30"), + contains(bucketSelector(hasBucketPath("c: c", "a: a", "i: i")))); + } + + @Test + public void singleConditionWithThreeFieldsInSelectWithHavingAgg() { + assertThat( + query(SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) > 30"), + contains( + bucketSelector( + hasBucketPath("c: c", "a: a", "i: i", "avg_0: avg_0"), + hasScript("params.avg_0 > 30")))); + } + + @Test + public void notEqualCondition() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a <> 30"), + contains(bucketSelector(hasScript("params.a != 30")))); + } + + @Test + public void notEqualConditionWithHavingAgg() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) <> 30"), + contains(bucketSelector(hasScript("params.avg_0 != 30")))); + } + + @Test + public void notCondition() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING NOT (a > 30)"), + contains(bucketSelector(hasScript("params.a <= 30")))); + } + + @Test + public void notConditionWithHavingAgg() { + assertThat( + 
query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING NOT (AVG(a) > 30)"), + contains(bucketSelector(hasScript("params.avg_0 <= 30")))); + } + + @Test + public void andConditions() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30 AND c <= 10"), + contains(bucketSelector(hasScript("params.a > 30 && params.c <= 10")))); + } + + @Test + public void andConditionsWithHavingAgg() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) > 30 AND SUM(c) <= 10"), + contains(bucketSelector(hasScript("params.avg_0 > 30 && params.sum_1 <= 10")))); + } + + @Test + public void orConditions() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a > 30 OR c <= 10"), + contains(bucketSelector(hasScript("params.a > 30 || params.c <= 10")))); + } + + @Test + public void orConditionsWithHavingAgg() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) > 30 OR SUM(c) <= 10"), + contains(bucketSelector(hasScript("params.avg_0 > 30 || params.sum_1 <= 10")))); + } + + @Test + public void betweenCondition() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a BETWEEN 30 AND 50"), + contains(bucketSelector(hasScript("params.a >= 30 && params.a <= 50")))); + } + + @Test + public void betweenConditionWithHavingAgg() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) BETWEEN 30 AND 50"), + contains(bucketSelector(hasScript("params.avg_0 >= 30 && params.avg_0 <= 50")))); + } + + @Test + public void notBetweenCondition() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a NOT BETWEEN 30 AND 50"), + contains(bucketSelector(hasScript("params.a < 30 || params.a > 50")))); + } + + @Test + public void notBetweenConditionWithHavingAgg() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) NOT BETWEEN 30 AND 50"), + contains(bucketSelector(hasScript("params.avg_0 < 30 || params.avg_0 > 50")))); + } + + @Test + 
public void inCondition() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a IN (30, 40, 50)"), + contains(bucketSelector(hasScript("params.a == 30 || params.a == 40 || params.a == 50")))); + } + + @Test + public void inConditionWithHavingAgg() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) IN (30, 40, 50)"), + contains( + bucketSelector( + hasScript("params.avg_0 == 30 || params.avg_0 == 40 || params.avg_0 == 50")))); + } + + @Test + public void notInCondition() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING a NOT IN (30, 40, 50)"), + contains(bucketSelector(hasScript("params.a != 30 && params.a != 40 && params.a != 50")))); + } + + @Test + public void notInConditionWithHavingAgg() { + assertThat( + query(SELECT_CNT_AVG_FROM_BANK_GROUP_BY_AGE + "HAVING AVG(a) NOT IN (30, 40, 50)"), + contains( + bucketSelector( + hasScript("params.avg_0 != 30 && params.avg_0 != 40 && params.avg_0 != 50")))); + } + + @Test + public void nestedConditions() { + assertThat( + query( + SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE + + "HAVING i <= 10000 OR NOT (a < 10 OR a > 30) AND c <= 10"), + contains( + bucketSelector( + hasScript( + "params.i <= 10000 || ((params.a >= 10 && params.a <= 30) && params.c <=" + + " 10)")))); + } + + @Test(expected = ParserException.class) + public void aggregationFunctionOnTheRight() { + query(SELECT_CNT_AVG_SUM_FROM_BANK_GROUP_BY_AGE + "HAVING 10 < a"); + } + + private Collection query(String sql) { + return translate(SqlParserUtils.parse(sql)); + } + + private Collection translate(SQLQueryExpr expr) { + try { + Select select = new SqlParser().parseSelect(expr); + select + .getFields() + .forEach( + field -> { try { - new AggMaker() - .withWhere(select.getWhere()) - .makeFieldAgg((MethodField) field, AggregationBuilders.terms("")); + new AggMaker() + .withWhere(select.getWhere()) + .makeFieldAgg((MethodField) field, AggregationBuilders.terms("")); } catch (SqlParseException 
e) { - throw new RuntimeException(e); + throw new RuntimeException(e); } - }); - AggregationBuilder agg = AggregationBuilders.terms(""); - select.getHaving().explain(agg, select.getFields()); - return agg.getPipelineAggregations(); - } catch (SqlParseException e) { - throw new ParserException("Illegal sql expr: " + expr.toString()); - } - } - - @SafeVarargs - private final Matcher bucketSelector(Matcher... matchers) { - return both(Matchers. // instanceOf() has type inference problem - instanceOf(BucketSelectorPipelineAggregationBuilder.class) - ). - and(allOf(matchers)); - } - - private Matcher hasBucketPath(String... expectedBucketPath) { - Map expectedMap = Arrays.stream(expectedBucketPath). - map(e -> e.split(":")). - collect(toMap(e -> e[0].trim(), e -> e[1].trim())); - return hasFieldWithValue("bucketsPathsMap", "has bucket path", is(expectedMap)); - } - - private Matcher hasScript(String expectedCode) { - return hasFieldWithValue("script", "has script", is(new Script(expectedCode))); - } + }); + AggregationBuilder agg = AggregationBuilders.terms(""); + select.getHaving().explain(agg, select.getFields()); + return agg.getPipelineAggregations(); + } catch (SqlParseException e) { + throw new ParserException("Illegal sql expr: " + expr.toString()); + } + } + + @SafeVarargs + private final Matcher bucketSelector( + Matcher... matchers) { + return both(Matchers + . // instanceOf() has type inference problem + instanceOf(BucketSelectorPipelineAggregationBuilder.class)) + .and(allOf(matchers)); + } + + private Matcher hasBucketPath(String... 
expectedBucketPath) { + Map expectedMap = + Arrays.stream(expectedBucketPath) + .map(e -> e.split(":")) + .collect(toMap(e -> e[0].trim(), e -> e[1].trim())); + return hasFieldWithValue("bucketsPathsMap", "has bucket path", is(expectedMap)); + } + + private Matcher hasScript(String expectedCode) { + return hasFieldWithValue("script", "has script", is(new Script(expectedCode))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/JSONRequestTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/JSONRequestTest.java index f546f3571a..5f17951af5 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/JSONRequestTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/JSONRequestTest.java @@ -3,13 +3,11 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyInt; import static org.mockito.Mockito.mock; @@ -24,9 +22,7 @@ import org.json.JSONObject; import org.junit.Before; import org.junit.Ignore; -import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.MockedStatic; @@ -52,360 +48,461 @@ @RunWith(MockitoJUnitRunner.Silent.class) public class JSONRequestTest { - @Mock - private ColumnTypeProvider columnTypeProvider; - - @Before - public void setup() { - when(columnTypeProvider.get(anyInt())).thenReturn(Schema.Type.DOUBLE); - } - - @Test - public void aggWithoutWhere() { - String explainSQL = explainSQL("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "GROUP BY name " + - "HAVING c > 1"); - assertThat(explainSQL, containsString( - 
"\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}")); - assertThat(explainSQL, containsString( - "\"buckets_path\":{\"c\":\"projects@NESTED.c\"}")); - } - - @Test - public void aggWithWhereOnParent() { - String explainSQL = explainSQL("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE name LIKE '%smith%' " + - "GROUP BY name " + - "HAVING c > 1"); - - assertThat(explainSQL, containsString( - "\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}}")); - assertThat(explainSQL, containsString( - "\"buckets_path\":{\"c\":\"projects@NESTED.c\"}")); - } - - @Test - public void aggWithWhereOnNested() { - String explainSQL = explainSQL("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " + - "HAVING c > 1"); - - assertThat(explainSQL, containsString("\"aggregations\":{\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"projects@FILTER\":{\"filter\":{\"bool\":{\"must\":[{\"wildcard\":{\"projects.name\":{\"wildcard\":\"*security*\",\"boost\":1.0}}}],\"adjust_pure_negative\":true,\"boost\":1.0}},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}}}}")); - assertThat(explainSQL, containsString("\"buckets_path\":{\"c\":\"projects@NESTED>projects@FILTER.c\"}")); - } - - @Test - public void aggWithWhereOnParentOrNested() { - String explainSQL = explainSQL("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE name LIKE '%smith%' OR nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " + - "HAVING c > 1"); - assertThat(explainSQL, containsString( - "\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}}")); - assertThat(explainSQL, 
containsString( - "\"buckets_path\":{\"c\":\"projects@NESTED.c\"}")); - } - - @Test - public void aggWithWhereOnParentAndNested() { - String explainSQL = explainSQL("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE name LIKE '%smith%' AND nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " + - "HAVING c > 1"); - assertThat(explainSQL, containsString( - "\"aggregations\":{\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"projects@FILTER\":{\"filter\":{\"bool\":{\"must\":[{\"wildcard\":{\"projects.name\":{\"wildcard\":\"*security*\",\"boost\":1.0}}}],\"adjust_pure_negative\":true,\"boost\":1.0}},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}}}")); - assertThat(explainSQL, containsString("\"buckets_path\":{\"c\":\"projects@NESTED>projects@FILTER.c\"}")); - } - - @Test - public void aggWithWhereOnNestedAndNested() { - String explainSQL = explainSQL("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE nested('projects', projects.started_year > 2000 AND projects.name LIKE '%security%') " + - "GROUP BY name " + - "HAVING c > 1"); - assertThat(explainSQL, containsString("\"aggregations\":{\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"projects@FILTER\":{\"filter\":{\"bool\":{\"must\":[{\"bool\":{\"must\":[{\"range\":{\"projects.started_year\":{\"from\":2000,\"to\":null,\"include_lower\":false,\"include_upper\":true,\"boost\":1.0}}},{\"wildcard\":{\"projects.name\":{\"wildcard\":\"*security*\",\"boost\":1.0}}}")); - assertThat(explainSQL, containsString("\"buckets_path\":{\"c\":\"projects@NESTED>projects@FILTER.c\"}")); - } - - @Test - public void aggWithWhereOnNestedOrNested() { - String explainSQL = explainSQL("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE nested('projects', projects.started_year > 2000 OR projects.name LIKE '%security%') " + - "GROUP BY name " 
+ - "HAVING c > 1"); - assertThat(explainSQL, containsString("\"aggregations\":{\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"projects@FILTER\":{\"filter\":{\"bool\":{\"must\":[{\"bool\":{\"should\":[{\"range\":{\"projects.started_year\":{\"from\":2000,\"to\":null,\"include_lower\":false,\"include_upper\":true,\"boost\":1.0}}},{\"wildcard\":{\"projects.name\":{\"wildcard\":\"*security*\",\"boost\":1.0}}}")); - assertThat(explainSQL, containsString("\"buckets_path\":{\"c\":\"projects@NESTED>projects@FILTER.c\"}")); - } - - @Test - public void aggInHavingWithoutWhere() { - JSONObject explainSQL = explainSQLToJson("SELECT name " + - "FROM employee " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1"); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/projects@NESTED/aggregations/count_0/value_count"), - equalTo("{\"field\":\"_index\"}")); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), - equalTo("{\"count_0\":\"projects@NESTED.count_0\"}")); - } - - @Test - public void aggInHavingWithWhereOnParent() { - JSONObject explainSQL = explainSQLToJson("SELECT name " + - "FROM employee " + - "WHERE name LIKE '%smith%' " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1"); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/projects@NESTED/aggregations/count_0/value_count"), - equalTo("{\"field\":\"_index\"}")); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), - equalTo("{\"count_0\":\"projects@NESTED.count_0\"}")); + @Mock private ColumnTypeProvider columnTypeProvider; + + @Before + public void setup() { + when(columnTypeProvider.get(anyInt())).thenReturn(Schema.Type.DOUBLE); + } + + @Test + public void aggWithoutWhere() { + String explainSQL = + explainSQL( + "SELECT name, COUNT(nested(projects, 'projects')) AS c " + + "FROM employee " + + 
"GROUP BY name " + + "HAVING c > 1"); + assertThat( + explainSQL, + containsString( + "\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}")); + assertThat(explainSQL, containsString("\"buckets_path\":{\"c\":\"projects@NESTED.c\"}")); + } + + @Test + public void aggWithWhereOnParent() { + String explainSQL = + explainSQL( + "SELECT name, COUNT(nested(projects, 'projects')) AS c " + + "FROM employee " + + "WHERE name LIKE '%smith%' " + + "GROUP BY name " + + "HAVING c > 1"); + + assertThat( + explainSQL, + containsString( + "\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}}")); + assertThat(explainSQL, containsString("\"buckets_path\":{\"c\":\"projects@NESTED.c\"}")); + } + + @Test + public void aggWithWhereOnNested() { + String explainSQL = + explainSQL( + "SELECT name, COUNT(nested(projects, 'projects')) AS c " + + "FROM employee " + + "WHERE nested(projects.name, 'projects') LIKE '%security%' " + + "GROUP BY name " + + "HAVING c > 1"); + + assertThat( + explainSQL, + containsString( + "\"aggregations\":{\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"projects@FILTER\":{\"filter\":{\"bool\":{\"must\":[{\"wildcard\":{\"projects.name\":{\"wildcard\":\"*security*\",\"boost\":1.0}}}],\"adjust_pure_negative\":true,\"boost\":1.0}},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}}}}")); + assertThat( + explainSQL, + containsString("\"buckets_path\":{\"c\":\"projects@NESTED>projects@FILTER.c\"}")); + } + + @Test + public void aggWithWhereOnParentOrNested() { + String explainSQL = + explainSQL( + "SELECT name, COUNT(nested(projects, 'projects')) AS c FROM employee WHERE name LIKE" + + " '%smith%' OR nested(projects.name, 'projects') LIKE '%security%' GROUP BY name" + + " HAVING c > 1"); + assertThat( + explainSQL, + containsString( + 
"\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}}")); + assertThat(explainSQL, containsString("\"buckets_path\":{\"c\":\"projects@NESTED.c\"}")); + } + + @Test + public void aggWithWhereOnParentAndNested() { + String explainSQL = + explainSQL( + "SELECT name, COUNT(nested(projects, 'projects')) AS c FROM employee WHERE name LIKE" + + " '%smith%' AND nested(projects.name, 'projects') LIKE '%security%' GROUP BY name" + + " HAVING c > 1"); + assertThat( + explainSQL, + containsString( + "\"aggregations\":{\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"projects@FILTER\":{\"filter\":{\"bool\":{\"must\":[{\"wildcard\":{\"projects.name\":{\"wildcard\":\"*security*\",\"boost\":1.0}}}],\"adjust_pure_negative\":true,\"boost\":1.0}},\"aggregations\":{\"c\":{\"value_count\":{\"field\":\"_index\"}}}}}")); + assertThat( + explainSQL, + containsString("\"buckets_path\":{\"c\":\"projects@NESTED>projects@FILTER.c\"}")); + } + + @Test + public void aggWithWhereOnNestedAndNested() { + String explainSQL = + explainSQL( + "SELECT name, COUNT(nested(projects, 'projects')) AS c FROM employee WHERE" + + " nested('projects', projects.started_year > 2000 AND projects.name LIKE" + + " '%security%') GROUP BY name HAVING c > 1"); + assertThat( + explainSQL, + containsString( + "\"aggregations\":{\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"projects@FILTER\":{\"filter\":{\"bool\":{\"must\":[{\"bool\":{\"must\":[{\"range\":{\"projects.started_year\":{\"from\":2000,\"to\":null,\"include_lower\":false,\"include_upper\":true,\"boost\":1.0}}},{\"wildcard\":{\"projects.name\":{\"wildcard\":\"*security*\",\"boost\":1.0}}}")); + assertThat( + explainSQL, + containsString("\"buckets_path\":{\"c\":\"projects@NESTED>projects@FILTER.c\"}")); + } + + @Test + public void aggWithWhereOnNestedOrNested() { + String explainSQL = + explainSQL( + "SELECT name, COUNT(nested(projects, 
'projects')) AS c FROM employee WHERE" + + " nested('projects', projects.started_year > 2000 OR projects.name LIKE" + + " '%security%') GROUP BY name HAVING c > 1"); + assertThat( + explainSQL, + containsString( + "\"aggregations\":{\"projects@NESTED\":{\"nested\":{\"path\":\"projects\"},\"aggregations\":{\"projects@FILTER\":{\"filter\":{\"bool\":{\"must\":[{\"bool\":{\"should\":[{\"range\":{\"projects.started_year\":{\"from\":2000,\"to\":null,\"include_lower\":false,\"include_upper\":true,\"boost\":1.0}}},{\"wildcard\":{\"projects.name\":{\"wildcard\":\"*security*\",\"boost\":1.0}}}")); + assertThat( + explainSQL, + containsString("\"buckets_path\":{\"c\":\"projects@NESTED>projects@FILTER.c\"}")); + } + + @Test + public void aggInHavingWithoutWhere() { + JSONObject explainSQL = + explainSQLToJson( + "SELECT name " + + "FROM employee " + + "GROUP BY name " + + "HAVING COUNT(nested(projects, 'projects')) > 1"); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/count_0/value_count"), + equalTo("{\"field\":\"_index\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), + equalTo("{\"count_0\":\"projects@NESTED.count_0\"}")); + } + + @Test + public void aggInHavingWithWhereOnParent() { + JSONObject explainSQL = + explainSQLToJson( + "SELECT name " + + "FROM employee " + + "WHERE name LIKE '%smith%' " + + "GROUP BY name " + + "HAVING COUNT(nested(projects, 'projects')) > 1"); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/count_0/value_count"), + equalTo("{\"field\":\"_index\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), + equalTo("{\"count_0\":\"projects@NESTED.count_0\"}")); + } + + @Test + public void aggInHavingWithWhereOnNested() { + JSONObject explainSQL = + explainSQLToJson( + "SELECT name " + + "FROM employee " + 
+ "WHERE nested(projects.name, 'projects') LIKE '%security%' " + + "GROUP BY name " + + "HAVING COUNT(nested(projects, 'projects')) > 1"); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/aggregations/count_0/value_count"), + equalTo("{\"field\":\"_index\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), + equalTo("{\"count_0\":\"projects@NESTED>projects@FILTER.count_0\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/filter/bool/must"), + equalTo("[{\"wildcard\":{\"projects.name\":{\"boost\":1,\"wildcard\":\"*security*\"}}}]")); + } + + @Test + public void aggInHavingWithWhereOnParentOrNested() { + JSONObject explainSQL = + explainSQLToJson( + "SELECT name FROM employee WHERE name LIKE '%smith%' OR nested(projects.name," + + " 'projects') LIKE '%security%' GROUP BY name HAVING COUNT(nested(projects," + + " 'projects')) > 1"); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/count_0/value_count"), + equalTo("{\"field\":\"_index\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), + equalTo("{\"count_0\":\"projects@NESTED.count_0\"}")); + } + + @Test + public void aggInHavingWithWhereOnParentAndNested() { + JSONObject explainSQL = + explainSQLToJson( + "SELECT name FROM employee WHERE name LIKE '%smith%' AND nested(projects.name," + + " 'projects') LIKE '%security%' GROUP BY name HAVING COUNT(nested(projects," + + " 'projects')) > 1"); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/aggregations/count_0/value_count"), + equalTo("{\"field\":\"_index\"}")); + assertThat( + query( + explainSQL, + 
"/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), + equalTo("{\"count_0\":\"projects@NESTED>projects@FILTER.count_0\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/filter/bool/must"), + equalTo("[{\"wildcard\":{\"projects.name\":{\"boost\":1,\"wildcard\":\"*security*\"}}}]")); + } + + @Test + public void aggInHavingWithWhereOnNestedAndNested() { + JSONObject explainSQL = + explainSQLToJson( + "SELECT name FROM employee WHERE nested('projects', projects.started_year > 2000 AND" + + " projects.name LIKE '%security%') GROUP BY name HAVING COUNT(nested(projects," + + " 'projects')) > 1"); + + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/aggregations/count_0/value_count"), + equalTo("{\"field\":\"_index\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), + equalTo("{\"count_0\":\"projects@NESTED>projects@FILTER.count_0\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/filter/bool/must"), + equalTo( + "[{\"bool\":{\"adjust_pure_negative\":true,\"must\":[{\"range\":{\"projects.started_year\":{\"include_lower\":false,\"include_upper\":true,\"from\":2000,\"boost\":1,\"to\":null}}},{\"wildcard\":{\"projects.name\":{\"boost\":1,\"wildcard\":\"*security*\"}}}],\"boost\":1}}]")); + } + + @Test + public void aggInHavingWithWhereOnNestedOrNested() { + JSONObject explainSQL = + explainSQLToJson( + "SELECT name FROM employee WHERE nested('projects', projects.started_year > 2000 OR" + + " projects.name LIKE '%security%') GROUP BY name HAVING COUNT(nested(projects," + + " 'projects')) > 1"); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/aggregations/count_0/value_count"), + 
equalTo("{\"field\":\"_index\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), + equalTo("{\"count_0\":\"projects@NESTED>projects@FILTER.count_0\"}")); + assertThat( + query( + explainSQL, + "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/filter/bool/must"), + equalTo( + "[{\"bool\":{\"adjust_pure_negative\":true,\"should\":[{\"range\":{\"projects.started_year\":{\"include_lower\":false,\"include_upper\":true,\"from\":2000,\"boost\":1,\"to\":null}}},{\"wildcard\":{\"projects.name\":{\"boost\":1,\"wildcard\":\"*security*\"}}}],\"boost\":1}}]")); + } + + @Test + public void searchSanity() throws IOException { + String result = + explain( + String.format( + "{\"query\":\"" + + "SELECT * " + + "FROM %s " + + "WHERE firstname LIKE 'A%%' AND age > 20 " + + "GROUP BY gender " + + "ORDER BY _score\"}", + TestsConstants.TEST_INDEX_ACCOUNT)); + String expectedOutput = + Files.toString( + new File( + getResourcePath() + "src/test/resources/expectedOutput/search_explain.json"), + StandardCharsets.UTF_8) + .replaceAll("\r", ""); + + assertThat(removeSpaces(result), equalTo(removeSpaces(expectedOutput))); + } + + // This test was ignored because group by case function is not supported + @Ignore + @Test + public void aggregationQuery() throws IOException { + String result = + explain( + String.format( + "{\"query\":\"SELECT address, CASE WHEN gender='0' THEN 'aaa' ELSE 'bbb' END AS" + + " a2345, count(age) FROM %s GROUP BY" + + " terms('field'='address','execution_hint'='global_ordinals'), a2345\"}", + TestsConstants.TEST_INDEX_ACCOUNT)); + String expectedOutput = + Files.toString( + new File( + getResourcePath() + + "src/test/resources/expectedOutput/aggregation_query_explain.json"), + StandardCharsets.UTF_8) + .replaceAll("\r", ""); + + assertThat(removeSpaces(result), equalTo(removeSpaces(expectedOutput))); + } + + @Test + public void deleteSanity() throws IOException { + 
try (MockedStatic localClusterStateMockedStatic = + Mockito.mockStatic(LocalClusterState.class)) { + LocalClusterState state = mock(LocalClusterState.class); + localClusterStateMockedStatic.when(LocalClusterState::state).thenReturn(state); + when(state.getSettingValue(any(Settings.Key.class))).thenReturn(true); + + String result = + explain( + String.format( + "{\"query\":\"" + + "DELETE " + + "FROM %s " + + "WHERE firstname LIKE 'A%%' AND age > 20\"}", + TestsConstants.TEST_INDEX_ACCOUNT)); + String expectedOutput = + Files.toString( + new File( + getResourcePath() + "src/test/resources/expectedOutput/delete_explain.json"), + StandardCharsets.UTF_8) + .replaceAll("\r", ""); + assertThat(removeSpaces(result), equalTo(removeSpaces(expectedOutput))); } - - @Test - public void aggInHavingWithWhereOnNested() { - JSONObject explainSQL = explainSQLToJson("SELECT name " + - "FROM employee " + - "WHERE nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1"); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/aggregations/count_0/value_count"), - equalTo("{\"field\":\"_index\"}")); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), - equalTo("{\"count_0\":\"projects@NESTED>projects@FILTER.count_0\"}")); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/filter/bool/must"), - equalTo("[{\"wildcard\":{\"projects.name\":{\"boost\":1,\"wildcard\":\"*security*\"}}}]")); - } - - @Test - public void aggInHavingWithWhereOnParentOrNested() { - JSONObject explainSQL = explainSQLToJson("SELECT name " + - "FROM employee " + - "WHERE name LIKE '%smith%' OR nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1"); - assertThat( - query(explainSQL, 
"/aggregations/name/aggregations/projects@NESTED/aggregations/count_0/value_count"), - equalTo("{\"field\":\"_index\"}")); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), - equalTo("{\"count_0\":\"projects@NESTED.count_0\"}")); + } + + @Test(expected = SQLFeatureDisabledException.class) + public void deleteShouldThrowExceptionWhenDisabled() + throws SQLFeatureDisabledException, SQLFeatureNotSupportedException, SqlParseException { + try (MockedStatic localClusterStateMockedStatic = + Mockito.mockStatic(LocalClusterState.class)) { + LocalClusterState state = mock(LocalClusterState.class); + localClusterStateMockedStatic.when(LocalClusterState::state).thenReturn(state); + when(state.getSettingValue(any(Settings.Key.class))).thenReturn(false); + + JSONObject jsonRequest = + new JSONObject( + StringUtils.format( + "{\"query\":\"" + + "DELETE " + + "FROM %s " + + "WHERE firstname LIKE 'A%%' AND age > 20\"}", + TestsConstants.TEST_INDEX_ACCOUNT)); + translate(jsonRequest.getString("query"), jsonRequest); } - - @Test - public void aggInHavingWithWhereOnParentAndNested() { - JSONObject explainSQL = explainSQLToJson("SELECT name " + - "FROM employee " + - "WHERE name LIKE '%smith%' AND nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1"); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/aggregations/count_0/value_count"), - equalTo("{\"field\":\"_index\"}")); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), - equalTo("{\"count_0\":\"projects@NESTED>projects@FILTER.count_0\"}")); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/filter/bool/must"), - equalTo("[{\"wildcard\":{\"projects.name\":{\"boost\":1,\"wildcard\":\"*security*\"}}}]")); + } + + 
@Test + public void queryFilter() throws IOException { + /* + * Human readable format of the request defined below: + * { + * "query": "SELECT * FROM accounts WHERE age > 25", + * "filter": { + * "range": { + * "balance": { + * "lte": 30000 + * } + * } + * } + * } + */ + String result = + explain( + String.format( + "{\"query\":\"" + + "SELECT * " + + "FROM %s " + + "WHERE age > 25\"," + + "\"filter\":{\"range\":{\"balance\":{\"lte\":30000}}}}", + TestsConstants.TEST_INDEX_ACCOUNT)); + String expectedOutput = + Files.toString( + new File( + getResourcePath() + + "src/test/resources/expectedOutput/json_filter_explain.json"), + StandardCharsets.UTF_8) + .replaceAll("\r", ""); + + assertThat(removeSpaces(result), equalTo(removeSpaces(expectedOutput))); + } + + private String removeSpaces(String s) { + return s.replaceAll("\\s+", ""); + } + + private String explainSQL(String sql) { + return explain(String.format("{\"query\":\"%s\"}", sql)); + } + + private JSONObject explainSQLToJson(String sql) { + return new JSONObject(explain(String.format("{\"query\":\"%s\"}", sql))); + } + + private String query(JSONObject jsonObject, String jsonPath) { + return jsonObject.query(jsonPath).toString(); + } + + private String explain(String request) { + try { + JSONObject jsonRequest = new JSONObject(request); + String sql = jsonRequest.getString("query"); + + return translate(sql, jsonRequest); + } catch (SqlParseException | SQLFeatureNotSupportedException | SQLFeatureDisabledException e) { + throw new ParserException("Illegal sql expr in request: " + request); } - - @Test - public void aggInHavingWithWhereOnNestedAndNested() { - JSONObject explainSQL = explainSQLToJson("SELECT name " + - "FROM employee " + - "WHERE nested('projects', projects.started_year > 2000 AND projects.name LIKE '%security%') " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1"); - - assertThat( - query(explainSQL, 
"/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/aggregations/count_0/value_count"), - equalTo("{\"field\":\"_index\"}")); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), - equalTo("{\"count_0\":\"projects@NESTED>projects@FILTER.count_0\"}")); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/filter/bool/must"), - equalTo("[{\"bool\":{\"adjust_pure_negative\":true,\"must\":[{\"range\":{\"projects.started_year\":{\"include_lower\":false,\"include_upper\":true,\"from\":2000,\"boost\":1,\"to\":null}}},{\"wildcard\":{\"projects.name\":{\"boost\":1,\"wildcard\":\"*security*\"}}}],\"boost\":1}}]")); - } - - @Test - public void aggInHavingWithWhereOnNestedOrNested() { - JSONObject explainSQL = explainSQLToJson("SELECT name " + - "FROM employee " + - "WHERE nested('projects', projects.started_year > 2000 OR projects.name LIKE '%security%') " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1"); - assertThat( - query(explainSQL, - "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/aggregations/count_0/value_count"), - equalTo("{\"field\":\"_index\"}")); - assertThat( - query(explainSQL, "/aggregations/name/aggregations/bucket_filter/bucket_selector/buckets_path"), - equalTo("{\"count_0\":\"projects@NESTED>projects@FILTER.count_0\"}")); - assertThat( - query(explainSQL, - "/aggregations/name/aggregations/projects@NESTED/aggregations/projects@FILTER/filter/bool/must"), - equalTo("[{\"bool\":{\"adjust_pure_negative\":true,\"should\":[{\"range\":{\"projects.started_year\":{\"include_lower\":false,\"include_upper\":true,\"from\":2000,\"boost\":1,\"to\":null}}},{\"wildcard\":{\"projects.name\":{\"boost\":1,\"wildcard\":\"*security*\"}}}],\"boost\":1}}]")); - } - - @Test - public void searchSanity() throws IOException { - String result = explain(String.format("{\"query\":\"" + - 
"SELECT * " + - "FROM %s " + - "WHERE firstname LIKE 'A%%' AND age > 20 " + - "GROUP BY gender " + - "ORDER BY _score\"}", TestsConstants.TEST_INDEX_ACCOUNT)); - String expectedOutput = Files.toString( - new File(getResourcePath() + "src/test/resources/expectedOutput/search_explain.json"), StandardCharsets.UTF_8) - .replaceAll("\r", ""); - - assertThat(removeSpaces(result), equalTo(removeSpaces(expectedOutput))); - } - - // This test was ignored because group by case function is not supported - @Ignore - @Test - public void aggregationQuery() throws IOException { - String result = explain(String.format("{\"query\":\"" + - "SELECT address, CASE WHEN gender='0' THEN 'aaa' ELSE 'bbb' END AS a2345, count(age) " + - "FROM %s " + - "GROUP BY terms('field'='address','execution_hint'='global_ordinals'), a2345\"}", TestsConstants.TEST_INDEX_ACCOUNT)); - String expectedOutput = Files.toString( - new File(getResourcePath() + "src/test/resources/expectedOutput/aggregation_query_explain.json"), StandardCharsets.UTF_8) - .replaceAll("\r", ""); - - assertThat(removeSpaces(result), equalTo(removeSpaces(expectedOutput))); - } - - @Test - public void deleteSanity() throws IOException { - try (MockedStatic localClusterStateMockedStatic = - Mockito.mockStatic(LocalClusterState.class)) { - LocalClusterState state = mock(LocalClusterState.class); - localClusterStateMockedStatic.when(LocalClusterState::state).thenReturn(state); - when(state.getSettingValue(any(Settings.Key.class))).thenReturn(true); - - String result = explain(String.format("{\"query\":\"" + - "DELETE " + - "FROM %s " + - "WHERE firstname LIKE 'A%%' AND age > 20\"}", TestsConstants.TEST_INDEX_ACCOUNT)); - String expectedOutput = Files.toString( - new File(getResourcePath() + "src/test/resources/expectedOutput/delete_explain.json"), StandardCharsets.UTF_8) - .replaceAll("\r", ""); - assertThat(removeSpaces(result), equalTo(removeSpaces(expectedOutput))); - } - } - - @Test(expected = SQLFeatureDisabledException.class) - 
public void deleteShouldThrowExceptionWhenDisabled() - throws SQLFeatureDisabledException, SQLFeatureNotSupportedException, - SqlParseException { - try (MockedStatic localClusterStateMockedStatic = - Mockito.mockStatic(LocalClusterState.class)) { - LocalClusterState state = mock(LocalClusterState.class); - localClusterStateMockedStatic.when(LocalClusterState::state).thenReturn(state); - when(state.getSettingValue(any(Settings.Key.class))).thenReturn(false); - - JSONObject jsonRequest = new JSONObject(StringUtils.format("{\"query\":\"" + - "DELETE " + - "FROM %s " + - "WHERE firstname LIKE 'A%%' AND age > 20\"}", TestsConstants.TEST_INDEX_ACCOUNT)); - translate(jsonRequest.getString("query"), jsonRequest); - } - } - - @Test - public void queryFilter() throws IOException { - /* - * Human readable format of the request defined below: - * { - * "query": "SELECT * FROM accounts WHERE age > 25", - * "filter": { - * "range": { - * "balance": { - * "lte": 30000 - * } - * } - * } - * } - */ - String result = explain(String.format("{\"query\":\"" + - "SELECT * " + - "FROM %s " + - "WHERE age > 25\"," + - "\"filter\":{\"range\":{\"balance\":{\"lte\":30000}}}}", TestsConstants.TEST_INDEX_ACCOUNT)); - String expectedOutput = Files.toString( - new File(getResourcePath() + "src/test/resources/expectedOutput/json_filter_explain.json"), StandardCharsets.UTF_8) - .replaceAll("\r", ""); - - assertThat(removeSpaces(result), equalTo(removeSpaces(expectedOutput))); - } - - private String removeSpaces(String s) { - return s.replaceAll("\\s+", ""); - } - - private String explainSQL(String sql) { - return explain(String.format("{\"query\":\"%s\"}", sql)); - } - - private JSONObject explainSQLToJson(String sql) { - return new JSONObject(explain(String.format("{\"query\":\"%s\"}", sql))); - } - - private String query(JSONObject jsonObject, String jsonPath) { - return jsonObject.query(jsonPath).toString(); - } - - private String explain(String request) { - try { - JSONObject jsonRequest = new 
JSONObject(request); - String sql = jsonRequest.getString("query"); - - return translate(sql, jsonRequest); - } catch (SqlParseException | SQLFeatureNotSupportedException | SQLFeatureDisabledException e) { - throw new ParserException("Illegal sql expr in request: " + request); - } - } - - private String translate(String sql, JSONObject jsonRequest) - throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { - Client mockClient = mock(Client.class); - CheckScriptContents.stubMockClient(mockClient); - QueryAction queryAction = - OpenSearchActionFactory - .create(mockClient, new QueryActionRequest(sql, columnTypeProvider, Format.JDBC)); - - SqlRequest sqlRequest = new SqlRequest(sql, jsonRequest); - queryAction.setSqlRequest(sqlRequest); - - SqlElasticRequestBuilder requestBuilder = queryAction.explain(); - return requestBuilder.explain(); - } - - private String getResourcePath() { - String projectRoot = System.getProperty("project.root"); - if ( projectRoot!= null && projectRoot.trim().length() > 0) { - return projectRoot.trim() + "/"; - } else { - return ""; - } + } + + private String translate(String sql, JSONObject jsonRequest) + throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { + Client mockClient = mock(Client.class); + CheckScriptContents.stubMockClient(mockClient); + QueryAction queryAction = + OpenSearchActionFactory.create( + mockClient, new QueryActionRequest(sql, columnTypeProvider, Format.JDBC)); + + SqlRequest sqlRequest = new SqlRequest(sql, jsonRequest); + queryAction.setSqlRequest(sqlRequest); + + SqlElasticRequestBuilder requestBuilder = queryAction.explain(); + return requestBuilder.explain(); + } + + private String getResourcePath() { + String projectRoot = System.getProperty("project.root"); + if (projectRoot != null && projectRoot.trim().length() > 0) { + return projectRoot.trim() + "/"; + } else { + return ""; } + } } diff --git 
a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/LocalClusterStateTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/LocalClusterStateTest.java index 00a39ce0d3..9fc04b9e3e 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/LocalClusterStateTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/LocalClusterStateTest.java @@ -3,16 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.junit.Assert.assertEquals; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -37,153 +34,160 @@ import org.opensearch.sql.legacy.util.TestsConstants; import org.opensearch.sql.opensearch.setting.OpenSearchSettings; -/** - * Local cluster state testing without covering OpenSearch logic, ex. resolve index pattern. - */ +/** Local cluster state testing without covering OpenSearch logic, ex. resolve index pattern. 
*/ public class LocalClusterStateTest { - private static final String INDEX_NAME = TestsConstants.TEST_INDEX_BANK; - private static final String TYPE_NAME = "account"; - - private static final String MAPPING = "{\n" + - " \"opensearch-sql_test_index_bank\": {\n" + - " \"mappings\": {\n" + - " \"account\": {\n" + - " \"properties\": {\n" + - " \"address\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"age\": {\n" + - " \"type\": \"integer\"\n" + - " },\n" + - " \"city\": {\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"employer\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " },\n" + - " \"state\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"raw\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " },\n" + - " \"manager\": {\n" + - " \"properties\": {\n" + - " \"name\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " },\n" + - " \"address\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " },\n" + - // ==== All required by IndexMetaData.fromXContent() ==== - " \"settings\": {\n" + - " \"index\": {\n" + - " \"number_of_shards\": 5,\n" + - " \"number_of_replicas\": 0,\n" + - " \"version\": {\n" + - " \"created\": \"6050399\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"mapping_version\": \"1\",\n" + - " \"settings_version\": \"1\"\n" + - //======================================================= - " }\n" + - "}"; - - @Mock private ClusterSettings clusterSettings; - - @Before - public void init() { - MockitoAnnotations.openMocks(this); - LocalClusterState.state(null); - mockLocalClusterState(MAPPING); - } - - @Test - public void getMappingForExistingField() { - IndexMappings indexMappings = 
LocalClusterState.state().getFieldMappings(new String[]{INDEX_NAME}); - Assert.assertNotNull(indexMappings); - - FieldMappings fieldMappings = indexMappings.mapping(INDEX_NAME); - Assert.assertNotNull(fieldMappings); - - Assert.assertEquals("text", fieldMappings.mapping("address").get("type")); - Assert.assertEquals("integer", fieldMappings.mapping("age").get("type")); - Assert.assertEquals("keyword", fieldMappings.mapping("city").get("type")); - Assert.assertEquals("text", fieldMappings.mapping("employer").get("type")); - - Assert.assertEquals("text", fieldMappings.mapping("manager.name").get("type")); - Assert.assertEquals("keyword", fieldMappings.mapping("manager.address").get("type")); + private static final String INDEX_NAME = TestsConstants.TEST_INDEX_BANK; + private static final String TYPE_NAME = "account"; + + private static final String MAPPING = + "{\n" + + " \"opensearch-sql_test_index_bank\": {\n" + + " \"mappings\": {\n" + + " \"account\": {\n" + + " \"properties\": {\n" + + " \"address\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"age\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + " \"city\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"employer\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " },\n" + + " \"state\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"raw\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " },\n" + + " \"manager\": {\n" + + " \"properties\": {\n" + + " \"name\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " },\n" + + " \"address\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " },\n" + + + // ==== All required by 
IndexMetaData.fromXContent() ==== + " \"settings\": {\n" + + " \"index\": {\n" + + " \"number_of_shards\": 5,\n" + + " \"number_of_replicas\": 0,\n" + + " \"version\": {\n" + + " \"created\": \"6050399\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"mapping_version\": \"1\",\n" + + " \"settings_version\": \"1\"\n" + + + // ======================================================= + " }\n" + + "}"; + + @Mock private ClusterSettings clusterSettings; + + @Before + public void init() { + MockitoAnnotations.openMocks(this); + LocalClusterState.state(null); + mockLocalClusterState(MAPPING); + } + + @Test + public void getMappingForExistingField() { + IndexMappings indexMappings = + LocalClusterState.state().getFieldMappings(new String[] {INDEX_NAME}); + Assert.assertNotNull(indexMappings); + + FieldMappings fieldMappings = indexMappings.mapping(INDEX_NAME); + Assert.assertNotNull(fieldMappings); + + Assert.assertEquals("text", fieldMappings.mapping("address").get("type")); + Assert.assertEquals("integer", fieldMappings.mapping("age").get("type")); + Assert.assertEquals("keyword", fieldMappings.mapping("city").get("type")); + Assert.assertEquals("text", fieldMappings.mapping("employer").get("type")); + + Assert.assertEquals("text", fieldMappings.mapping("manager.name").get("type")); + Assert.assertEquals("keyword", fieldMappings.mapping("manager.address").get("type")); + } + + @Test + public void getMappingForInvalidField() { + IndexMappings indexMappings = + LocalClusterState.state().getFieldMappings(new String[] {INDEX_NAME}); + FieldMappings fieldMappings = indexMappings.mapping(INDEX_NAME); + + Assert.assertNull(fieldMappings.mapping("work-email")); + Assert.assertNull(fieldMappings.mapping("manager.home-address")); + Assert.assertNull(fieldMappings.mapping("manager.name.first")); + Assert.assertNull(fieldMappings.mapping("manager.name.first.uppercase")); + } + + @Test + public void getMappingFromCache() throws IOException { + // Mock here again for verification below 
and mock addListener() + ClusterService mockService = mockClusterService(MAPPING); + ClusterStateListener[] listener = new ClusterStateListener[1]; // Trick to access inside lambda + doAnswer( + invocation -> { + listener[0] = (ClusterStateListener) invocation.getArguments()[0]; + return null; + }) + .when(mockService) + .addListener(any()); + LocalClusterState.state().setClusterService(mockService); + + // 1.Actual findMappings be invoked only once + for (int i = 0; i < 10; i++) { + LocalClusterState.state().getFieldMappings(new String[] {INDEX_NAME}); } - - @Test - public void getMappingForInvalidField() { - IndexMappings indexMappings = LocalClusterState.state().getFieldMappings(new String[]{INDEX_NAME}); - FieldMappings fieldMappings = indexMappings.mapping(INDEX_NAME); - - Assert.assertNull(fieldMappings.mapping("work-email")); - Assert.assertNull(fieldMappings.mapping("manager.home-address")); - Assert.assertNull(fieldMappings.mapping("manager.name.first")); - Assert.assertNull(fieldMappings.mapping("manager.name.first.uppercase")); - } - - @Test - public void getMappingFromCache() throws IOException { - // Mock here again for verification below and mock addListener() - ClusterService mockService = mockClusterService(MAPPING); - ClusterStateListener[] listener = new ClusterStateListener[1]; // Trick to access inside lambda - doAnswer(invocation -> { - listener[0] = (ClusterStateListener) invocation.getArguments()[0]; - return null; - }).when(mockService).addListener(any()); - LocalClusterState.state().setClusterService(mockService); - - // 1.Actual findMappings be invoked only once - for (int i = 0; i < 10; i++) { - LocalClusterState.state().getFieldMappings(new String[]{INDEX_NAME}); - } - verify(mockService.state().metadata(), times(1)).findMappings(eq(new String[]{INDEX_NAME}), any()); - - // 2.Fire cluster state change event - Assert.assertNotNull(listener[0]); - ClusterChangedEvent mockEvent = mock(ClusterChangedEvent.class); - 
when(mockEvent.metadataChanged()).thenReturn(true); - listener[0].clusterChanged(mockEvent); - - // 3.Cache should be invalidated and call findMapping another time only - for (int i = 0; i < 5; i++) { - LocalClusterState.state().getFieldMappings(new String[]{INDEX_NAME}); - } - verify(mockService.state().metadata(), times(2)).findMappings(eq(new String[]{INDEX_NAME}), any()); + verify(mockService.state().metadata(), times(1)) + .findMappings(eq(new String[] {INDEX_NAME}), any()); + + // 2.Fire cluster state change event + Assert.assertNotNull(listener[0]); + ClusterChangedEvent mockEvent = mock(ClusterChangedEvent.class); + when(mockEvent.metadataChanged()).thenReturn(true); + listener[0].clusterChanged(mockEvent); + + // 3.Cache should be invalidated and call findMapping another time only + for (int i = 0; i < 5; i++) { + LocalClusterState.state().getFieldMappings(new String[] {INDEX_NAME}); } - - @Test - public void getDefaultValueForQuerySlowLog() { - when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); - OpenSearchSettings settings = new OpenSearchSettings(clusterSettings); - assertEquals(Integer.valueOf(2), settings.getSettingValue(Settings.Key.SQL_SLOWLOG)); - } - + verify(mockService.state().metadata(), times(2)) + .findMappings(eq(new String[] {INDEX_NAME}), any()); + } + + @Test + public void getDefaultValueForQuerySlowLog() { + when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); + OpenSearchSettings settings = new OpenSearchSettings(clusterSettings); + assertEquals(Integer.valueOf(2), settings.getSettingValue(Settings.Key.SQL_SLOWLOG)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/MathFunctionsTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/MathFunctionsTest.java index b52dd3efc6..ad01cb0e0f 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/MathFunctionsTest.java +++ 
b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/MathFunctionsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.junit.Assert.assertTrue; @@ -17,437 +16,291 @@ public class MathFunctionsTest { - private static SqlParser parser; - - @BeforeClass - public static void init() { parser = new SqlParser(); } - - /** Tests for case insensitivity when calling SQL functions */ - @Test - public void lowerCaseInSelect() { - String query = "SELECT abs(age) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.abs(doc['age'].value)")); - } - - @Test - public void upperCaseInSelect() { - String query = "SELECT ABS(age) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.abs(doc['age'].value)")); - } - - @Test - public void lowerCaseInWhere() { - String query = "SELECT * " + - "FROM bank " + - "WHERE sqrt(age) > 5"; - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.sqrt(doc['age'].value)")); - assertTrue( - CheckScriptContents.scriptHasPattern( - scriptFilter, - "sqrt_\\d+ > 5")); - } - - @Test - public void upperCaseInWhere() { - String query = "SELECT * " + - "FROM bank " + - "WHERE SQRT(age) > 5"; - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.sqrt(doc['age'].value)")); - assertTrue( - CheckScriptContents.scriptHasPattern( - scriptFilter, - "sqrt_\\d+ > 5")); - } - - /** Tests for constant functions */ - @Test - public void eulersNumberInSelect() { - String query = "SELECT E() " + - "FROM bank"; - 
ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.E")); - } - - @Test - public void eulersNumberInWhere() { - String query = "SELECT * " + - "FROM bank " + - "WHERE E() > 2"; - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.E")); - assertTrue( - CheckScriptContents.scriptHasPattern( - scriptFilter, - "E_\\d+ > 2")); - } - - @Test - public void piInSelect() { - String query = "SELECT PI() " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.PI")); - } - - @Test - public void piInWhere() { - String query = "SELECT * " + - "FROM bank " + - "WHERE PI() < 4"; - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.PI")); - assertTrue( - CheckScriptContents.scriptHasPattern( - scriptFilter, - "PI_\\d+ < 4")); - } - - /** Tests for general math functions */ - @Test - public void expm1WithPropertyArgument() { - String query = "SELECT * " + - "FROM bank " + - "WHERE expm1(age) > 10"; - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.expm1(doc['age'].value)")); - assertTrue( - CheckScriptContents.scriptHasPattern( - scriptFilter, - "expm1_\\d+ > 10")); - } - - @Test - public void expm1WithValueArgument() { - String query = "SELECT * " + - "FROM bank " + - "WHERE expm1(5) > 10"; - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.expm1(5)")); - assertTrue( - 
CheckScriptContents.scriptHasPattern( - scriptFilter, - "expm1_\\d+ > 10")); - } - - - /** Tests for trigonometric functions */ - @Test - public void degreesWithPropertyArgument() { - String query = "SELECT degrees(age) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.toDegrees(doc['age'].value)")); - } - - @Test - public void degreesWithValueArgument() { - String query = "SELECT degrees(10) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.toDegrees(10)")); - } - - @Test - public void radiansWithPropertyArgument() { - String query = "SELECT radians(age) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.toRadians(doc['age'].value)")); - } - - @Test - public void radiansWithValueArgument() { - String query = "SELECT radians(180) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.toRadians(180)")); - } - - @Test - public void sinWithPropertyArgument() { - String query = "SELECT sin(radians(age)) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.toRadians(doc['age'].value)")); - assertTrue( - CheckScriptContents.scriptHasPattern( - scriptField, - "Math.sin\\(radians_\\d+\\)")); - } - - @Test - public void sinWithValueArgument() { - String query = "SELECT sin(radians(180)) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - 
scriptField, - "Math.toRadians(180)")); - assertTrue( - CheckScriptContents.scriptHasPattern( - scriptField, - "Math.sin\\(radians_\\d+\\)")); - } - - @Test - public void atanWithPropertyArgument() { - String query = "SELECT atan(age) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.atan(doc['age'].value)")); - } - - @Test - public void atanWithValueArgument() { - String query = "SELECT atan(1) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.atan(1)")); - } - - @Test - public void atanWithFunctionArgument() { - String query = "SELECT atan(PI() / 2) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.PI")); - assertTrue( - CheckScriptContents.scriptHasPattern( - scriptField, - "PI_\\d+ / 2")); - assertTrue( - CheckScriptContents.scriptHasPattern( - scriptField, - "Math.atan\\(divide_\\d+\\)")); - } - - @Test - public void coshWithPropertyArgument() { - String query = "SELECT cosh(age) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.cosh(doc['age'].value)")); - } - - @Test - public void coshWithValueArgument() { - String query = "SELECT cosh(0) " + - "FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.cosh(0)")); - } - - @Test - public void powerWithPropertyArgument() { - String query = "SELECT POWER(age, 2) FROM bank WHERE POWER(balance, 3) > 0"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - 
assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.pow(doc['age'].value, 2)")); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.pow(doc['balance'].value, 3)")); - } - - @Test - public void atan2WithPropertyArgument() { - String query = "SELECT ATAN2(age, 2) FROM bank WHERE ATAN2(balance, 3) > 0"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.atan2(doc['age'].value, 2)")); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.atan2(doc['balance'].value, 3)")); - } - - @Test - public void cotWithPropertyArgument() { - String query = "SELECT COT(age) FROM bank WHERE COT(balance) > 0"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "1 / Math.tan(doc['age'].value)")); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "1 / Math.tan(doc['balance'].value)")); - } - - @Test - public void signWithFunctionPropertyArgument() { - String query = "SELECT SIGN(age) FROM bank WHERE SIGNUM(balance) = 1"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue(CheckScriptContents.scriptContainsString( - scriptField, - "Math.signum(doc['age'].value)")); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.signum(doc['balance'].value)")); - } - - @Test - public void logWithOneParam() { - String query = "SELECT LOG(age) FROM 
bank WHERE LOG(age) = 5.0"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.log(doc['age'].value)")); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.log(doc['age'].value)")); - } - - @Test - public void logWithTwoParams() { - String query = "SELECT LOG(3, age) FROM bank WHERE LOG(3, age) = 5.0"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.log(doc['age'].value)/Math.log(3)")); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.log(doc['age'].value)/Math.log(3)")); - } - - @Test - public void log10Test() { - String query = "SELECT LOG10(age) FROM accounts"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.log10(doc['age'].value)" - ) - ); - } - - @Test - public void lnTest() { - String query = "SELECT LN(age) FROM age WHERE LN(age) = 5.0"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Math.log(doc['age'].value)")); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Math.log(doc['age'].value)")); - } - - @Test - public void randWithoutParamTest() { - String query = "SELECT RAND() FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "new 
Random().nextDouble()" - ) - ); - } - - @Test - public void randWithOneParamTest() { - String query = "SELECT RAND(age) FROM bank"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "new Random(doc['age'].value).nextDouble()" - ) - ); - } + private static SqlParser parser; + + @BeforeClass + public static void init() { + parser = new SqlParser(); + } + + /** Tests for case insensitivity when calling SQL functions */ + @Test + public void lowerCaseInSelect() { + String query = "SELECT abs(age) " + "FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.abs(doc['age'].value)")); + } + + @Test + public void upperCaseInSelect() { + String query = "SELECT ABS(age) " + "FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.abs(doc['age'].value)")); + } + + @Test + public void lowerCaseInWhere() { + String query = "SELECT * " + "FROM bank " + "WHERE sqrt(age) > 5"; + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString(scriptFilter, "Math.sqrt(doc['age'].value)")); + assertTrue(CheckScriptContents.scriptHasPattern(scriptFilter, "sqrt_\\d+ > 5")); + } + + @Test + public void upperCaseInWhere() { + String query = "SELECT * " + "FROM bank " + "WHERE SQRT(age) > 5"; + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString(scriptFilter, "Math.sqrt(doc['age'].value)")); + assertTrue(CheckScriptContents.scriptHasPattern(scriptFilter, "sqrt_\\d+ > 5")); + } + + /** Tests for constant functions */ + @Test + public void eulersNumberInSelect() { + String query = "SELECT E() " + 
"FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.E")); + } + + @Test + public void eulersNumberInWhere() { + String query = "SELECT * " + "FROM bank " + "WHERE E() > 2"; + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue(CheckScriptContents.scriptContainsString(scriptFilter, "Math.E")); + assertTrue(CheckScriptContents.scriptHasPattern(scriptFilter, "E_\\d+ > 2")); + } + + @Test + public void piInSelect() { + String query = "SELECT PI() " + "FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.PI")); + } + + @Test + public void piInWhere() { + String query = "SELECT * " + "FROM bank " + "WHERE PI() < 4"; + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue(CheckScriptContents.scriptContainsString(scriptFilter, "Math.PI")); + assertTrue(CheckScriptContents.scriptHasPattern(scriptFilter, "PI_\\d+ < 4")); + } + + /** Tests for general math functions */ + @Test + public void expm1WithPropertyArgument() { + String query = "SELECT * " + "FROM bank " + "WHERE expm1(age) > 10"; + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString(scriptFilter, "Math.expm1(doc['age'].value)")); + assertTrue(CheckScriptContents.scriptHasPattern(scriptFilter, "expm1_\\d+ > 10")); + } + + @Test + public void expm1WithValueArgument() { + String query = "SELECT * " + "FROM bank " + "WHERE expm1(5) > 10"; + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue(CheckScriptContents.scriptContainsString(scriptFilter, "Math.expm1(5)")); + assertTrue(CheckScriptContents.scriptHasPattern(scriptFilter, "expm1_\\d+ > 10")); + } + + /** 
Tests for trigonometric functions */ + @Test + public void degreesWithPropertyArgument() { + String query = "SELECT degrees(age) " + "FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "Math.toDegrees(doc['age'].value)")); + } + + @Test + public void degreesWithValueArgument() { + String query = "SELECT degrees(10) " + "FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.toDegrees(10)")); + } + + @Test + public void radiansWithPropertyArgument() { + String query = "SELECT radians(age) " + "FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "Math.toRadians(doc['age'].value)")); + } + + @Test + public void radiansWithValueArgument() { + String query = "SELECT radians(180) " + "FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.toRadians(180)")); + } + + @Test + public void sinWithPropertyArgument() { + String query = "SELECT sin(radians(age)) " + "FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "Math.toRadians(doc['age'].value)")); + assertTrue(CheckScriptContents.scriptHasPattern(scriptField, "Math.sin\\(radians_\\d+\\)")); + } + + @Test + public void sinWithValueArgument() { + String query = "SELECT sin(radians(180)) " + "FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.toRadians(180)")); + assertTrue(CheckScriptContents.scriptHasPattern(scriptField, "Math.sin\\(radians_\\d+\\)")); + } + + @Test + 
public void atanWithPropertyArgument() { + String query = "SELECT atan(age) " + "FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "Math.atan(doc['age'].value)")); + } + + @Test + public void atanWithValueArgument() { + String query = "SELECT atan(1) " + "FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.atan(1)")); + } + + @Test + public void atanWithFunctionArgument() { + String query = "SELECT atan(PI() / 2) " + "FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.PI")); + assertTrue(CheckScriptContents.scriptHasPattern(scriptField, "PI_\\d+ / 2")); + assertTrue(CheckScriptContents.scriptHasPattern(scriptField, "Math.atan\\(divide_\\d+\\)")); + } + + @Test + public void coshWithPropertyArgument() { + String query = "SELECT cosh(age) " + "FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "Math.cosh(doc['age'].value)")); + } + + @Test + public void coshWithValueArgument() { + String query = "SELECT cosh(0) " + "FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.cosh(0)")); + } + + @Test + public void powerWithPropertyArgument() { + String query = "SELECT POWER(age, 2) FROM bank WHERE POWER(balance, 3) > 0"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "Math.pow(doc['age'].value, 2)")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + 
CheckScriptContents.scriptContainsString( + scriptFilter, "Math.pow(doc['balance'].value, 3)")); + } + + @Test + public void atan2WithPropertyArgument() { + String query = "SELECT ATAN2(age, 2) FROM bank WHERE ATAN2(balance, 3) > 0"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "Math.atan2(doc['age'].value, 2)")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptFilter, "Math.atan2(doc['balance'].value, 3)")); + } + + @Test + public void cotWithPropertyArgument() { + String query = "SELECT COT(age) FROM bank WHERE COT(balance) > 0"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "1 / Math.tan(doc['age'].value)")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptFilter, "1 / Math.tan(doc['balance'].value)")); + } + + @Test + public void signWithFunctionPropertyArgument() { + String query = "SELECT SIGN(age) FROM bank WHERE SIGNUM(balance) = 1"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "Math.signum(doc['age'].value)")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptFilter, "Math.signum(doc['balance'].value)")); + } + + @Test + public void logWithOneParam() { + String query = "SELECT LOG(age) FROM bank WHERE LOG(age) = 5.0"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.log(doc['age'].value)")); + + ScriptFilter scriptFilter 
= CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString(scriptFilter, "Math.log(doc['age'].value)")); + } + + @Test + public void logWithTwoParams() { + String query = "SELECT LOG(3, age) FROM bank WHERE LOG(3, age) = 5.0"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptField, "Math.log(doc['age'].value)/Math.log(3)")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptFilter, "Math.log(doc['age'].value)/Math.log(3)")); + } + + @Test + public void log10Test() { + String query = "SELECT LOG10(age) FROM accounts"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "Math.log10(doc['age'].value)")); + } + + @Test + public void lnTest() { + String query = "SELECT LN(age) FROM age WHERE LN(age) = 5.0"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "Math.log(doc['age'].value)")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString(scriptFilter, "Math.log(doc['age'].value)")); + } + + @Test + public void randWithoutParamTest() { + String query = "SELECT RAND() FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "new Random().nextDouble()")); + } + + @Test + public void randWithOneParamTest() { + String query = "SELECT RAND(age) FROM bank"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptField, "new 
Random(doc['age'].value).nextDouble()")); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/NestedFieldProjectionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/NestedFieldProjectionTest.java index 63af01caaa..d5d9194036 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/NestedFieldProjectionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/NestedFieldProjectionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.hamcrest.MatcherAssert.assertThat; @@ -52,372 +51,284 @@ public class NestedFieldProjectionTest { - @Test - public void regression() { - assertThat(query("SELECT region FROM team"), is(anything())); - assertThat(query("SELECT region FROM team WHERE nested(employees.age) = 30"), is(anything())); - assertThat(query("SELECT * FROM team WHERE region = 'US'"), is(anything())); - } - - @Test - public void nestedFieldSelectAll() { - assertThat( - query("SELECT nested(employees.*) FROM team"), - source( - boolQuery( - filter( - boolQuery( - must( - nestedQuery( - path("employees"), - innerHits("employees.*") - ) - ) - ) - ) - ) - ) - ); - } - - @Test - public void nestedFieldInSelect() { - assertThat( - query("SELECT nested(employees.firstname) FROM team"), - source( - boolQuery( - filter( - boolQuery( - must( - nestedQuery( - path("employees"), - innerHits("employees.firstname") - ) - ) - ) - ) - ) - ) - ); - } - - @Test - public void regularAndNestedFieldInSelect() { - assertThat( - query("SELECT region, nested(employees.firstname) FROM team"), - source( - boolQuery( - filter( - boolQuery( - must( - nestedQuery( - path("employees"), - innerHits("employees.firstname") - ) - ) - ) - ) - ), - fetchSource("region") - ) - ); - } - - /* - // Should be integration test - @Test - public void nestedFieldInWhereSelectAll() {} - */ - - @Test - public void nestedFieldInSelectAndWhere() { - assertThat( - 
query("SELECT nested(employees.firstname) " + - " FROM team " + - " WHERE nested(employees.age) = 30"), - source( - boolQuery( - filter( - boolQuery( - must( - nestedQuery( - path("employees"), - innerHits("employees.firstname") - ) - ) - ) - ) - ) - ) - ); - } - - @Test - public void regularAndNestedFieldInSelectAndWhere() { - assertThat( - query("SELECT region, nested(employees.firstname) " + - " FROM team " + - " WHERE nested(employees.age) = 30"), - source( - boolQuery( - filter( - boolQuery( - must( - nestedQuery( - innerHits("employees.firstname") - ) - ) - ) - ) - ), - fetchSource("region") - ) - ); - } - - @Test - public void multipleSameNestedFields() { - assertThat( - query("SELECT nested(employees.firstname), nested(employees.lastname) " + - " FROM team " + - " WHERE nested(\"employees\", employees.age = 30 AND employees.firstname LIKE 'John')"), - source( - boolQuery( - filter( - boolQuery( - must( - nestedQuery( - path("employees"), - innerHits("employees.firstname", "employees.lastname") - ) - ) - ) - ) - ) - ) - ); + @Test + public void regression() { + assertThat(query("SELECT region FROM team"), is(anything())); + assertThat(query("SELECT region FROM team WHERE nested(employees.age) = 30"), is(anything())); + assertThat(query("SELECT * FROM team WHERE region = 'US'"), is(anything())); + } + + @Test + public void nestedFieldSelectAll() { + assertThat( + query("SELECT nested(employees.*) FROM team"), + source( + boolQuery( + filter( + boolQuery(must(nestedQuery(path("employees"), innerHits("employees.*")))))))); + } + + @Test + public void nestedFieldInSelect() { + assertThat( + query("SELECT nested(employees.firstname) FROM team"), + source( + boolQuery( + filter( + boolQuery( + must(nestedQuery(path("employees"), innerHits("employees.firstname")))))))); + } + + @Test + public void regularAndNestedFieldInSelect() { + assertThat( + query("SELECT region, nested(employees.firstname) FROM team"), + source( + boolQuery( + filter( + boolQuery( + 
must(nestedQuery(path("employees"), innerHits("employees.firstname")))))), + fetchSource("region"))); + } + + /* + // Should be integration test + @Test + public void nestedFieldInWhereSelectAll() {} + */ + + @Test + public void nestedFieldInSelectAndWhere() { + assertThat( + query( + "SELECT nested(employees.firstname) " + + " FROM team " + + " WHERE nested(employees.age) = 30"), + source( + boolQuery( + filter( + boolQuery( + must(nestedQuery(path("employees"), innerHits("employees.firstname")))))))); + } + + @Test + public void regularAndNestedFieldInSelectAndWhere() { + assertThat( + query( + "SELECT region, nested(employees.firstname) " + + " FROM team " + + " WHERE nested(employees.age) = 30"), + source( + boolQuery(filter(boolQuery(must(nestedQuery(innerHits("employees.firstname")))))), + fetchSource("region"))); + } + + @Test + public void multipleSameNestedFields() { + assertThat( + query( + "SELECT nested(employees.firstname), nested(employees.lastname) FROM team WHERE" + + " nested(\"employees\", employees.age = 30 AND employees.firstname LIKE 'John')"), + source( + boolQuery( + filter( + boolQuery( + must( + nestedQuery( + path("employees"), + innerHits("employees.firstname", "employees.lastname")))))))); + } + + @Test + public void multipleDifferentNestedFields() { + assertThat( + query( + "SELECT region, nested(employees.firstname), nested(manager.name) " + + " FROM team " + + " WHERE nested(employees.age) = 30 AND nested(manager.age) = 50"), + source( + boolQuery( + filter( + boolQuery( + must( + boolQuery( + must( + nestedQuery( + path("employees"), innerHits("employees.firstname")), + nestedQuery(path("manager"), innerHits("manager.name")))))))), + fetchSource("region"))); + } + + @Test + public void leftJoinWithSelectAll() { + assertThat( + query("SELECT * FROM team AS t LEFT JOIN t.projects AS p "), + source( + boolQuery( + filter( + boolQuery( + should( + boolQuery(mustNot(nestedQuery(path("projects")))), + nestedQuery(path("projects"), 
innerHits("projects.*")))))))); + } + + @Test + public void leftJoinWithSpecificFields() { + assertThat( + query("SELECT t.name, p.name, p.started_year FROM team AS t LEFT JOIN t.projects AS p "), + source( + boolQuery( + filter( + boolQuery( + should( + boolQuery(mustNot(nestedQuery(path("projects")))), + nestedQuery( + path("projects"), + innerHits("projects.name", "projects.started_year")))))), + fetchSource("name"))); + } + + private Matcher source(Matcher queryMatcher) { + return featureValueOf("query", queryMatcher, SearchSourceBuilder::query); + } + + private Matcher source( + Matcher queryMatcher, Matcher fetchSourceMatcher) { + return allOf( + featureValueOf("query", queryMatcher, SearchSourceBuilder::query), + featureValueOf("fetchSource", fetchSourceMatcher, SearchSourceBuilder::fetchSource)); + } + + /** + * Asserting instanceOf and continue other chained matchers of subclass requires explicity cast + */ + @SuppressWarnings("unchecked") + private Matcher boolQuery(Matcher matcher) { + return (Matcher) allOf(instanceOf(BoolQueryBuilder.class), matcher); + } + + @SafeVarargs + @SuppressWarnings("unchecked") + private final Matcher nestedQuery(Matcher... matchers) { + return (Matcher) + both(is(Matchers.instanceOf(NestedQueryBuilder.class))) + .and(allOf(matchers)); + } + + @SafeVarargs + private final FeatureMatcher> filter( + Matcher... matchers) { + return hasClauses("filter", BoolQueryBuilder::filter, matchers); + } + + @SafeVarargs + private final FeatureMatcher> must( + Matcher... matchers) { + return hasClauses("must", BoolQueryBuilder::must, matchers); + } + + @SafeVarargs + private final FeatureMatcher> mustNot( + Matcher... matchers) { + return hasClauses("must_not", BoolQueryBuilder::mustNot, matchers); + } + + @SafeVarargs + private final FeatureMatcher> should( + Matcher... 
matchers) { + return hasClauses("should", BoolQueryBuilder::should, matchers); + } + + /** Hide contains() assertion to simplify */ + @SafeVarargs + private final FeatureMatcher> hasClauses( + String name, + Function> func, + Matcher... matchers) { + return new FeatureMatcher>( + contains(matchers), name, name) { + @Override + protected List featureValueOf(BoolQueryBuilder query) { + return func.apply(query); + } + }; + } + + private Matcher path(String expected) { + return HasFieldWithValue.hasFieldWithValue("path", "path", is(equalTo(expected))); + } + + /** Skip intermediate property along the path. Hide arrayContaining assertion to simplify. */ + private FeatureMatcher innerHits(String... expected) { + return featureValueOf( + "innerHits", + arrayContaining(expected), + (nestedQuery -> nestedQuery.innerHit().getFetchSourceContext().includes())); + } + + @SuppressWarnings("unchecked") + private Matcher fetchSource(String... expected) { + if (expected.length == 0) { + return anyOf( + is(nullValue()), + featureValueOf("includes", is(nullValue()), FetchSourceContext::includes), + featureValueOf("includes", is(emptyArray()), FetchSourceContext::includes)); } - - @Test - public void multipleDifferentNestedFields() { - assertThat( - query("SELECT region, nested(employees.firstname), nested(manager.name) " + - " FROM team " + - " WHERE nested(employees.age) = 30 AND nested(manager.age) = 50"), - source( - boolQuery( - filter( - boolQuery( - must( - boolQuery( - must( - nestedQuery( - path("employees"), - innerHits("employees.firstname") - ), - nestedQuery( - path("manager"), - innerHits("manager.name") - ) - ) - ) - ) - ) - ) - ), - fetchSource("region") - ) - ); - } - - - @Test - public void leftJoinWithSelectAll() { - assertThat( - query("SELECT * FROM team AS t LEFT JOIN t.projects AS p "), - source( - boolQuery( - filter( - boolQuery( - should( - boolQuery( - mustNot( - nestedQuery( - path("projects") - ) - ) - ), - nestedQuery( - path("projects"), - 
innerHits("projects.*") - ) - ) - ) - ) - ) - ) - ); - } - - @Test - public void leftJoinWithSpecificFields() { - assertThat( - query("SELECT t.name, p.name, p.started_year FROM team AS t LEFT JOIN t.projects AS p "), - source( - boolQuery( - filter( - boolQuery( - should( - boolQuery( - mustNot( - nestedQuery( - path("projects") - ) - ) - ), - nestedQuery( - path("projects"), - innerHits("projects.name", "projects.started_year") - ) - ) - ) - ) - ), - fetchSource("name") - ) - ); - } - - private Matcher source(Matcher queryMatcher) { - return featureValueOf("query", queryMatcher, SearchSourceBuilder::query); - } - - private Matcher source(Matcher queryMatcher, - Matcher fetchSourceMatcher) { - return allOf( - featureValueOf("query", queryMatcher, SearchSourceBuilder::query), - featureValueOf("fetchSource", fetchSourceMatcher, SearchSourceBuilder::fetchSource) - ); - } - - /** Asserting instanceOf and continue other chained matchers of subclass requires explicity cast */ - @SuppressWarnings("unchecked") - private Matcher boolQuery(Matcher matcher) { - return (Matcher) allOf(instanceOf(BoolQueryBuilder.class), matcher); + return featureValueOf( + "includes", contains(expected), fetchSource -> Arrays.asList(fetchSource.includes())); + } + + private FeatureMatcher featureValueOf( + String name, Matcher subMatcher, Function getter) { + return new FeatureMatcher(subMatcher, name, name) { + @Override + protected U featureValueOf(T actual) { + return getter.apply(actual); + } + }; + } + + private SearchSourceBuilder query(String sql) { + SQLQueryExpr expr = parseSql(sql); + if (sql.contains("nested")) { + return translate(expr).source(); } - @SafeVarargs - @SuppressWarnings("unchecked") - private final Matcher nestedQuery(Matcher... matchers) { - return (Matcher) both(is(Matchers.instanceOf(NestedQueryBuilder.class))). 
- and(allOf(matchers)); + expr = rewrite(expr); + return translate(expr).source(); + } + + private SearchRequest translate(SQLQueryExpr expr) { + try { + Client mockClient = Mockito.mock(Client.class); + SearchRequestBuilder request = new SearchRequestBuilder(mockClient, SearchAction.INSTANCE); + Select select = new SqlParser().parseSelect(expr); + + DefaultQueryAction action = new DefaultQueryAction(mockClient, select); + action.initialize(request); + action.setFields(select.getFields()); + + if (select.getWhere() != null) { + request.setQuery(QueryMaker.explain(select.getWhere(), select.isQuery)); + } + new NestedFieldProjection(request).project(select.getFields(), select.getNestedJoinType()); + return request.request(); + } catch (SqlParseException e) { + throw new ParserException("Illegal sql expr: " + expr.toString()); } + } - @SafeVarargs - private final FeatureMatcher> filter(Matcher... matchers) { - return hasClauses("filter", BoolQueryBuilder::filter, matchers); + private SQLQueryExpr parseSql(String sql) { + ElasticSqlExprParser parser = new ElasticSqlExprParser(sql); + SQLExpr expr = parser.expr(); + if (parser.getLexer().token() != Token.EOF) { + throw new ParserException("Illegal sql: " + sql); } + return (SQLQueryExpr) expr; + } - @SafeVarargs - private final FeatureMatcher> must(Matcher... matchers) { - return hasClauses("must", BoolQueryBuilder::must, matchers); - } - - @SafeVarargs - private final FeatureMatcher> mustNot(Matcher... matchers) { - return hasClauses("must_not", BoolQueryBuilder::mustNot, matchers); - } - - @SafeVarargs - private final FeatureMatcher> should(Matcher... matchers) { - return hasClauses("should", BoolQueryBuilder::should, matchers); - } - - /** Hide contains() assertion to simplify */ - @SafeVarargs - private final FeatureMatcher> hasClauses(String name, - Function> func, - Matcher... 
matchers) { - return new FeatureMatcher>(contains(matchers), name, name) { - @Override - protected List featureValueOf(BoolQueryBuilder query) { - return func.apply(query); - } - }; - } - - private Matcher path(String expected) { - return HasFieldWithValue.hasFieldWithValue("path", "path", is(equalTo(expected))); - } - - /** Skip intermediate property along the path. Hide arrayContaining assertion to simplify. */ - private FeatureMatcher innerHits(String... expected) { - return featureValueOf("innerHits", - arrayContaining(expected), - (nestedQuery -> nestedQuery.innerHit().getFetchSourceContext().includes())); - } - - @SuppressWarnings("unchecked") - private Matcher fetchSource(String... expected) { - if (expected.length == 0) { - return anyOf(is(nullValue()), - featureValueOf("includes", is(nullValue()), FetchSourceContext::includes), - featureValueOf("includes", is(emptyArray()), FetchSourceContext::includes)); - } - return featureValueOf("includes", contains(expected), fetchSource -> Arrays.asList(fetchSource.includes())); - } - - private FeatureMatcher featureValueOf(String name, Matcher subMatcher, Function getter) { - return new FeatureMatcher(subMatcher, name, name) { - @Override - protected U featureValueOf(T actual) { - return getter.apply(actual); - } - }; - } - - private SearchSourceBuilder query(String sql) { - SQLQueryExpr expr = parseSql(sql); - if (sql.contains("nested")) { - return translate(expr).source(); - } - - expr = rewrite(expr); - return translate(expr).source(); - } - - private SearchRequest translate(SQLQueryExpr expr) { - try { - Client mockClient = Mockito.mock(Client.class); - SearchRequestBuilder request = new SearchRequestBuilder(mockClient, SearchAction.INSTANCE); - Select select = new SqlParser().parseSelect(expr); - - DefaultQueryAction action = new DefaultQueryAction(mockClient, select); - action.initialize(request); - action.setFields(select.getFields()); - - if (select.getWhere() != null) { - 
request.setQuery(QueryMaker.explain(select.getWhere(), select.isQuery)); - } - new NestedFieldProjection(request).project(select.getFields(), select.getNestedJoinType()); - return request.request(); - } - catch (SqlParseException e) { - throw new ParserException("Illegal sql expr: " + expr.toString()); - } - } - - private SQLQueryExpr parseSql(String sql) { - ElasticSqlExprParser parser = new ElasticSqlExprParser(sql); - SQLExpr expr = parser.expr(); - if (parser.getLexer().token() != Token.EOF) { - throw new ParserException("Illegal sql: " + sql); - } - return (SQLQueryExpr) expr; - } - - private SQLQueryExpr rewrite(SQLQueryExpr expr) { - expr.accept(new NestedFieldRewriter()); - return expr; - } + private SQLQueryExpr rewrite(SQLQueryExpr expr) { + expr.accept(new NestedFieldRewriter()); + return expr; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/NestedFieldRewriterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/NestedFieldRewriterTest.java index 58a6f7e244..8cf99a0a40 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/NestedFieldRewriterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/NestedFieldRewriterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static java.util.stream.IntStream.range; @@ -29,630 +28,607 @@ public class NestedFieldRewriterTest { - @Test - public void regression() { - noImpact("SELECT * FROM team"); - noImpact("SELECT region FROM team/test, employees/test"); - noImpact("SELECT manager.name FROM team WHERE region = 'US' ORDER BY COUNT(*)"); - noImpact("SELECT COUNT(*) FROM team GROUP BY region"); - } - - @Test - public void selectWithoutFrom() { - // Expect no exception thrown - query("SELECT now()"); - } - - @Test - public void selectAll() { - same( - query("SELECT * FROM team t, t.employees"), - query("SELECT *, nested(employees.*, 'employees') FROM team") - ); - } - - @Test - 
public void selectAllWithGroupBy() { - same( - query("SELECT * FROM team t, t.employees e GROUP BY e.firstname"), - query("SELECT * FROM team GROUP BY nested(employees.firstname, 'employees')") - ); - } - - @Test - public void selectAllWithCondition() { - same( - query("SELECT * FROM team t, t.employees e WHERE e.age = 26"), - query("SELECT *, nested(employees.*, 'employees') FROM team WHERE nested(employees.age, 'employees') = 26") - ); - } - - @Test - public void singleCondition() { - same( - query("SELECT region FROM team t, t.employees e WHERE e.age = 26"), - query("SELECT region FROM team WHERE nested(employees.age, 'employees') = 26") - ); - } - - @Test - public void mixedWithObjectType() { - same( - query("SELECT region FROM team t, t.employees e WHERE e.age > 30 OR manager.age = 50"), - query("SELECT region FROM team WHERE nested(employees.age, 'employees') > 30 OR manager.age = 50") - ); - } - - @Test - public void noAlias() { - same( - query("SELECT region FROM team t, t.employees WHERE employees.age = 26"), - query("SELECT region FROM team WHERE nested(employees.age, 'employees') = 26") - ); - } - - @Test(expected = AssertionError.class) - public void multipleRegularTables() { - same( - query("SELECT region FROM team t, t.employees e, company c WHERE e.age = 26"), - query("SELECT region FROM team, company WHERE nested(employees.age) = 26") - ); - } - - @Test - public void eraseParentAlias() { - same( - query("SELECT t.age FROM team t, t.employees e WHERE t.region = 'US' AND age > 26"), - query("SELECT age FROM team WHERE region = 'US' AND age > 26") - ); - noImpact("SELECT t.age FROM team t WHERE t.region = 'US'"); - } - - @Test - public void select() { - same( - query("SELECT e.age FROM team t, t.employees e"), - query("SELECT nested(employees.age, 'employees' ) FROM team") - ); - } - - @Test - public void aggregationInSelect() { - same( - query("SELECT AVG(e.age) FROM team t, t.employees e"), - query("SELECT AVG(nested(employees.age, 'employees')) FROM 
team") - ); - } - - @Test - public void multipleAggregationsInSelect() { - same( - query("SELECT COUNT(*), AVG(e.age) FROM team t, t.employees e"), - query("SELECT COUNT(*), AVG(nested(employees.age, 'employees')) FROM team") - ); - } - - @Test - public void groupBy() { - same( - query("SELECT e.firstname, COUNT(*) FROM team t, t.employees e GROUP BY e.firstname"), - query("SELECT nested(employees.firstname, 'employees'), COUNT(*) FROM team GROUP BY nested(employees.firstname, 'employees')") - ); - } - - @Test - public void multipleFieldsInGroupBy() { - same( - query("SELECT COUNT(*) FROM team t, t.employees e GROUP BY t.manager, e.age"), - query("SELECT COUNT(*) FROM team GROUP BY manager, nested(employees.age, 'employees')") - ); - } - - @Test - public void orderBy() { - same( - query("SELECT region FROM team t, t.employees e ORDER BY e.age"), - query("SELECT region FROM team ORDER BY nested(employees.age)") - ); - } - - @Test - public void multipleConditions() { - same( - query("SELECT region " + - "FROM team t, t.manager m, t.employees e " + - "WHERE t.department = 'IT' AND " + - " (e.age = 26 OR (e.firstname = 'John' AND e.lastname = 'Smith')) AND " + - " t.region = 'US' AND " + - " (m.name = 'Alice' AND m.age = 50)"), - query("SELECT region " + - "FROM team " + - "WHERE department = 'IT' AND " + - " nested(\"employees\", employees.age = 26 OR (employees.firstname = 'John' AND employees.lastname = 'Smith')) AND " + - " region = 'US' AND " + - " nested(\"manager\", manager.name = 'Alice' AND manager.age = 50)") - ); - } - - @Test - public void multipleFieldsInFrom() { - same( - query("SELECT region FROM team/test t, t.manager m, t.employees e WHERE m.age = 30 AND e.age = 26"), - query("SELECT region FROM team/test WHERE nested(manager.age, 'manager') = 30 " + - "AND nested(employees.age, 'employees') = 26") - ); - } - - @Test - public void unionAll() { - // NLPchina doesn't support UNION (intersection) - same( - query("SELECT region FROM team t, t.employees e 
WHERE e.age = 26 " + - "UNION ALL " + - "SELECT region FROM team t, t.employees e WHERE e.firstname = 'John'"), - query("SELECT region FROM team WHERE nested(employees.age, 'employees') = 26 " + - "UNION ALL " + - "SELECT region FROM team WHERE nested(employees.firstname, 'employees') = 'John'") - ); - } - - @Test - public void minus() { - same( - query("SELECT region FROM team t, t.employees e WHERE e.age = 26 " + - "MINUS " + - "SELECT region FROM team t, t.employees e WHERE e.firstname = 'John'"), - query("SELECT region FROM team WHERE nested(employees.age, 'employees') = 26 " + - "MINUS " + - "SELECT region FROM team WHERE nested(employees.firstname, 'employees') = 'John'") - ); - } - - public void join() { - // TODO - } - - @Test - public void subQuery() { - // Subquery only support IN and TERMS - same( - query("SELECT region FROM team t, t.employees e " + - " WHERE e.age IN " + - " (SELECT t1.manager.age FROM team t1, t1.employees e1 WHERE e1.age > 0)"), - query("SELECT region FROM team " + - " WHERE nested(employees.age, 'employees') IN " + - " (SELECT manager.age FROM team WHERE nested(employees.age, 'employees') > 0)") - ); - } - - @Test - public void subQueryWitSameAlias() { - // Inner alias e shadow outer alias e of nested field - same( - query("SELECT name FROM team t, t.employees e " + - " WHERE e.age IN " + - " (SELECT e.age FROM team e, e.manager m WHERE e.age > 0 OR m.name = 'Alice')"), - query("SELECT name FROM team " + - " WHERE nested(employees.age, 'employees') IN " + - " (SELECT age FROM team WHERE age > 0 OR nested(manager.name, 'manager') = 'Alice')") - ); - } - - @Test - public void isNotNull() { - same( - query("SELECT e.name " + - "FROM employee as e, e.projects as p " + - "WHERE p IS NOT MISSING"), - query("SELECT name " + - "FROM employee " + - "WHERE nested(projects, 'projects') IS NOT MISSING") - ); - } - - @Test - public void isNotNullAndCondition() { - same( - query("SELECT e.name " + - "FROM employee as e, e.projects as p " + - 
"WHERE p IS NOT MISSING AND p.name LIKE 'security'"), - query("SELECT name " + - "FROM employee " + - "WHERE nested('projects', projects IS NOT MISSING AND projects.name LIKE 'security')") - ); - } - - @Test - public void multiCondition() { - same( - query("SELECT e.name FROM employee as e, e.projects as p WHERE p.year = 2016 and p.name LIKE 'security'"), - query("SELECT name FROM employee WHERE nested('projects', projects.year = 2016 AND projects.name LIKE 'security')") - ); - } - - @Test - public void nestedAndParentCondition() { - same( - query("SELECT name " + - "FROM employee " + - "WHERE nested(projects, 'projects') IS NOT MISSING AND name LIKE 'security'"), - query("SELECT e.name " + - "FROM employee e, e.projects p " + - "WHERE p IS NOT MISSING AND e.name LIKE 'security'") - ); - } - - @Test - public void aggWithWhereOnParent() { - same( - query("SELECT e.name, COUNT(p) as c " + - "FROM employee AS e, e.projects AS p " + - "WHERE e.name like '%smith%' " + - "GROUP BY e.name " + - "HAVING c > 1"), - query("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE name LIKE '%smith%' " + - "GROUP BY name " + - "HAVING c > 1") - ); - - } - - @Test - public void aggWithWhereOnNested() { - same( - query("SELECT e.name, COUNT(p) as c " + - "FROM employee AS e, e.projects AS p " + - "WHERE p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING c > 1"), - query("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " + - "HAVING c > 1") - ); - } - - @Test - public void aggWithWhereOnParentOrNested() { - same( - query("SELECT e.name, COUNT(p) as c " + - "FROM employee AS e, e.projects AS p " + - "WHERE e.name like '%smith%' or p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING c > 1"), - query("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE name LIKE '%smith%' OR 
nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " + - "HAVING c > 1") - ); - } - - @Test - public void aggWithWhereOnParentAndNested() { - same( - query("SELECT e.name, COUNT(p) as c " + - "FROM employee AS e, e.projects AS p " + - "WHERE e.name like '%smith%' AND p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING c > 1"), - query("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE name LIKE '%smith%' AND nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " + - "HAVING c > 1") - ); - } - - @Test - public void aggWithWhereOnNestedAndNested() { - same( - query("SELECT e.name, COUNT(p) as c " + - "FROM employee AS e, e.projects AS p " + - "WHERE p.started_year > 1990 AND p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING c > 1"), - query("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE nested('projects', projects.started_year > 1990 AND projects.name LIKE '%security%') " + - "GROUP BY name " + - "HAVING c > 1") - ); - } - - @Test - public void aggWithWhereOnNestedOrNested() { - same( - query("SELECT e.name, COUNT(p) as c " + - "FROM employee AS e, e.projects AS p " + - "WHERE p.started_year > 1990 OR p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING c > 1"), - query("SELECT name, COUNT(nested(projects, 'projects')) AS c " + - "FROM employee " + - "WHERE nested('projects', projects.started_year > 1990 OR projects.name LIKE '%security%') " + - "GROUP BY name " + - "HAVING c > 1") - ); - } - - @Test - public void aggInHavingWithWhereOnParent() { - same( - query("SELECT e.name " + - "FROM employee AS e, e.projects AS p " + - "WHERE e.name like '%smith%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"), - query("SELECT name " + - "FROM employee " + - "WHERE name LIKE '%smith%' " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1") - ); - - } - - @Test - public void 
aggInHavingWithWhereOnNested() { - same( - query("SELECT e.name " + - "FROM employee AS e, e.projects AS p " + - "WHERE p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"), - query("SELECT name " + - "FROM employee " + - "WHERE nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1") - ); - } - - @Test - public void aggInHavingWithWhereOnParentOrNested() { - same( - query("SELECT e.name " + - "FROM employee AS e, e.projects AS p " + - "WHERE e.name like '%smith%' or p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"), - query("SELECT name " + - "FROM employee " + - "WHERE name LIKE '%smith%' OR nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1") - ); - } - - @Test - public void aggInHavingWithWhereOnParentAndNested() { - same( - query("SELECT e.name " + - "FROM employee AS e, e.projects AS p " + - "WHERE e.name like '%smith%' AND p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"), - query("SELECT name " + - "FROM employee " + - "WHERE name LIKE '%smith%' AND nested(projects.name, 'projects') LIKE '%security%' " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1") - ); - } - - @Test - public void aggInHavingWithWhereOnNestedAndNested() { - same( - query("SELECT e.name " + - "FROM employee AS e, e.projects AS p " + - "WHERE p.started_year > 1990 AND p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"), - query("SELECT name " + - "FROM employee " + - "WHERE nested('projects', projects.started_year > 1990 AND projects.name LIKE '%security%') " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1") - ); - } - - @Test - public void aggInHavingWithWhereOnNestedOrNested() { - same( - query("SELECT e.name " + - "FROM employee AS e, e.projects AS p " + - "WHERE p.started_year > 1990 OR 
p.name LIKE '%security%' " + - "GROUP BY e.name " + - "HAVING COUNT(p) > 1"), - query("SELECT name " + - "FROM employee " + - "WHERE nested('projects', projects.started_year > 1990 OR projects.name LIKE '%security%') " + - "GROUP BY name " + - "HAVING COUNT(nested(projects, 'projects')) > 1") - ); - } - - @Test - public void notIsNotNull() { - same( - query("SELECT name " + - "FROM employee " + - "WHERE not (nested(projects, 'projects') IS NOT MISSING)"), - query("SELECT e.name " + - "FROM employee as e, e.projects as p " + - "WHERE not (p IS NOT MISSING)") - ); - } - - @Test - public void notIsNotNullAndCondition() { - same( - query("SELECT e.name " + - "FROM employee as e, e.projects as p " + - "WHERE not (p IS NOT MISSING AND p.name LIKE 'security')"), - query("SELECT name " + - "FROM employee " + - "WHERE not nested('projects', projects IS NOT MISSING AND projects.name LIKE 'security')") - ); - } - - @Test - public void notMultiCondition() { - same( - query("SELECT name " + - "FROM employee " + - "WHERE not nested('projects', projects.year = 2016 AND projects.name LIKE 'security')"), - query("SELECT e.name " + - "FROM employee as e, e.projects as p " + - "WHERE not (p.year = 2016 and p.name LIKE 'security')") - ); - } - - @Test - public void notNestedAndParentCondition() { - same( - query("SELECT name " + - "FROM employee " + - "WHERE (not nested(projects, 'projects') IS NOT MISSING) AND name LIKE 'security'"), - query("SELECT e.name " + - "FROM employee e, e.projects p " + - "WHERE not (p IS NOT MISSING) AND e.name LIKE 'security'") - ); - } - - private void noImpact(String sql) { - same(parse(sql), rewrite(parse(sql))); - } - - /** - * The intention for this assert method is: - * - * 1) MySqlSelectQueryBlock.equals() doesn't call super.equals(). - * But select items, from, where and group by are all held by parent class SQLSelectQueryBlock. - * - * 2) SQLSelectGroupByClause doesn't implement equals() at all.. 
- * MySqlSelectGroupByExpr compares identity of expression.. - * - * 3) MySqlUnionQuery doesn't implement equals() at all - */ - private void same(SQLQueryExpr actual, SQLQueryExpr expected) { - assertEquals(expected.getClass(), actual.getClass()); - - SQLSelect expectedQuery = expected.getSubQuery(); - SQLSelect actualQuery = actual.getSubQuery(); - assertEquals(expectedQuery.getOrderBy(), actualQuery.getOrderBy()); - assertQuery(expectedQuery, actualQuery); - } - - private void assertQuery(SQLSelect expected, SQLSelect actual) { - SQLSelectQuery expectedQuery = expected.getQuery(); - SQLSelectQuery actualQuery = actual.getQuery(); - if (actualQuery instanceof SQLSelectQueryBlock) { - assertQueryBlock( - (SQLSelectQueryBlock) expectedQuery, - (SQLSelectQueryBlock) actualQuery - ); - } - else if (actualQuery instanceof SQLUnionQuery) { - assertQueryBlock( - (SQLSelectQueryBlock) ((SQLUnionQuery) expectedQuery).getLeft(), - (SQLSelectQueryBlock) ((SQLUnionQuery) actualQuery).getLeft() - ); - assertQueryBlock( - (SQLSelectQueryBlock) ((SQLUnionQuery) expectedQuery).getRight(), - (SQLSelectQueryBlock) ((SQLUnionQuery) actualQuery).getRight() - ); - assertEquals( - ((SQLUnionQuery) expectedQuery).getOperator(), - ((SQLUnionQuery) actualQuery).getOperator() - ); - } - else { - throw new IllegalStateException("Unsupported test SQL"); - } - } - - private void assertQueryBlock(SQLSelectQueryBlock expected, SQLSelectQueryBlock actual) { - assertEquals("SELECT", expected.getSelectList(), actual.getSelectList()); - assertEquals("INTO", expected.getInto(), actual.getInto()); - assertEquals("WHERE", expected.getWhere(), actual.getWhere()); - if (actual.getWhere() instanceof SQLInSubQueryExpr) { - assertQuery( - ((SQLInSubQueryExpr) expected.getWhere()).getSubQuery(), - ((SQLInSubQueryExpr) actual.getWhere()).getSubQuery() - ); - } - assertEquals("PARENTHESIZED", expected.isParenthesized(), actual.isParenthesized()); - assertEquals("DISTION", expected.getDistionOption(), 
actual.getDistionOption()); - assertFrom(expected, actual); - if (!(expected.getGroupBy() == null && actual.getGroupBy() == null)) { - assertGroupBy(expected.getGroupBy(), actual.getGroupBy()); - } - } - - private void assertFrom(SQLSelectQueryBlock expected, SQLSelectQueryBlock actual) { - // Only 2 tables JOIN at most is supported - if (expected.getFrom() instanceof SQLExprTableSource) { - assertTable(expected.getFrom(), actual.getFrom()); - } else { - assertEquals(actual.getFrom().getClass(), SQLJoinTableSource.class); - assertTable( - ((SQLJoinTableSource) expected.getFrom()).getLeft(), - ((SQLJoinTableSource) actual.getFrom()).getLeft() - ); - assertTable( - ((SQLJoinTableSource) expected.getFrom()).getRight(), - ((SQLJoinTableSource) actual.getFrom()).getRight() - ); - assertEquals( - ((SQLJoinTableSource) expected.getFrom()).getJoinType(), - ((SQLJoinTableSource) actual.getFrom()).getJoinType() - ); - } - } - - private void assertGroupBy(SQLSelectGroupByClause expected, SQLSelectGroupByClause actual) { - assertEquals("HAVING", expected.getHaving(), actual.getHaving()); - - List expectedGroupby = expected.getItems(); - List actualGroupby = actual.getItems(); - assertEquals(expectedGroupby.size(), actualGroupby.size()); - range(0, expectedGroupby.size()). - forEach(i -> assertEquals( - ((MySqlSelectGroupByExpr) expectedGroupby.get(i)).getExpr(), - ((MySqlSelectGroupByExpr) actualGroupby.get(i)).getExpr()) - ); - } - - private void assertTable(SQLTableSource expect, SQLTableSource actual) { - assertEquals(SQLExprTableSource.class, expect.getClass()); - assertEquals(SQLExprTableSource.class, actual.getClass()); - assertEquals(((SQLExprTableSource) expect).getExpr(), ((SQLExprTableSource) actual).getExpr()); - assertEquals(expect.getAlias(), actual.getAlias()); - } - - /** - * Walk through extra rewrite logic if NOT found "nested" in SQL query statement. - * Otherwise return as before so that original logic be compared with result of rewrite. 
- * - * @param sql Test sql - * @return Node parsed out of sql - */ - private SQLQueryExpr query(String sql) { - SQLQueryExpr expr = SqlParserUtils.parse(sql); - if (sql.contains("nested")) { - return expr; - } - return rewrite(expr); - } - - private SQLQueryExpr rewrite(SQLQueryExpr expr) { - expr.accept(new NestedFieldRewriter()); - return expr; - } - + @Test + public void regression() { + noImpact("SELECT * FROM team"); + noImpact("SELECT region FROM team/test, employees/test"); + noImpact("SELECT manager.name FROM team WHERE region = 'US' ORDER BY COUNT(*)"); + noImpact("SELECT COUNT(*) FROM team GROUP BY region"); + } + + @Test + public void selectWithoutFrom() { + // Expect no exception thrown + query("SELECT now()"); + } + + @Test + public void selectAll() { + same( + query("SELECT * FROM team t, t.employees"), + query("SELECT *, nested(employees.*, 'employees') FROM team")); + } + + @Test + public void selectAllWithGroupBy() { + same( + query("SELECT * FROM team t, t.employees e GROUP BY e.firstname"), + query("SELECT * FROM team GROUP BY nested(employees.firstname, 'employees')")); + } + + @Test + public void selectAllWithCondition() { + same( + query("SELECT * FROM team t, t.employees e WHERE e.age = 26"), + query( + "SELECT *, nested(employees.*, 'employees') FROM team WHERE nested(employees.age," + + " 'employees') = 26")); + } + + @Test + public void singleCondition() { + same( + query("SELECT region FROM team t, t.employees e WHERE e.age = 26"), + query("SELECT region FROM team WHERE nested(employees.age, 'employees') = 26")); + } + + @Test + public void mixedWithObjectType() { + same( + query("SELECT region FROM team t, t.employees e WHERE e.age > 30 OR manager.age = 50"), + query( + "SELECT region FROM team WHERE nested(employees.age, 'employees') > 30 OR manager.age =" + + " 50")); + } + + @Test + public void noAlias() { + same( + query("SELECT region FROM team t, t.employees WHERE employees.age = 26"), + query("SELECT region FROM team WHERE 
nested(employees.age, 'employees') = 26")); + } + + @Test(expected = AssertionError.class) + public void multipleRegularTables() { + same( + query("SELECT region FROM team t, t.employees e, company c WHERE e.age = 26"), + query("SELECT region FROM team, company WHERE nested(employees.age) = 26")); + } + + @Test + public void eraseParentAlias() { + same( + query("SELECT t.age FROM team t, t.employees e WHERE t.region = 'US' AND age > 26"), + query("SELECT age FROM team WHERE region = 'US' AND age > 26")); + noImpact("SELECT t.age FROM team t WHERE t.region = 'US'"); + } + + @Test + public void select() { + same( + query("SELECT e.age FROM team t, t.employees e"), + query("SELECT nested(employees.age, 'employees' ) FROM team")); + } + + @Test + public void aggregationInSelect() { + same( + query("SELECT AVG(e.age) FROM team t, t.employees e"), + query("SELECT AVG(nested(employees.age, 'employees')) FROM team")); + } + + @Test + public void multipleAggregationsInSelect() { + same( + query("SELECT COUNT(*), AVG(e.age) FROM team t, t.employees e"), + query("SELECT COUNT(*), AVG(nested(employees.age, 'employees')) FROM team")); + } + + @Test + public void groupBy() { + same( + query("SELECT e.firstname, COUNT(*) FROM team t, t.employees e GROUP BY e.firstname"), + query( + "SELECT nested(employees.firstname, 'employees'), COUNT(*) FROM team GROUP BY" + + " nested(employees.firstname, 'employees')")); + } + + @Test + public void multipleFieldsInGroupBy() { + same( + query("SELECT COUNT(*) FROM team t, t.employees e GROUP BY t.manager, e.age"), + query("SELECT COUNT(*) FROM team GROUP BY manager, nested(employees.age, 'employees')")); + } + + @Test + public void orderBy() { + same( + query("SELECT region FROM team t, t.employees e ORDER BY e.age"), + query("SELECT region FROM team ORDER BY nested(employees.age)")); + } + + @Test + public void multipleConditions() { + same( + query( + "SELECT region " + + "FROM team t, t.manager m, t.employees e " + + "WHERE t.department = 
'IT' AND " + + " (e.age = 26 OR (e.firstname = 'John' AND e.lastname = 'Smith')) AND " + + " t.region = 'US' AND " + + " (m.name = 'Alice' AND m.age = 50)"), + query( + "SELECT region FROM team WHERE department = 'IT' AND nested(\"employees\"," + + " employees.age = 26 OR (employees.firstname = 'John' AND employees.lastname =" + + " 'Smith')) AND region = 'US' AND nested(\"manager\", manager.name =" + + " 'Alice' AND manager.age = 50)")); + } + + @Test + public void multipleFieldsInFrom() { + same( + query( + "SELECT region FROM team/test t, t.manager m, t.employees e WHERE m.age = 30 AND e.age" + + " = 26"), + query( + "SELECT region FROM team/test WHERE nested(manager.age, 'manager') = 30 " + + "AND nested(employees.age, 'employees') = 26")); + } + + @Test + public void unionAll() { + // NLPchina doesn't support UNION (intersection) + same( + query( + "SELECT region FROM team t, t.employees e WHERE e.age = 26 " + + "UNION ALL " + + "SELECT region FROM team t, t.employees e WHERE e.firstname = 'John'"), + query( + "SELECT region FROM team WHERE nested(employees.age, 'employees') = 26 UNION ALL SELECT" + + " region FROM team WHERE nested(employees.firstname, 'employees') = 'John'")); + } + + @Test + public void minus() { + same( + query( + "SELECT region FROM team t, t.employees e WHERE e.age = 26 " + + "MINUS " + + "SELECT region FROM team t, t.employees e WHERE e.firstname = 'John'"), + query( + "SELECT region FROM team WHERE nested(employees.age, 'employees') = 26 MINUS SELECT" + + " region FROM team WHERE nested(employees.firstname, 'employees') = 'John'")); + } + + public void join() { + // TODO + } + + @Test + public void subQuery() { + // Subquery only support IN and TERMS + same( + query( + "SELECT region FROM team t, t.employees e " + + " WHERE e.age IN " + + " (SELECT t1.manager.age FROM team t1, t1.employees e1 WHERE e1.age > 0)"), + query( + "SELECT region FROM team WHERE nested(employees.age, 'employees') IN (SELECT" + + " manager.age FROM team WHERE 
nested(employees.age, 'employees') > 0)")); + } + + @Test + public void subQueryWitSameAlias() { + // Inner alias e shadow outer alias e of nested field + same( + query( + "SELECT name FROM team t, t.employees e WHERE e.age IN (SELECT e.age FROM team e," + + " e.manager m WHERE e.age > 0 OR m.name = 'Alice')"), + query( + "SELECT name FROM team WHERE nested(employees.age, 'employees') IN (SELECT age" + + " FROM team WHERE age > 0 OR nested(manager.name, 'manager') = 'Alice')")); + } + + @Test + public void isNotNull() { + same( + query("SELECT e.name " + "FROM employee as e, e.projects as p " + "WHERE p IS NOT MISSING"), + query( + "SELECT name " + + "FROM employee " + + "WHERE nested(projects, 'projects') IS NOT MISSING")); + } + + @Test + public void isNotNullAndCondition() { + same( + query( + "SELECT e.name " + + "FROM employee as e, e.projects as p " + + "WHERE p IS NOT MISSING AND p.name LIKE 'security'"), + query( + "SELECT name FROM employee WHERE nested('projects', projects IS NOT MISSING AND" + + " projects.name LIKE 'security')")); + } + + @Test + public void multiCondition() { + same( + query( + "SELECT e.name FROM employee as e, e.projects as p WHERE p.year = 2016 and p.name LIKE" + + " 'security'"), + query( + "SELECT name FROM employee WHERE nested('projects', projects.year = 2016 AND" + + " projects.name LIKE 'security')")); + } + + @Test + public void nestedAndParentCondition() { + same( + query( + "SELECT name " + + "FROM employee " + + "WHERE nested(projects, 'projects') IS NOT MISSING AND name LIKE 'security'"), + query( + "SELECT e.name " + + "FROM employee e, e.projects p " + + "WHERE p IS NOT MISSING AND e.name LIKE 'security'")); + } + + @Test + public void aggWithWhereOnParent() { + same( + query( + "SELECT e.name, COUNT(p) as c " + + "FROM employee AS e, e.projects AS p " + + "WHERE e.name like '%smith%' " + + "GROUP BY e.name " + + "HAVING c > 1"), + query( + "SELECT name, COUNT(nested(projects, 'projects')) AS c " + + "FROM employee " + 
+ "WHERE name LIKE '%smith%' " + + "GROUP BY name " + + "HAVING c > 1")); + } + + @Test + public void aggWithWhereOnNested() { + same( + query( + "SELECT e.name, COUNT(p) as c " + + "FROM employee AS e, e.projects AS p " + + "WHERE p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING c > 1"), + query( + "SELECT name, COUNT(nested(projects, 'projects')) AS c " + + "FROM employee " + + "WHERE nested(projects.name, 'projects') LIKE '%security%' " + + "GROUP BY name " + + "HAVING c > 1")); + } + + @Test + public void aggWithWhereOnParentOrNested() { + same( + query( + "SELECT e.name, COUNT(p) as c " + + "FROM employee AS e, e.projects AS p " + + "WHERE e.name like '%smith%' or p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING c > 1"), + query( + "SELECT name, COUNT(nested(projects, 'projects')) AS c FROM employee WHERE name LIKE" + + " '%smith%' OR nested(projects.name, 'projects') LIKE '%security%' GROUP BY name" + + " HAVING c > 1")); + } + + @Test + public void aggWithWhereOnParentAndNested() { + same( + query( + "SELECT e.name, COUNT(p) as c " + + "FROM employee AS e, e.projects AS p " + + "WHERE e.name like '%smith%' AND p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING c > 1"), + query( + "SELECT name, COUNT(nested(projects, 'projects')) AS c FROM employee WHERE name LIKE" + + " '%smith%' AND nested(projects.name, 'projects') LIKE '%security%' GROUP BY name" + + " HAVING c > 1")); + } + + @Test + public void aggWithWhereOnNestedAndNested() { + same( + query( + "SELECT e.name, COUNT(p) as c " + + "FROM employee AS e, e.projects AS p " + + "WHERE p.started_year > 1990 AND p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING c > 1"), + query( + "SELECT name, COUNT(nested(projects, 'projects')) AS c FROM employee WHERE" + + " nested('projects', projects.started_year > 1990 AND projects.name LIKE" + + " '%security%') GROUP BY name HAVING c > 1")); + } + + @Test + public void aggWithWhereOnNestedOrNested() { + same( + query( + 
"SELECT e.name, COUNT(p) as c " + + "FROM employee AS e, e.projects AS p " + + "WHERE p.started_year > 1990 OR p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING c > 1"), + query( + "SELECT name, COUNT(nested(projects, 'projects')) AS c FROM employee WHERE" + + " nested('projects', projects.started_year > 1990 OR projects.name LIKE" + + " '%security%') GROUP BY name HAVING c > 1")); + } + + @Test + public void aggInHavingWithWhereOnParent() { + same( + query( + "SELECT e.name " + + "FROM employee AS e, e.projects AS p " + + "WHERE e.name like '%smith%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"), + query( + "SELECT name " + + "FROM employee " + + "WHERE name LIKE '%smith%' " + + "GROUP BY name " + + "HAVING COUNT(nested(projects, 'projects')) > 1")); + } + + @Test + public void aggInHavingWithWhereOnNested() { + same( + query( + "SELECT e.name " + + "FROM employee AS e, e.projects AS p " + + "WHERE p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"), + query( + "SELECT name " + + "FROM employee " + + "WHERE nested(projects.name, 'projects') LIKE '%security%' " + + "GROUP BY name " + + "HAVING COUNT(nested(projects, 'projects')) > 1")); + } + + @Test + public void aggInHavingWithWhereOnParentOrNested() { + same( + query( + "SELECT e.name " + + "FROM employee AS e, e.projects AS p " + + "WHERE e.name like '%smith%' or p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"), + query( + "SELECT name FROM employee WHERE name LIKE '%smith%' OR nested(projects.name," + + " 'projects') LIKE '%security%' GROUP BY name HAVING COUNT(nested(projects," + + " 'projects')) > 1")); + } + + @Test + public void aggInHavingWithWhereOnParentAndNested() { + same( + query( + "SELECT e.name " + + "FROM employee AS e, e.projects AS p " + + "WHERE e.name like '%smith%' AND p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"), + query( + "SELECT name FROM employee WHERE name LIKE '%smith%' AND 
nested(projects.name," + + " 'projects') LIKE '%security%' GROUP BY name HAVING COUNT(nested(projects," + + " 'projects')) > 1")); + } + + @Test + public void aggInHavingWithWhereOnNestedAndNested() { + same( + query( + "SELECT e.name " + + "FROM employee AS e, e.projects AS p " + + "WHERE p.started_year > 1990 AND p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"), + query( + "SELECT name FROM employee WHERE nested('projects', projects.started_year > 1990 AND" + + " projects.name LIKE '%security%') GROUP BY name HAVING COUNT(nested(projects," + + " 'projects')) > 1")); + } + + @Test + public void aggInHavingWithWhereOnNestedOrNested() { + same( + query( + "SELECT e.name " + + "FROM employee AS e, e.projects AS p " + + "WHERE p.started_year > 1990 OR p.name LIKE '%security%' " + + "GROUP BY e.name " + + "HAVING COUNT(p) > 1"), + query( + "SELECT name FROM employee WHERE nested('projects', projects.started_year > 1990 OR" + + " projects.name LIKE '%security%') GROUP BY name HAVING COUNT(nested(projects," + + " 'projects')) > 1")); + } + + @Test + public void notIsNotNull() { + same( + query( + "SELECT name " + + "FROM employee " + + "WHERE not (nested(projects, 'projects') IS NOT MISSING)"), + query( + "SELECT e.name " + + "FROM employee as e, e.projects as p " + + "WHERE not (p IS NOT MISSING)")); + } + + @Test + public void notIsNotNullAndCondition() { + same( + query( + "SELECT e.name " + + "FROM employee as e, e.projects as p " + + "WHERE not (p IS NOT MISSING AND p.name LIKE 'security')"), + query( + "SELECT name FROM employee WHERE not nested('projects', projects IS NOT MISSING AND" + + " projects.name LIKE 'security')")); + } + + @Test + public void notMultiCondition() { + same( + query( + "SELECT name FROM employee WHERE not nested('projects', projects.year = 2016 AND" + + " projects.name LIKE 'security')"), + query( + "SELECT e.name " + + "FROM employee as e, e.projects as p " + + "WHERE not (p.year = 2016 and p.name LIKE 
'security')")); + } + + @Test + public void notNestedAndParentCondition() { + same( + query( + "SELECT name FROM employee WHERE (not nested(projects, 'projects') IS NOT MISSING) AND" + + " name LIKE 'security'"), + query( + "SELECT e.name " + + "FROM employee e, e.projects p " + + "WHERE not (p IS NOT MISSING) AND e.name LIKE 'security'")); + } + + private void noImpact(String sql) { + same(parse(sql), rewrite(parse(sql))); + } + + /** + * The intention for this assert method is: + * + *

1) MySqlSelectQueryBlock.equals() doesn't call super.equals(). But select items, from, where + * and group by are all held by parent class SQLSelectQueryBlock. + * + *

2) SQLSelectGroupByClause doesn't implement equals() at all.. MySqlSelectGroupByExpr + * compares identity of expression.. + * + *

3) MySqlUnionQuery doesn't implement equals() at all + */ + private void same(SQLQueryExpr actual, SQLQueryExpr expected) { + assertEquals(expected.getClass(), actual.getClass()); + + SQLSelect expectedQuery = expected.getSubQuery(); + SQLSelect actualQuery = actual.getSubQuery(); + assertEquals(expectedQuery.getOrderBy(), actualQuery.getOrderBy()); + assertQuery(expectedQuery, actualQuery); + } + + private void assertQuery(SQLSelect expected, SQLSelect actual) { + SQLSelectQuery expectedQuery = expected.getQuery(); + SQLSelectQuery actualQuery = actual.getQuery(); + if (actualQuery instanceof SQLSelectQueryBlock) { + assertQueryBlock((SQLSelectQueryBlock) expectedQuery, (SQLSelectQueryBlock) actualQuery); + } else if (actualQuery instanceof SQLUnionQuery) { + assertQueryBlock( + (SQLSelectQueryBlock) ((SQLUnionQuery) expectedQuery).getLeft(), + (SQLSelectQueryBlock) ((SQLUnionQuery) actualQuery).getLeft()); + assertQueryBlock( + (SQLSelectQueryBlock) ((SQLUnionQuery) expectedQuery).getRight(), + (SQLSelectQueryBlock) ((SQLUnionQuery) actualQuery).getRight()); + assertEquals( + ((SQLUnionQuery) expectedQuery).getOperator(), + ((SQLUnionQuery) actualQuery).getOperator()); + } else { + throw new IllegalStateException("Unsupported test SQL"); + } + } + + private void assertQueryBlock(SQLSelectQueryBlock expected, SQLSelectQueryBlock actual) { + assertEquals("SELECT", expected.getSelectList(), actual.getSelectList()); + assertEquals("INTO", expected.getInto(), actual.getInto()); + assertEquals("WHERE", expected.getWhere(), actual.getWhere()); + if (actual.getWhere() instanceof SQLInSubQueryExpr) { + assertQuery( + ((SQLInSubQueryExpr) expected.getWhere()).getSubQuery(), + ((SQLInSubQueryExpr) actual.getWhere()).getSubQuery()); + } + assertEquals("PARENTHESIZED", expected.isParenthesized(), actual.isParenthesized()); + assertEquals("DISTION", expected.getDistionOption(), actual.getDistionOption()); + assertFrom(expected, actual); + if (!(expected.getGroupBy() == null && 
actual.getGroupBy() == null)) { + assertGroupBy(expected.getGroupBy(), actual.getGroupBy()); + } + } + + private void assertFrom(SQLSelectQueryBlock expected, SQLSelectQueryBlock actual) { + // Only 2 tables JOIN at most is supported + if (expected.getFrom() instanceof SQLExprTableSource) { + assertTable(expected.getFrom(), actual.getFrom()); + } else { + assertEquals(actual.getFrom().getClass(), SQLJoinTableSource.class); + assertTable( + ((SQLJoinTableSource) expected.getFrom()).getLeft(), + ((SQLJoinTableSource) actual.getFrom()).getLeft()); + assertTable( + ((SQLJoinTableSource) expected.getFrom()).getRight(), + ((SQLJoinTableSource) actual.getFrom()).getRight()); + assertEquals( + ((SQLJoinTableSource) expected.getFrom()).getJoinType(), + ((SQLJoinTableSource) actual.getFrom()).getJoinType()); + } + } + + private void assertGroupBy(SQLSelectGroupByClause expected, SQLSelectGroupByClause actual) { + assertEquals("HAVING", expected.getHaving(), actual.getHaving()); + + List expectedGroupby = expected.getItems(); + List actualGroupby = actual.getItems(); + assertEquals(expectedGroupby.size(), actualGroupby.size()); + range(0, expectedGroupby.size()) + .forEach( + i -> + assertEquals( + ((MySqlSelectGroupByExpr) expectedGroupby.get(i)).getExpr(), + ((MySqlSelectGroupByExpr) actualGroupby.get(i)).getExpr())); + } + + private void assertTable(SQLTableSource expect, SQLTableSource actual) { + assertEquals(SQLExprTableSource.class, expect.getClass()); + assertEquals(SQLExprTableSource.class, actual.getClass()); + assertEquals(((SQLExprTableSource) expect).getExpr(), ((SQLExprTableSource) actual).getExpr()); + assertEquals(expect.getAlias(), actual.getAlias()); + } + + /** + * Walk through extra rewrite logic if NOT found "nested" in SQL query statement. Otherwise return + * as before so that original logic be compared with result of rewrite. 
+ * + * @param sql Test sql + * @return Node parsed out of sql + */ + private SQLQueryExpr query(String sql) { + SQLQueryExpr expr = SqlParserUtils.parse(sql); + if (sql.contains("nested")) { + return expr; + } + return rewrite(expr); + } + + private SQLQueryExpr rewrite(SQLQueryExpr expr) { + expr.accept(new NestedFieldRewriter()); + return expr; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/OpenSearchClientTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/OpenSearchClientTest.java index 2a654774d4..2dd5cc16ac 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/OpenSearchClientTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/OpenSearchClientTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.mockito.Matchers.any; @@ -27,39 +26,42 @@ public class OpenSearchClientTest { - @Mock - protected Client client; + @Mock protected Client client; - @Before - public void init() { - MockitoAnnotations.initMocks(this); - ActionFuture mockFuture = mock(ActionFuture.class); - when(client.multiSearch(any())).thenReturn(mockFuture); + @Before + public void init() { + MockitoAnnotations.initMocks(this); + ActionFuture mockFuture = mock(ActionFuture.class); + when(client.multiSearch(any())).thenReturn(mockFuture); - MultiSearchResponse response = mock(MultiSearchResponse.class); - when(mockFuture.actionGet()).thenReturn(response); + MultiSearchResponse response = mock(MultiSearchResponse.class); + when(mockFuture.actionGet()).thenReturn(response); - MultiSearchResponse.Item item0 = new MultiSearchResponse.Item(mock(SearchResponse.class), null); - MultiSearchResponse.Item item1 = new MultiSearchResponse.Item(mock(SearchResponse.class), new Exception()); - MultiSearchResponse.Item[] itemsRetry0 = new MultiSearchResponse.Item[]{item0, item1}; - MultiSearchResponse.Item[] itemsRetry1 = new MultiSearchResponse.Item[]{item0}; - 
when(response.getResponses()).thenAnswer(new Answer() { - private int callCnt; + MultiSearchResponse.Item item0 = new MultiSearchResponse.Item(mock(SearchResponse.class), null); + MultiSearchResponse.Item item1 = + new MultiSearchResponse.Item(mock(SearchResponse.class), new Exception()); + MultiSearchResponse.Item[] itemsRetry0 = new MultiSearchResponse.Item[] {item0, item1}; + MultiSearchResponse.Item[] itemsRetry1 = new MultiSearchResponse.Item[] {item0}; + when(response.getResponses()) + .thenAnswer( + new Answer() { + private int callCnt; - @Override - public MultiSearchResponse.Item[] answer(InvocationOnMock invocation) { + @Override + public MultiSearchResponse.Item[] answer(InvocationOnMock invocation) { return callCnt++ == 0 ? itemsRetry0 : itemsRetry1; - } - }); - } - - @Test - public void multiSearchRetryOneTime() { - OpenSearchClient openSearchClient = new OpenSearchClient(client); - MultiSearchResponse.Item[] res = openSearchClient.multiSearch(new MultiSearchRequest().add(new SearchRequest()).add(new SearchRequest())); - Assert.assertEquals(res.length, 2); - Assert.assertFalse(res[0].isFailure()); - Assert.assertFalse(res[1].isFailure()); - } + } + }); + } + @Test + public void multiSearchRetryOneTime() { + OpenSearchClient openSearchClient = new OpenSearchClient(client); + MultiSearchResponse.Item[] res = + openSearchClient.multiSearch( + new MultiSearchRequest().add(new SearchRequest()).add(new SearchRequest())); + Assert.assertEquals(res.length, 2); + Assert.assertFalse(res[0].isFailure()); + Assert.assertFalse(res[1].isFailure()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/PreparedStatementRequestTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/PreparedStatementRequestTest.java index 0b714ed41c..8a31c530e3 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/PreparedStatementRequestTest.java +++ 
b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/PreparedStatementRequestTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import java.util.ArrayList; @@ -15,64 +14,68 @@ public class PreparedStatementRequestTest { - @Test - public void testSubstitute() { - String sqlTemplate = "select * from table_name where number_param > ? and string_param = 'Amazon.com' " + - "and test_str = '''test escape? \\'' and state in (?,?) and null_param = ? and double_param = ? " + - "and question_mark = '?'"; - List params = new ArrayList<>(); - params.add(new PreparedStatementRequest.PreparedStatementParameter(10)); - params.add(new PreparedStatementRequest.StringParameter("WA")); - params.add(new PreparedStatementRequest.StringParameter("")); - params.add(new PreparedStatementRequest.NullParameter()); - params.add(new PreparedStatementRequest.PreparedStatementParameter(2.0)); - PreparedStatementRequest psr = new PreparedStatementRequest(sqlTemplate, new JSONObject(), params); - String generatedSql = psr.getSql(); - - String expectedSql = "select * from table_name where number_param > 10 and string_param = 'Amazon.com' " + - "and test_str = '''test escape? 
\\'' and state in ('WA','') and null_param = null " + - "and double_param = 2.0 and question_mark = '?'"; - Assert.assertEquals(expectedSql, generatedSql); - } - - @Test - public void testStringParameter() { - PreparedStatementRequest.StringParameter param; - param = new PreparedStatementRequest.StringParameter("test string"); - Assert.assertEquals("'test string'", param.getSqlSubstitutionValue()); - - param = new PreparedStatementRequest.StringParameter("test ' single ' quote '"); - Assert.assertEquals("'test \\' single \\' quote \\''", param.getSqlSubstitutionValue()); - - param = new PreparedStatementRequest.StringParameter("test line \n break \n char"); - Assert.assertEquals("'test line \\n break \\n char'", param.getSqlSubstitutionValue()); - - param = new PreparedStatementRequest.StringParameter("test carriage \r return \r char"); - Assert.assertEquals("'test carriage \\r return \\r char'", param.getSqlSubstitutionValue()); - - param = new PreparedStatementRequest.StringParameter("test \\ backslash \\ char"); - Assert.assertEquals("'test \\\\ backslash \\\\ char'", param.getSqlSubstitutionValue()); - - param = new PreparedStatementRequest.StringParameter("test single ' quote ' char"); - Assert.assertEquals("'test single \\' quote \\' char'", param.getSqlSubstitutionValue()); - - param = new PreparedStatementRequest.StringParameter("test double \" quote \" char"); - Assert.assertEquals("'test double \\\" quote \\\" char'", param.getSqlSubstitutionValue()); - } - - @Test(expected = IllegalStateException.class) - public void testSubstitute_parameterNumberNotMatch() { - String sqlTemplate = "select * from table_name where param1 = ? 
and param2 = ?"; - List params = new ArrayList<>(); - params.add(new PreparedStatementRequest.StringParameter("value")); - - PreparedStatementRequest psr = new PreparedStatementRequest(sqlTemplate, new JSONObject(), params); - } - - @Test - public void testSubstitute_nullSql() { - PreparedStatementRequest psr = new PreparedStatementRequest(null, new JSONObject(), null); - - Assert.assertNull(psr.getSql()); - } + @Test + public void testSubstitute() { + String sqlTemplate = + "select * from table_name where number_param > ? and string_param = 'Amazon.com' and" + + " test_str = '''test escape? \\'' and state in (?,?) and null_param = ? and" + + " double_param = ? and question_mark = '?'"; + List params = new ArrayList<>(); + params.add(new PreparedStatementRequest.PreparedStatementParameter(10)); + params.add(new PreparedStatementRequest.StringParameter("WA")); + params.add(new PreparedStatementRequest.StringParameter("")); + params.add(new PreparedStatementRequest.NullParameter()); + params.add(new PreparedStatementRequest.PreparedStatementParameter(2.0)); + PreparedStatementRequest psr = + new PreparedStatementRequest(sqlTemplate, new JSONObject(), params); + String generatedSql = psr.getSql(); + + String expectedSql = + "select * from table_name where number_param > 10 and string_param = 'Amazon.com' " + + "and test_str = '''test escape? 
\\'' and state in ('WA','') and null_param = null " + + "and double_param = 2.0 and question_mark = '?'"; + Assert.assertEquals(expectedSql, generatedSql); + } + + @Test + public void testStringParameter() { + PreparedStatementRequest.StringParameter param; + param = new PreparedStatementRequest.StringParameter("test string"); + Assert.assertEquals("'test string'", param.getSqlSubstitutionValue()); + + param = new PreparedStatementRequest.StringParameter("test ' single ' quote '"); + Assert.assertEquals("'test \\' single \\' quote \\''", param.getSqlSubstitutionValue()); + + param = new PreparedStatementRequest.StringParameter("test line \n break \n char"); + Assert.assertEquals("'test line \\n break \\n char'", param.getSqlSubstitutionValue()); + + param = new PreparedStatementRequest.StringParameter("test carriage \r return \r char"); + Assert.assertEquals("'test carriage \\r return \\r char'", param.getSqlSubstitutionValue()); + + param = new PreparedStatementRequest.StringParameter("test \\ backslash \\ char"); + Assert.assertEquals("'test \\\\ backslash \\\\ char'", param.getSqlSubstitutionValue()); + + param = new PreparedStatementRequest.StringParameter("test single ' quote ' char"); + Assert.assertEquals("'test single \\' quote \\' char'", param.getSqlSubstitutionValue()); + + param = new PreparedStatementRequest.StringParameter("test double \" quote \" char"); + Assert.assertEquals("'test double \\\" quote \\\" char'", param.getSqlSubstitutionValue()); + } + + @Test(expected = IllegalStateException.class) + public void testSubstitute_parameterNumberNotMatch() { + String sqlTemplate = "select * from table_name where param1 = ? 
and param2 = ?"; + List params = new ArrayList<>(); + params.add(new PreparedStatementRequest.StringParameter("value")); + + PreparedStatementRequest psr = + new PreparedStatementRequest(sqlTemplate, new JSONObject(), params); + } + + @Test + public void testSubstitute_nullSql() { + PreparedStatementRequest psr = new PreparedStatementRequest(null, new JSONObject(), null); + + Assert.assertNull(psr.getSql()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/QueryFunctionsTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/QueryFunctionsTest.java index 0ebf89e296..b5a82f6737 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/QueryFunctionsTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/QueryFunctionsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.hamcrest.MatcherAssert.assertThat; @@ -37,272 +36,178 @@ public class QueryFunctionsTest { - private static final String SELECT_ALL = "SELECT *"; - private static final String FROM_ACCOUNTS = "FROM " + TestsConstants.TEST_INDEX_ACCOUNT + "/account"; - private static final String FROM_NESTED = "FROM " + TestsConstants.TEST_INDEX_NESTED_TYPE + "/nestedType"; - private static final String FROM_PHRASE = "FROM " + TestsConstants.TEST_INDEX_PHRASE + "/phrase"; - - @Test - public void query() { - assertThat( - query( - FROM_ACCOUNTS, - "WHERE QUERY('CA')" - ), - contains( - queryStringQuery("CA") - ) - ); - } - - @Test - public void matchQueryRegularField() { - assertThat( - query( - FROM_ACCOUNTS, - "WHERE MATCH_QUERY(firstname, 'Ayers')" - ), - contains( - matchQuery("firstname", "Ayers") - ) - ); - } - - @Test - public void matchQueryNestedField() { - assertThat( - query( - FROM_NESTED, - "WHERE MATCH_QUERY(NESTED(comment.data), 'aa')" - ), - contains( - nestedQuery("comment", matchQuery("comment.data", "aa"), ScoreMode.None) - ) - ); - } - - @Test - public void 
scoreQuery() { - assertThat( - query( - FROM_ACCOUNTS, - "WHERE SCORE(MATCH_QUERY(firstname, 'Ayers'), 10)" - ), - contains( - constantScoreQuery( - matchQuery("firstname", "Ayers") - ).boost(10) - ) - ); - } - - @Test - public void scoreQueryWithNestedField() { - assertThat( - query( - FROM_NESTED, - "WHERE SCORE(MATCH_QUERY(NESTED(comment.data), 'ab'), 10)" - ), - contains( - constantScoreQuery( - nestedQuery("comment", matchQuery("comment.data", "ab"), ScoreMode.None) - ).boost(10) - ) - ); - } - - @Test - public void wildcardQueryRegularField() { - assertThat( - query( - FROM_ACCOUNTS, - "WHERE WILDCARD_QUERY(city.keyword, 'B*')" - ), - contains( - wildcardQuery("city.keyword", "B*") - ) - ); - } - - @Test - public void wildcardQueryNestedField() { - assertThat( - query( - FROM_NESTED, - "WHERE WILDCARD_QUERY(nested(comment.data), 'a*')" - ), - contains( - nestedQuery("comment", wildcardQuery("comment.data", "a*"), ScoreMode.None) - ) - ); - } - - @Test - public void matchPhraseQueryDefault() { - assertThat( - query( - FROM_PHRASE, - "WHERE MATCH_PHRASE(phrase, 'brown fox')" - ), - contains( - matchPhraseQuery("phrase", "brown fox") - ) - ); - } - - @Test - public void matchPhraseQueryWithSlop() { - assertThat( - query( - FROM_PHRASE, - "WHERE MATCH_PHRASE(phrase, 'brown fox', slop=2)" - ), - contains( - matchPhraseQuery("phrase", "brown fox").slop(2) - ) - ); - } - - @Test - public void multiMatchQuerySingleField() { - assertThat( - query( - FROM_ACCOUNTS, - "WHERE MULTI_MATCH(query='Ayers', fields='firstname')" - ), - contains( - multiMatchQuery("Ayers").field("firstname") - ) - ); - } - - @Test - public void multiMatchQueryWildcardField() { - assertThat( - query( - FROM_ACCOUNTS, - "WHERE MULTI_MATCH(query='Ay', fields='*name', type='phrase_prefix')" - ), - contains( - multiMatchQuery("Ay"). - field("*name"). 
- type(MultiMatchQueryBuilder.Type.PHRASE_PREFIX) - ) - ); - } - - @Test - public void numberLiteralInSelectField() { - String query = "SELECT 2 AS number FROM bank WHERE age > 20"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "def assign" - ) - ); - } - - @Test - public void ifFunctionWithConditionStatement() { - String query = "SELECT IF(age > 35, 'elastic', 'search') AS Ages FROM accounts"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "boolean cond = doc['age'].value > 35;" - ) - ); - } - - @Test - public void ifFunctionWithEquationConditionStatement() { - String query = "SELECT IF(age = 35, 'elastic', 'search') AS Ages FROM accounts"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "boolean cond = doc['age'].value == 35;" - ) - ); - } - - @Test - public void ifFunctionWithConstantConditionStatement() { - String query = "SELECT IF(1 = 2, 'elastic', 'search') FROM accounts"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "boolean cond = 1 == 2;" - ) - ); - } - - @Test - public void ifNull() { - String query = "SELECT IFNULL(lastname, 'Unknown') FROM accounts"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "doc['lastname'].size()==0" - ) - ); - } - - @Test - public void isNullWithMathExpr() { - String query = "SELECT ISNULL(1+1) FROM accounts"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "catch(ArithmeticException e)" - ) 
- ); - - } - - @Test(expected = SQLFeatureNotSupportedException.class) - public void emptyQueryShouldThrowSQLFeatureNotSupportedException() - throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { - OpenSearchActionFactory.create(Mockito.mock(Client.class), ""); - } - - @Test(expected = SQLFeatureNotSupportedException.class) - public void emptyNewLineQueryShouldThrowSQLFeatureNotSupportedException() - throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { - OpenSearchActionFactory.create(Mockito.mock(Client.class), "\n"); - } - - @Test(expected = SQLFeatureNotSupportedException.class) - public void emptyNewLineQueryShouldThrowSQLFeatureNotSupportedException2() - throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { - OpenSearchActionFactory.create(Mockito.mock(Client.class), "\r\n"); - } - - @Test(expected = SQLFeatureNotSupportedException.class) - public void queryWithoutSpaceShouldSQLFeatureNotSupportedException() - throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { - OpenSearchActionFactory.create(Mockito.mock(Client.class), "SELE"); - } - - @Test(expected = SQLFeatureNotSupportedException.class) - public void spacesOnlyQueryShouldThrowSQLFeatureNotSupportedException() - throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { - OpenSearchActionFactory.create(Mockito.mock(Client.class), " "); - } - - private String query(String from, String... 
statements) { - return explain(SELECT_ALL + " " + from + " " + String.join(" ", statements)); - } - - private String query(String sql) { - return explain(sql); - } - - private Matcher contains(AbstractQueryBuilder queryBuilder) { - return containsString(Strings.toString(XContentType.JSON, queryBuilder, false, false)); - } + private static final String SELECT_ALL = "SELECT *"; + private static final String FROM_ACCOUNTS = + "FROM " + TestsConstants.TEST_INDEX_ACCOUNT + "/account"; + private static final String FROM_NESTED = + "FROM " + TestsConstants.TEST_INDEX_NESTED_TYPE + "/nestedType"; + private static final String FROM_PHRASE = "FROM " + TestsConstants.TEST_INDEX_PHRASE + "/phrase"; + + @Test + public void query() { + assertThat(query(FROM_ACCOUNTS, "WHERE QUERY('CA')"), contains(queryStringQuery("CA"))); + } + + @Test + public void matchQueryRegularField() { + assertThat( + query(FROM_ACCOUNTS, "WHERE MATCH_QUERY(firstname, 'Ayers')"), + contains(matchQuery("firstname", "Ayers"))); + } + + @Test + public void matchQueryNestedField() { + assertThat( + query(FROM_NESTED, "WHERE MATCH_QUERY(NESTED(comment.data), 'aa')"), + contains(nestedQuery("comment", matchQuery("comment.data", "aa"), ScoreMode.None))); + } + + @Test + public void scoreQuery() { + assertThat( + query(FROM_ACCOUNTS, "WHERE SCORE(MATCH_QUERY(firstname, 'Ayers'), 10)"), + contains(constantScoreQuery(matchQuery("firstname", "Ayers")).boost(10))); + } + + @Test + public void scoreQueryWithNestedField() { + assertThat( + query(FROM_NESTED, "WHERE SCORE(MATCH_QUERY(NESTED(comment.data), 'ab'), 10)"), + contains( + constantScoreQuery( + nestedQuery("comment", matchQuery("comment.data", "ab"), ScoreMode.None)) + .boost(10))); + } + + @Test + public void wildcardQueryRegularField() { + assertThat( + query(FROM_ACCOUNTS, "WHERE WILDCARD_QUERY(city.keyword, 'B*')"), + contains(wildcardQuery("city.keyword", "B*"))); + } + + @Test + public void wildcardQueryNestedField() { + assertThat( + query(FROM_NESTED, 
"WHERE WILDCARD_QUERY(nested(comment.data), 'a*')"), + contains(nestedQuery("comment", wildcardQuery("comment.data", "a*"), ScoreMode.None))); + } + + @Test + public void matchPhraseQueryDefault() { + assertThat( + query(FROM_PHRASE, "WHERE MATCH_PHRASE(phrase, 'brown fox')"), + contains(matchPhraseQuery("phrase", "brown fox"))); + } + + @Test + public void matchPhraseQueryWithSlop() { + assertThat( + query(FROM_PHRASE, "WHERE MATCH_PHRASE(phrase, 'brown fox', slop=2)"), + contains(matchPhraseQuery("phrase", "brown fox").slop(2))); + } + + @Test + public void multiMatchQuerySingleField() { + assertThat( + query(FROM_ACCOUNTS, "WHERE MULTI_MATCH(query='Ayers', fields='firstname')"), + contains(multiMatchQuery("Ayers").field("firstname"))); + } + + @Test + public void multiMatchQueryWildcardField() { + assertThat( + query(FROM_ACCOUNTS, "WHERE MULTI_MATCH(query='Ay', fields='*name', type='phrase_prefix')"), + contains( + multiMatchQuery("Ay").field("*name").type(MultiMatchQueryBuilder.Type.PHRASE_PREFIX))); + } + + @Test + public void numberLiteralInSelectField() { + String query = "SELECT 2 AS number FROM bank WHERE age > 20"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "def assign")); + } + + @Test + public void ifFunctionWithConditionStatement() { + String query = "SELECT IF(age > 35, 'elastic', 'search') AS Ages FROM accounts"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptField, "boolean cond = doc['age'].value > 35;")); + } + + @Test + public void ifFunctionWithEquationConditionStatement() { + String query = "SELECT IF(age = 35, 'elastic', 'search') AS Ages FROM accounts"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptField, "boolean cond = doc['age'].value == 
35;")); + } + + @Test + public void ifFunctionWithConstantConditionStatement() { + String query = "SELECT IF(1 = 2, 'elastic', 'search') FROM accounts"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "boolean cond = 1 == 2;")); + } + + @Test + public void ifNull() { + String query = "SELECT IFNULL(lastname, 'Unknown') FROM accounts"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue(CheckScriptContents.scriptContainsString(scriptField, "doc['lastname'].size()==0")); + } + + @Test + public void isNullWithMathExpr() { + String query = "SELECT ISNULL(1+1) FROM accounts"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "catch(ArithmeticException e)")); + } + + @Test(expected = SQLFeatureNotSupportedException.class) + public void emptyQueryShouldThrowSQLFeatureNotSupportedException() + throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { + OpenSearchActionFactory.create(Mockito.mock(Client.class), ""); + } + + @Test(expected = SQLFeatureNotSupportedException.class) + public void emptyNewLineQueryShouldThrowSQLFeatureNotSupportedException() + throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { + OpenSearchActionFactory.create(Mockito.mock(Client.class), "\n"); + } + + @Test(expected = SQLFeatureNotSupportedException.class) + public void emptyNewLineQueryShouldThrowSQLFeatureNotSupportedException2() + throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { + OpenSearchActionFactory.create(Mockito.mock(Client.class), "\r\n"); + } + + @Test(expected = SQLFeatureNotSupportedException.class) + public void queryWithoutSpaceShouldSQLFeatureNotSupportedException() + throws SQLFeatureNotSupportedException, SqlParseException, 
SQLFeatureDisabledException { + OpenSearchActionFactory.create(Mockito.mock(Client.class), "SELE"); + } + + @Test(expected = SQLFeatureNotSupportedException.class) + public void spacesOnlyQueryShouldThrowSQLFeatureNotSupportedException() + throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { + OpenSearchActionFactory.create(Mockito.mock(Client.class), " "); + } + + private String query(String from, String... statements) { + return explain(SELECT_ALL + " " + from + " " + String.join(" ", statements)); + } + + private String query(String sql) { + return explain(sql); + } + + private Matcher contains(AbstractQueryBuilder queryBuilder) { + return containsString(Strings.toString(XContentType.JSON, queryBuilder, false, false)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/SqlRequestFactoryTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/SqlRequestFactoryTest.java index f93461724d..63fcd98524 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/SqlRequestFactoryTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/SqlRequestFactoryTest.java @@ -3,12 +3,10 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static java.util.Collections.emptyList; import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.spy; import org.junit.Assert; import org.junit.Before; @@ -29,168 +27,186 @@ @RunWith(MockitoJUnitRunner.class) public class SqlRequestFactoryTest { - @Mock - private RestRequest restRequest; - - @Mock - private OpenSearchSettings settings; - - @Before - public void setup() { - // Force return empty list to avoid ClusterSettings be invoked which is a final class and hard to mock. - // In this case, default value in Setting will be returned all the time. 
- doReturn(emptyList()).when(settings).getSettings(); - LocalClusterState.state().setPluginSettings(settings); - } - - @Ignore("RestRequest is a final method, and Mockito 1.x cannot mock it." + - "Ignore this test case till we can upgrade to Mockito 2.x") - @Test - public void testGenerateSqlRequest_fromUrlParams() { - String sql = "select * from table"; - Mockito.when(restRequest.method()).thenReturn(RestRequest.Method.GET); - Mockito.when(restRequest.param("sql")).thenReturn(sql); - - SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(restRequest); - - Assert.assertFalse(sqlRequest instanceof PreparedStatementRequest); - Assert.assertEquals(sql, sqlRequest.getSql()); - } - - @Test - public void testGenerateSqlRequest_sqlRequestFromPayload() { - String payload = "{ \"query\": \"select * from my_table\" }"; - - Mockito.when(this.restRequest.content()).thenReturn(new BytesArray(payload)); - Mockito.when(this.restRequest.method()).thenReturn(RestRequest.Method.POST); - - SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(this.restRequest); - Assert.assertFalse(sqlRequest instanceof PreparedStatementRequest); - Assert.assertEquals("select * from my_table", sqlRequest.getSql()); - } - - @Test - public void testGenerateSqlRequest_preparedStatementFromPayload() { - String payload = "{\n" + - " \"query\": \"select * from my_table where int_param = ? and double_param = ? and string_param = ? and date_param = ? 
and null_param = ?\",\n" + - " \"parameters\": [\n" + - " {\n" + - " \"type\": \"integer\",\n" + - " \"value\": 1\n" + - " },\n" + - " {\n" + - " \"type\": \"double\",\n" + - " \"value\": \"2.0\"\n" + - " },\n" + - " {\n" + - " \"type\": \"string\",\n" + - " \"value\": \"string_value\"\n" + - " },\n" + - " {\n" + - " \"type\": \"date\",\n" + - " \"value\": \"2000-01-01\"\n" + - " },\n" + - " {\n" + - " \"type\": \"null\",\n" + - " \"value\": null\n" + - " }\n" + - " ]\n" + - "}"; - Mockito.when(this.restRequest.content()).thenReturn(new BytesArray(payload)); - Mockito.when(this.restRequest.method()).thenReturn(RestRequest.Method.POST); - - SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(this.restRequest); - - Assert.assertTrue(sqlRequest instanceof PreparedStatementRequest); - PreparedStatementRequest preparedStatementRequest = (PreparedStatementRequest) sqlRequest; - Assert.assertEquals("select * from my_table where int_param = ? and double_param = ? and string_param = ? and date_param = ? 
and null_param = ?", preparedStatementRequest.getPreparedStatement()); - Assert.assertEquals("select * from my_table where int_param = 1 and double_param = 2.0 and string_param = 'string_value' and date_param = '2000-01-01' and null_param = null", preparedStatementRequest.getSql()); - Assert.assertEquals(5, preparedStatementRequest.getParameters().size()); - Assert.assertTrue(preparedStatementRequest.getParameters().get(0).getValue() instanceof Long); - Assert.assertTrue(preparedStatementRequest.getParameters().get(1).getValue() instanceof Double); - Assert.assertTrue(preparedStatementRequest.getParameters().get(2) instanceof PreparedStatementRequest.StringParameter); - Assert.assertTrue(preparedStatementRequest.getParameters().get(3) instanceof PreparedStatementRequest.StringParameter); - Assert.assertTrue(preparedStatementRequest.getParameters().get(4) instanceof PreparedStatementRequest.NullParameter); - } - - @Test - public void testGenerateSqlRequest_prearedStatementFromPayload2() { - // type not covered in above test case - String payload = "{\n" + - " \"query\": \"select * from my_table where long_param = ? and float_param = ? and keyword_param = ? and boolean_param = ? 
and byte_param = ?\",\n" + - " \"parameters\": [\n" + - " {\n" + - " \"type\": \"long\",\n" + - " \"value\": 1\n" + - " },\n" + - " {\n" + - " \"type\": \"float\",\n" + - " \"value\": \"2.0\"\n" + - " },\n" + - " {\n" + - " \"type\": \"keyword\",\n" + - " \"value\": \"string_value\"\n" + - " },\n" + - " {\n" + - " \"type\": \"boolean\",\n" + - " \"value\": true\n" + - " },\n" + - " {\n" + - " \"type\": \"byte\",\n" + - " \"value\": 91\n" + - " }\n" + - " ]\n" + - "}"; - Mockito.when(this.restRequest.content()).thenReturn(new BytesArray(payload)); - Mockito.when(this.restRequest.method()).thenReturn(RestRequest.Method.POST); - SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(this.restRequest); - - Assert.assertTrue(sqlRequest instanceof PreparedStatementRequest); - PreparedStatementRequest preparedStatementRequest = (PreparedStatementRequest) sqlRequest; - Assert.assertEquals(5, preparedStatementRequest.getParameters().size()); - Assert.assertTrue(preparedStatementRequest.getParameters().get(0).getValue() instanceof Long); - Assert.assertTrue(preparedStatementRequest.getParameters().get(1).getValue() instanceof Double); - Assert.assertTrue(preparedStatementRequest.getParameters().get(2) instanceof PreparedStatementRequest.StringParameter); - System.out.println(preparedStatementRequest.getParameters().get(3)); - Assert.assertTrue(preparedStatementRequest.getParameters().get(3).getValue() instanceof Boolean); - Assert.assertTrue(preparedStatementRequest.getParameters().get(4).getValue() instanceof Long); - - } - - @Test(expected = IllegalArgumentException.class) - public void testGenerateSqlRequest_unsupportedHttpMethod() { - Mockito.when(this.restRequest.method()).thenReturn(RestRequest.Method.PUT); - SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(this.restRequest); - } - - @Test(expected = IllegalArgumentException.class) - public void testGenerateSqlRequest_invalidJson() { - String payload = "{\n" + - " \"query\": \"select * from my_table where 
param1 = ?\",\n"; - Mockito.when(this.restRequest.content()).thenReturn(new BytesArray(payload)); - Mockito.when(this.restRequest.method()).thenReturn(RestRequest.Method.POST); - - SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(this.restRequest); - } - - @Test(expected = IllegalArgumentException.class) - public void testGenerateSqlRequest_unsupportedType() { - String payload = "{\n" + - " \"query\": \"select * from my_table where param1 = ?\",\n" + - " \"parameters\": [\n" + - " {\n" + - " \"type\": \"unsupported_type\",\n" + - " \"value\": 1\n" + - " },\n" + - " {\n" + - " \"type\": \"string\",\n" + - " \"value\": \"string_value\"\n" + - " }\n" + - " ]\n" + - "}"; - Mockito.when(this.restRequest.content()).thenReturn(new BytesArray(payload)); - Mockito.when(this.restRequest.method()).thenReturn(RestRequest.Method.POST); - - SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(this.restRequest); - } + @Mock private RestRequest restRequest; + + @Mock private OpenSearchSettings settings; + + @Before + public void setup() { + // Force return empty list to avoid ClusterSettings be invoked which is a final class and hard + // to mock. + // In this case, default value in Setting will be returned all the time. + doReturn(emptyList()).when(settings).getSettings(); + LocalClusterState.state().setPluginSettings(settings); + } + + @Ignore( + "RestRequest is a final method, and Mockito 1.x cannot mock it." 
+ + "Ignore this test case till we can upgrade to Mockito 2.x") + @Test + public void testGenerateSqlRequest_fromUrlParams() { + String sql = "select * from table"; + Mockito.when(restRequest.method()).thenReturn(RestRequest.Method.GET); + Mockito.when(restRequest.param("sql")).thenReturn(sql); + + SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(restRequest); + + Assert.assertFalse(sqlRequest instanceof PreparedStatementRequest); + Assert.assertEquals(sql, sqlRequest.getSql()); + } + + @Test + public void testGenerateSqlRequest_sqlRequestFromPayload() { + String payload = "{ \"query\": \"select * from my_table\" }"; + + Mockito.when(this.restRequest.content()).thenReturn(new BytesArray(payload)); + Mockito.when(this.restRequest.method()).thenReturn(RestRequest.Method.POST); + + SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(this.restRequest); + Assert.assertFalse(sqlRequest instanceof PreparedStatementRequest); + Assert.assertEquals("select * from my_table", sqlRequest.getSql()); + } + + @Test + public void testGenerateSqlRequest_preparedStatementFromPayload() { + String payload = + "{\n" + + " \"query\": \"select * from my_table where int_param = ? and double_param = ? and" + + " string_param = ? and date_param = ? 
and null_param = ?\",\n" + + " \"parameters\": [\n" + + " {\n" + + " \"type\": \"integer\",\n" + + " \"value\": 1\n" + + " },\n" + + " {\n" + + " \"type\": \"double\",\n" + + " \"value\": \"2.0\"\n" + + " },\n" + + " {\n" + + " \"type\": \"string\",\n" + + " \"value\": \"string_value\"\n" + + " },\n" + + " {\n" + + " \"type\": \"date\",\n" + + " \"value\": \"2000-01-01\"\n" + + " },\n" + + " {\n" + + " \"type\": \"null\",\n" + + " \"value\": null\n" + + " }\n" + + " ]\n" + + "}"; + Mockito.when(this.restRequest.content()).thenReturn(new BytesArray(payload)); + Mockito.when(this.restRequest.method()).thenReturn(RestRequest.Method.POST); + + SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(this.restRequest); + + Assert.assertTrue(sqlRequest instanceof PreparedStatementRequest); + PreparedStatementRequest preparedStatementRequest = (PreparedStatementRequest) sqlRequest; + Assert.assertEquals( + "select * from my_table where int_param = ? and double_param = ? and string_param = ? and" + + " date_param = ? 
and null_param = ?", + preparedStatementRequest.getPreparedStatement()); + Assert.assertEquals( + "select * from my_table where int_param = 1 and double_param = 2.0 and string_param =" + + " 'string_value' and date_param = '2000-01-01' and null_param = null", + preparedStatementRequest.getSql()); + Assert.assertEquals(5, preparedStatementRequest.getParameters().size()); + Assert.assertTrue(preparedStatementRequest.getParameters().get(0).getValue() instanceof Long); + Assert.assertTrue(preparedStatementRequest.getParameters().get(1).getValue() instanceof Double); + Assert.assertTrue( + preparedStatementRequest.getParameters().get(2) + instanceof PreparedStatementRequest.StringParameter); + Assert.assertTrue( + preparedStatementRequest.getParameters().get(3) + instanceof PreparedStatementRequest.StringParameter); + Assert.assertTrue( + preparedStatementRequest.getParameters().get(4) + instanceof PreparedStatementRequest.NullParameter); + } + + @Test + public void testGenerateSqlRequest_prearedStatementFromPayload2() { + // type not covered in above test case + String payload = + "{\n" + + " \"query\": \"select * from my_table where long_param = ? and float_param = ? and" + + " keyword_param = ? and boolean_param = ? 
and byte_param = ?\",\n" + + " \"parameters\": [\n" + + " {\n" + + " \"type\": \"long\",\n" + + " \"value\": 1\n" + + " },\n" + + " {\n" + + " \"type\": \"float\",\n" + + " \"value\": \"2.0\"\n" + + " },\n" + + " {\n" + + " \"type\": \"keyword\",\n" + + " \"value\": \"string_value\"\n" + + " },\n" + + " {\n" + + " \"type\": \"boolean\",\n" + + " \"value\": true\n" + + " },\n" + + " {\n" + + " \"type\": \"byte\",\n" + + " \"value\": 91\n" + + " }\n" + + " ]\n" + + "}"; + Mockito.when(this.restRequest.content()).thenReturn(new BytesArray(payload)); + Mockito.when(this.restRequest.method()).thenReturn(RestRequest.Method.POST); + SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(this.restRequest); + + Assert.assertTrue(sqlRequest instanceof PreparedStatementRequest); + PreparedStatementRequest preparedStatementRequest = (PreparedStatementRequest) sqlRequest; + Assert.assertEquals(5, preparedStatementRequest.getParameters().size()); + Assert.assertTrue(preparedStatementRequest.getParameters().get(0).getValue() instanceof Long); + Assert.assertTrue(preparedStatementRequest.getParameters().get(1).getValue() instanceof Double); + Assert.assertTrue( + preparedStatementRequest.getParameters().get(2) + instanceof PreparedStatementRequest.StringParameter); + System.out.println(preparedStatementRequest.getParameters().get(3)); + Assert.assertTrue( + preparedStatementRequest.getParameters().get(3).getValue() instanceof Boolean); + Assert.assertTrue(preparedStatementRequest.getParameters().get(4).getValue() instanceof Long); + } + + @Test(expected = IllegalArgumentException.class) + public void testGenerateSqlRequest_unsupportedHttpMethod() { + Mockito.when(this.restRequest.method()).thenReturn(RestRequest.Method.PUT); + SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(this.restRequest); + } + + @Test(expected = IllegalArgumentException.class) + public void testGenerateSqlRequest_invalidJson() { + String payload = "{\n" + " \"query\": \"select * from my_table where 
param1 = ?\",\n"; + Mockito.when(this.restRequest.content()).thenReturn(new BytesArray(payload)); + Mockito.when(this.restRequest.method()).thenReturn(RestRequest.Method.POST); + + SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(this.restRequest); + } + + @Test(expected = IllegalArgumentException.class) + public void testGenerateSqlRequest_unsupportedType() { + String payload = + "{\n" + + " \"query\": \"select * from my_table where param1 = ?\",\n" + + " \"parameters\": [\n" + + " {\n" + + " \"type\": \"unsupported_type\",\n" + + " \"value\": 1\n" + + " },\n" + + " {\n" + + " \"type\": \"string\",\n" + + " \"value\": \"string_value\"\n" + + " }\n" + + " ]\n" + + "}"; + Mockito.when(this.restRequest.content()).thenReturn(new BytesArray(payload)); + Mockito.when(this.restRequest.method()).thenReturn(RestRequest.Method.POST); + + SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(this.restRequest); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/SqlRequestParamTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/SqlRequestParamTest.java index 103d43d95c..3c47832761 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/SqlRequestParamTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/SqlRequestParamTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.junit.Assert.assertEquals; @@ -20,52 +19,52 @@ import org.opensearch.sql.legacy.request.SqlRequestParam; public class SqlRequestParamTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = ExpectedException.none(); - @Test - public void shouldReturnTrueIfPrettyParamsIsTrue() { - assertTrue(SqlRequestParam.isPrettyFormat(ImmutableMap.of(QUERY_PARAMS_PRETTY, "true"))); - } + @Test + public void shouldReturnTrueIfPrettyParamsIsTrue() { + 
assertTrue(SqlRequestParam.isPrettyFormat(ImmutableMap.of(QUERY_PARAMS_PRETTY, "true"))); + } - @Test - public void shouldReturnTrueIfPrettyParamsIsEmpty() { - assertTrue(SqlRequestParam.isPrettyFormat(ImmutableMap.of(QUERY_PARAMS_PRETTY, ""))); - } + @Test + public void shouldReturnTrueIfPrettyParamsIsEmpty() { + assertTrue(SqlRequestParam.isPrettyFormat(ImmutableMap.of(QUERY_PARAMS_PRETTY, ""))); + } - @Test - public void shouldReturnFalseIfNoPrettyParams() { - assertFalse(SqlRequestParam.isPrettyFormat(ImmutableMap.of())); - } + @Test + public void shouldReturnFalseIfNoPrettyParams() { + assertFalse(SqlRequestParam.isPrettyFormat(ImmutableMap.of())); + } - @Test - public void shouldReturnFalseIfPrettyParamsIsUnknownValue() { - assertFalse(SqlRequestParam.isPrettyFormat(ImmutableMap.of(QUERY_PARAMS_PRETTY, "unknown"))); - } + @Test + public void shouldReturnFalseIfPrettyParamsIsUnknownValue() { + assertFalse(SqlRequestParam.isPrettyFormat(ImmutableMap.of(QUERY_PARAMS_PRETTY, "unknown"))); + } - @Test - public void shouldReturnJSONIfFormatParamsIsJSON() { - assertEquals(Format.JSON, SqlRequestParam.getFormat(ImmutableMap.of(QUERY_PARAMS_FORMAT, "json"))); - } + @Test + public void shouldReturnJSONIfFormatParamsIsJSON() { + assertEquals( + Format.JSON, SqlRequestParam.getFormat(ImmutableMap.of(QUERY_PARAMS_FORMAT, "json"))); + } - @Test - public void shouldReturnDefaultFormatIfNoFormatParams() { - assertEquals(Format.JDBC, SqlRequestParam.getFormat(ImmutableMap.of())); - } + @Test + public void shouldReturnDefaultFormatIfNoFormatParams() { + assertEquals(Format.JDBC, SqlRequestParam.getFormat(ImmutableMap.of())); + } - @Test - public void shouldThrowExceptionIfFormatParamsIsEmpty() { - exceptionRule.expect(IllegalArgumentException.class); - exceptionRule.expectMessage("Failed to create executor due to unknown response format: "); + @Test + public void shouldThrowExceptionIfFormatParamsIsEmpty() { + exceptionRule.expect(IllegalArgumentException.class); + 
exceptionRule.expectMessage("Failed to create executor due to unknown response format: "); - assertEquals(Format.JDBC, SqlRequestParam.getFormat(ImmutableMap.of(QUERY_PARAMS_FORMAT, ""))); - } + assertEquals(Format.JDBC, SqlRequestParam.getFormat(ImmutableMap.of(QUERY_PARAMS_FORMAT, ""))); + } - @Test - public void shouldThrowExceptionIfFormatParamsIsNotSupported() { - exceptionRule.expect(IllegalArgumentException.class); - exceptionRule.expectMessage("Failed to create executor due to unknown response format: xml"); + @Test + public void shouldThrowExceptionIfFormatParamsIsNotSupported() { + exceptionRule.expect(IllegalArgumentException.class); + exceptionRule.expectMessage("Failed to create executor due to unknown response format: xml"); - SqlRequestParam.getFormat(ImmutableMap.of(QUERY_PARAMS_FORMAT, "xml")); - } + SqlRequestParam.getFormat(ImmutableMap.of(QUERY_PARAMS_FORMAT, "xml")); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/StringOperatorsTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/StringOperatorsTest.java index b2d13f3ead..27b8e7f2c6 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/StringOperatorsTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/StringOperatorsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.junit.Assert.assertTrue; @@ -17,196 +16,154 @@ public class StringOperatorsTest { - private static SqlParser parser; - - @BeforeClass - public static void init() { parser = new SqlParser(); } - - @Test - public void substringTest() { - String query = "SELECT substring(lastname, 2, 1) FROM accounts WHERE substring(lastname, 2, 1) = 'a' " + - "GROUP BY substring(lastname, 2, 1) ORDER BY substring(lastname, 2, 1)"; - - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - 
"doc['lastname'].value.substring(1, end)")); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "doc['lastname'].value.substring(1, end)" - ) - ); - } - - @Test - public void substringIndexOutOfBoundTest() { - String query = "SELECT substring('sampleName', 0, 20) FROM accounts"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "def end = (int) Math.min(0 + 20, 'sampleName'.length())" - ) - ); - } - - @Test - public void lengthTest() { - String query = "SELECT length(lastname) FROM accounts WHERE length(lastname) = 5 " + - "GROUP BY length(lastname) ORDER BY length(lastname)"; - - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "doc['lastname'].value.length()" - ) - ); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "doc['lastname'].value.length()" - ) - ); - } - - @Test - public void replaceTest() { - String query = "SELECT replace(lastname, 'a', 'A') FROM accounts WHERE replace(lastname, 'a', 'A') = 'aba' " + - "GROUP BY replace(lastname, 'a', 'A') ORDER BY replace(lastname, 'a', 'A')"; - - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "doc['lastname'].value.replace('a','A')" - ) - ); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "doc['lastname'].value.replace('a','A')" - ) - ); - } - - @Test - public void locateTest() { - String query = "SELECT locate('a', lastname, 1) FROM accounts WHERE 
locate('a', lastname, 1) = 4 " + - "GROUP BY locate('a', lastname, 1) ORDER BY locate('a', lastname, 1)"; - - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "doc['lastname'].value.indexOf('a',0)+1" - ) - ); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "doc['lastname'].value.indexOf('a',0)+1" - ) - ); - } - - @Test - public void ltrimTest() { - String query = "SELECT ltrim(lastname) FROM accounts WHERE ltrim(lastname) = 'abc' " + - "GROUP BY ltrim(lastname) ORDER BY ltrim(lastname)"; - - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Character.isWhitespace(doc['lastname'].value.charAt(pos))" - ) - ); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Character.isWhitespace(doc['lastname'].value.charAt(pos))" - ) - ); - } - - @Test - public void rtrimTest() { - String query = "SELECT rtrim(lastname) FROM accounts WHERE rtrim(lastname) = 'cba' " + - "GROUP BY rtrim(lastname) ORDER BY rtrim(lastname)"; - - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "Character.isWhitespace(doc['lastname'].value.charAt(pos))" - ) - ); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "Character.isWhitespace(doc['lastname'].value.charAt(pos))" - ) - ); - } - - @Test - public void asciiTest() { - String query = "SELECT ascii(lastname) FROM accounts WHERE ascii(lastname) = 108 " + - "GROUP BY ascii(lastname) ORDER BY 
ascii(lastname)"; - - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "(int) doc['lastname'].value.charAt(0)" - ) - ); - - ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptFilter, - "(int) doc['lastname'].value.charAt(0)" - ) - ); - } - - @Test - public void left() { - String query = "SELECT left(lastname, 1) FROM accounts"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "doc['lastname'].value.substring(0, len)" - ) - ); - } - - @Test - public void right() { - String query = "SELECT right(lastname, 2) FROM accounts"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "doc['lastname'].value.substring(start)" - ) - ); - } + private static SqlParser parser; + + @BeforeClass + public static void init() { + parser = new SqlParser(); + } + + @Test + public void substringTest() { + String query = + "SELECT substring(lastname, 2, 1) FROM accounts WHERE substring(lastname, 2, 1) = 'a' " + + "GROUP BY substring(lastname, 2, 1) ORDER BY substring(lastname, 2, 1)"; + + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptField, "doc['lastname'].value.substring(1, end)")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptFilter, "doc['lastname'].value.substring(1, end)")); + } + + @Test + public void substringIndexOutOfBoundTest() { + String query = "SELECT substring('sampleName', 0, 20) FROM accounts"; + ScriptField scriptField = 
CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptField, "def end = (int) Math.min(0 + 20, 'sampleName'.length())")); + } + + @Test + public void lengthTest() { + String query = + "SELECT length(lastname) FROM accounts WHERE length(lastname) = 5 " + + "GROUP BY length(lastname) ORDER BY length(lastname)"; + + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "doc['lastname'].value.length()")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString(scriptFilter, "doc['lastname'].value.length()")); + } + + @Test + public void replaceTest() { + String query = + "SELECT replace(lastname, 'a', 'A') FROM accounts WHERE replace(lastname, 'a', 'A') = 'aba'" + + " GROUP BY replace(lastname, 'a', 'A') ORDER BY replace(lastname, 'a', 'A')"; + + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptField, "doc['lastname'].value.replace('a','A')")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptFilter, "doc['lastname'].value.replace('a','A')")); + } + + @Test + public void locateTest() { + String query = + "SELECT locate('a', lastname, 1) FROM accounts WHERE locate('a', lastname, 1) = 4 " + + "GROUP BY locate('a', lastname, 1) ORDER BY locate('a', lastname, 1)"; + + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptField, "doc['lastname'].value.indexOf('a',0)+1")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptFilter, 
"doc['lastname'].value.indexOf('a',0)+1")); + } + + @Test + public void ltrimTest() { + String query = + "SELECT ltrim(lastname) FROM accounts WHERE ltrim(lastname) = 'abc' " + + "GROUP BY ltrim(lastname) ORDER BY ltrim(lastname)"; + + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptField, "Character.isWhitespace(doc['lastname'].value.charAt(pos))")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptFilter, "Character.isWhitespace(doc['lastname'].value.charAt(pos))")); + } + + @Test + public void rtrimTest() { + String query = + "SELECT rtrim(lastname) FROM accounts WHERE rtrim(lastname) = 'cba' " + + "GROUP BY rtrim(lastname) ORDER BY rtrim(lastname)"; + + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptField, "Character.isWhitespace(doc['lastname'].value.charAt(pos))")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptFilter, "Character.isWhitespace(doc['lastname'].value.charAt(pos))")); + } + + @Test + public void asciiTest() { + String query = + "SELECT ascii(lastname) FROM accounts WHERE ascii(lastname) = 108 " + + "GROUP BY ascii(lastname) ORDER BY ascii(lastname)"; + + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptField, "(int) doc['lastname'].value.charAt(0)")); + + ScriptFilter scriptFilter = CheckScriptContents.getScriptFilterFromQuery(query, parser); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptFilter, "(int) doc['lastname'].value.charAt(0)")); + } + + @Test + public void left() { + String query = "SELECT left(lastname, 1) FROM 
accounts"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptField, "doc['lastname'].value.substring(0, len)")); + } + + @Test + public void right() { + String query = "SELECT right(lastname, 2) FROM accounts"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + assertTrue( + CheckScriptContents.scriptContainsString( + scriptField, "doc['lastname'].value.substring(start)")); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/WhereWithBoolConditionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/WhereWithBoolConditionTest.java index e7df57ce31..de6f2c8dda 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/WhereWithBoolConditionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/WhereWithBoolConditionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest; import static org.hamcrest.MatcherAssert.assertThat; @@ -25,58 +24,60 @@ import org.opensearch.sql.legacy.util.TestsConstants; import org.opensearch.sql.legacy.utils.StringUtils; - public class WhereWithBoolConditionTest { - @Test - public void whereWithBoolCompilationTest() - throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { - query(StringUtils.format("SELECT * FROM %s WHERE male = false", TestsConstants.TEST_INDEX_BANK)); - } + @Test + public void whereWithBoolCompilationTest() + throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { + query( + StringUtils.format("SELECT * FROM %s WHERE male = false", TestsConstants.TEST_INDEX_BANK)); + } - @Test - public void selectAllTest() - throws SQLFeatureNotSupportedException, SqlParseException, IOException, - SQLFeatureDisabledException { - String expectedOutput = Files.toString( - new File(getResourcePath() + 
"src/test/resources/expectedOutput/select_where_true.json"), StandardCharsets.UTF_8) - .replaceAll("\r", ""); + @Test + public void selectAllTest() + throws SQLFeatureNotSupportedException, + SqlParseException, + IOException, + SQLFeatureDisabledException { + String expectedOutput = + Files.toString( + new File( + getResourcePath() + "src/test/resources/expectedOutput/select_where_true.json"), + StandardCharsets.UTF_8) + .replaceAll("\r", ""); - assertThat(removeSpaces( - query( - StringUtils.format( - "SELECT * " + - "FROM %s " + - "WHERE male = true", - TestsConstants.TEST_INDEX_BANK)) - ), - equalTo(removeSpaces(expectedOutput)) - ); - } + assertThat( + removeSpaces( + query( + StringUtils.format( + "SELECT * " + "FROM %s " + "WHERE male = true", + TestsConstants.TEST_INDEX_BANK))), + equalTo(removeSpaces(expectedOutput))); + } - private String query(String query) - throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { - return explain(query); - } + private String query(String query) + throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { + return explain(query); + } - private String explain(String sql) - throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { - Client mockClient = Mockito.mock(Client.class); - CheckScriptContents.stubMockClient(mockClient); - QueryAction queryAction = OpenSearchActionFactory.create(mockClient, sql); - return queryAction.explain().explain(); - } + private String explain(String sql) + throws SQLFeatureNotSupportedException, SqlParseException, SQLFeatureDisabledException { + Client mockClient = Mockito.mock(Client.class); + CheckScriptContents.stubMockClient(mockClient); + QueryAction queryAction = OpenSearchActionFactory.create(mockClient, sql); + return queryAction.explain().explain(); + } - private String removeSpaces(String s) { - return s.replaceAll("\\s+", ""); - } + private String removeSpaces(String s) { + return 
s.replaceAll("\\s+", ""); + } - private String getResourcePath() { - String projectRoot = System.getProperty("project.root"); - if ( projectRoot!= null && projectRoot.trim().length() > 0) { - return projectRoot.trim() + "/"; - } else { - return ""; - } + private String getResourcePath() { + String projectRoot = System.getProperty("project.root"); + if (projectRoot != null && projectRoot.trim().length() > 0) { + return projectRoot.trim() + "/"; + } else { + return ""; } + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/cursor/DefaultCursorTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/cursor/DefaultCursorTest.java index cfb70dc83c..1b9662035d 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/cursor/DefaultCursorTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/cursor/DefaultCursorTest.java @@ -3,13 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.cursor; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.emptyOrNullString; import static org.hamcrest.Matchers.startsWith; import static org.junit.Assert.assertEquals; -import static org.hamcrest.MatcherAssert.assertThat; import java.util.ArrayList; import java.util.Collections; @@ -19,40 +18,39 @@ public class DefaultCursorTest { - @Test - public void checkCursorType() { - DefaultCursor cursor = new DefaultCursor(); - assertEquals(cursor.getType(), CursorType.DEFAULT); - } - - - @Test - public void cursorShouldStartWithCursorTypeID() { - DefaultCursor cursor = new DefaultCursor(); - cursor.setRowsLeft(50); - cursor.setScrollId("dbdskbcdjksbcjkdsbcjk+//"); - cursor.setIndexPattern("myIndex"); - cursor.setFetchSize(500); - cursor.setFieldAliasMap(Collections.emptyMap()); - cursor.setColumns(new ArrayList<>()); - assertThat(cursor.generateCursorId(), startsWith(cursor.getType().getId()+ ":") ); - } - - @Test - public void 
nullCursorWhenRowLeftIsLessThanEqualZero() { - DefaultCursor cursor = new DefaultCursor(); - assertThat(cursor.generateCursorId(), emptyOrNullString()); - - cursor.setRowsLeft(-10); - assertThat(cursor.generateCursorId(), emptyOrNullString()); - } - - @Test - public void nullCursorWhenScrollIDIsNullOrEmpty() { - DefaultCursor cursor = new DefaultCursor(); - assertThat(cursor.generateCursorId(), emptyOrNullString()); - - cursor.setScrollId(""); - assertThat(cursor.generateCursorId(), emptyOrNullString()); - } + @Test + public void checkCursorType() { + DefaultCursor cursor = new DefaultCursor(); + assertEquals(cursor.getType(), CursorType.DEFAULT); + } + + @Test + public void cursorShouldStartWithCursorTypeID() { + DefaultCursor cursor = new DefaultCursor(); + cursor.setRowsLeft(50); + cursor.setScrollId("dbdskbcdjksbcjkdsbcjk+//"); + cursor.setIndexPattern("myIndex"); + cursor.setFetchSize(500); + cursor.setFieldAliasMap(Collections.emptyMap()); + cursor.setColumns(new ArrayList<>()); + assertThat(cursor.generateCursorId(), startsWith(cursor.getType().getId() + ":")); + } + + @Test + public void nullCursorWhenRowLeftIsLessThanEqualZero() { + DefaultCursor cursor = new DefaultCursor(); + assertThat(cursor.generateCursorId(), emptyOrNullString()); + + cursor.setRowsLeft(-10); + assertThat(cursor.generateCursorId(), emptyOrNullString()); + } + + @Test + public void nullCursorWhenScrollIDIsNullOrEmpty() { + DefaultCursor cursor = new DefaultCursor(); + assertThat(cursor.generateCursorId(), emptyOrNullString()); + + cursor.setScrollId(""); + assertThat(cursor.generateCursorId(), emptyOrNullString()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/domain/ColumnTypeProviderTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/domain/ColumnTypeProviderTest.java index 205c63ad1d..6599d576b3 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/domain/ColumnTypeProviderTest.java +++ 
b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/domain/ColumnTypeProviderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.domain; import static org.junit.Assert.assertEquals; @@ -18,28 +17,29 @@ import org.opensearch.sql.legacy.executor.format.Schema; public class ColumnTypeProviderTest { - @Test - public void singleESDataTypeShouldReturnCorrectSchemaType() { - assertEquals(Schema.Type.LONG, new ColumnTypeProvider(OpenSearchDataType.LONG).get(0)); - } - - @Test - public void productTypeShouldReturnCorrectSchemaType() { - ColumnTypeProvider columnTypeProvider = - new ColumnTypeProvider(new Product(ImmutableList.of(OpenSearchDataType.LONG, OpenSearchDataType.SHORT))); - assertEquals(Schema.Type.LONG, columnTypeProvider.get(0)); - assertEquals(Schema.Type.SHORT, columnTypeProvider.get(1)); - } - - @Test - public void unSupportedTypeShouldReturnDefaultSchemaType() { - ColumnTypeProvider columnTypeProvider = new ColumnTypeProvider(SetOperator.UNION); - assertEquals(COLUMN_DEFAULT_TYPE, columnTypeProvider.get(0)); - } - - @Test - public void providerWithoutColumnTypeShouldReturnDefaultSchemaType() { - ColumnTypeProvider columnTypeProvider = new ColumnTypeProvider(); - assertEquals(COLUMN_DEFAULT_TYPE, columnTypeProvider.get(0)); - } + @Test + public void singleESDataTypeShouldReturnCorrectSchemaType() { + assertEquals(Schema.Type.LONG, new ColumnTypeProvider(OpenSearchDataType.LONG).get(0)); + } + + @Test + public void productTypeShouldReturnCorrectSchemaType() { + ColumnTypeProvider columnTypeProvider = + new ColumnTypeProvider( + new Product(ImmutableList.of(OpenSearchDataType.LONG, OpenSearchDataType.SHORT))); + assertEquals(Schema.Type.LONG, columnTypeProvider.get(0)); + assertEquals(Schema.Type.SHORT, columnTypeProvider.get(1)); + } + + @Test + public void unSupportedTypeShouldReturnDefaultSchemaType() { + ColumnTypeProvider columnTypeProvider = new ColumnTypeProvider(SetOperator.UNION); + 
assertEquals(COLUMN_DEFAULT_TYPE, columnTypeProvider.get(0)); + } + + @Test + public void providerWithoutColumnTypeShouldReturnDefaultSchemaType() { + ColumnTypeProvider columnTypeProvider = new ColumnTypeProvider(); + assertEquals(COLUMN_DEFAULT_TYPE, columnTypeProvider.get(0)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/DeleteResultSetTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/DeleteResultSetTest.java index 31388e79e3..533c2b2989 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/DeleteResultSetTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/DeleteResultSetTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.executor; import static org.hamcrest.MatcherAssert.assertThat; @@ -23,53 +22,52 @@ import org.opensearch.sql.legacy.executor.format.DeleteResultSet; import org.opensearch.sql.legacy.executor.format.Schema; - public class DeleteResultSetTest { - @Mock - NodeClient client; + @Mock NodeClient client; - @Mock - Delete deleteQuery; - - @Test - public void testDeleteResponseForJdbcFormat() throws IOException { + @Mock Delete deleteQuery; - String jsonDeleteResponse = "{\n" + - " \"took\" : 73,\n" + - " \"timed_out\" : false,\n" + - " \"total\" : 1,\n" + - " \"updated\" : 0,\n" + - " \"created\" : 0,\n" + - " \"deleted\" : 10,\n" + - " \"batches\" : 1,\n" + - " \"version_conflicts\" : 0,\n" + - " \"noops\" : 0,\n" + - " \"retries\" : {\n" + - " \"bulk\" : 0,\n" + - " \"search\" : 0\n" + - " },\n" + - " \"throttled_millis\" : 0,\n" + - " \"requests_per_second\" : -1.0,\n" + - " \"throttled_until_millis\" : 0,\n" + - " \"failures\" : [ ]\n" + - "}\n"; + @Test + public void testDeleteResponseForJdbcFormat() throws IOException { - XContentType xContentType = XContentType.JSON; - XContentParser parser = xContentType.xContent().createParser( - NamedXContentRegistry.EMPTY, - 
DeprecationHandler.THROW_UNSUPPORTED_OPERATION, - jsonDeleteResponse - ); + String jsonDeleteResponse = + "{\n" + + " \"took\" : 73,\n" + + " \"timed_out\" : false,\n" + + " \"total\" : 1,\n" + + " \"updated\" : 0,\n" + + " \"created\" : 0,\n" + + " \"deleted\" : 10,\n" + + " \"batches\" : 1,\n" + + " \"version_conflicts\" : 0,\n" + + " \"noops\" : 0,\n" + + " \"retries\" : {\n" + + " \"bulk\" : 0,\n" + + " \"search\" : 0\n" + + " },\n" + + " \"throttled_millis\" : 0,\n" + + " \"requests_per_second\" : -1.0,\n" + + " \"throttled_until_millis\" : 0,\n" + + " \"failures\" : [ ]\n" + + "}\n"; - BulkByScrollResponse deleteResponse = BulkByScrollResponse.fromXContent(parser); - DeleteResultSet deleteResultSet = new DeleteResultSet(client, deleteQuery, deleteResponse); - Schema schema = deleteResultSet.getSchema(); - DataRows dataRows = deleteResultSet.getDataRows(); + XContentType xContentType = XContentType.JSON; + XContentParser parser = + xContentType + .xContent() + .createParser( + NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + jsonDeleteResponse); - assertThat(schema.getHeaders().size(), equalTo(1)); - assertThat(dataRows.getSize(), equalTo(1L)); - assertThat(dataRows.iterator().next().getData(DeleteResultSet.DELETED), equalTo(10L)); - } + BulkByScrollResponse deleteResponse = BulkByScrollResponse.fromXContent(parser); + DeleteResultSet deleteResultSet = new DeleteResultSet(client, deleteQuery, deleteResponse); + Schema schema = deleteResultSet.getSchema(); + DataRows dataRows = deleteResultSet.getDataRows(); + assertThat(schema.getHeaders().size(), equalTo(1)); + assertThat(dataRows.getSize(), equalTo(1L)); + assertThat(dataRows.iterator().next().getData(DeleteResultSet.DELETED), equalTo(10L)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/format/BindingTupleResultSetTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/format/BindingTupleResultSetTest.java index 
d76aa84a5d..fa385fa14b 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/format/BindingTupleResultSetTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/format/BindingTupleResultSetTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.executor.format; import static org.hamcrest.MatcherAssert.assertThat; @@ -27,53 +26,65 @@ public class BindingTupleResultSetTest { - @Test - public void buildDataRowsFromBindingTupleShouldPass() { - assertThat(row( + @Test + public void buildDataRowsFromBindingTupleShouldPass() { + assertThat( + row( Arrays.asList( ColumnNode.builder().name("age").type(Schema.Type.INTEGER).build(), ColumnNode.builder().name("gender").type(Schema.Type.TEXT).build()), - Arrays.asList(BindingTuple.from(ImmutableMap.of("age", 31, "gender", "m")), + Arrays.asList( + BindingTuple.from(ImmutableMap.of("age", 31, "gender", "m")), BindingTuple.from(ImmutableMap.of("age", 31, "gender", "f")), BindingTuple.from(ImmutableMap.of("age", 39, "gender", "m")), BindingTuple.from(ImmutableMap.of("age", 39, "gender", "f")))), - containsInAnyOrder(rowContents(allOf(hasEntry("age", 31), hasEntry("gender", (Object) "m"))), - rowContents(allOf(hasEntry("age", 31), hasEntry("gender", (Object) "f"))), - rowContents(allOf(hasEntry("age", 39), hasEntry("gender", (Object) "m"))), - rowContents(allOf(hasEntry("age", 39), hasEntry("gender", (Object) "f"))))); - } + containsInAnyOrder( + rowContents(allOf(hasEntry("age", 31), hasEntry("gender", (Object) "m"))), + rowContents(allOf(hasEntry("age", 31), hasEntry("gender", (Object) "f"))), + rowContents(allOf(hasEntry("age", 39), hasEntry("gender", (Object) "m"))), + rowContents(allOf(hasEntry("age", 39), hasEntry("gender", (Object) "f"))))); + } - @Test - public void buildDataRowsFromBindingTupleIncludeLongValueShouldPass() { - assertThat(row( + @Test + public void buildDataRowsFromBindingTupleIncludeLongValueShouldPass() 
{ + assertThat( + row( Arrays.asList( ColumnNode.builder().name("longValue").type(Schema.Type.LONG).build(), ColumnNode.builder().name("gender").type(Schema.Type.TEXT).build()), Arrays.asList( BindingTuple.from(ImmutableMap.of("longValue", Long.MAX_VALUE, "gender", "m")), BindingTuple.from(ImmutableMap.of("longValue", Long.MIN_VALUE, "gender", "f")))), - containsInAnyOrder( - rowContents(allOf(hasEntry("longValue", Long.MAX_VALUE), hasEntry("gender", (Object) "m"))), - rowContents(allOf(hasEntry("longValue", Long.MIN_VALUE), hasEntry("gender", (Object) "f"))))); - } + containsInAnyOrder( + rowContents( + allOf(hasEntry("longValue", Long.MAX_VALUE), hasEntry("gender", (Object) "m"))), + rowContents( + allOf(hasEntry("longValue", Long.MIN_VALUE), hasEntry("gender", (Object) "f"))))); + } - @Test - public void buildDataRowsFromBindingTupleIncludeDateShouldPass() { - assertThat(row( + @Test + public void buildDataRowsFromBindingTupleIncludeDateShouldPass() { + assertThat( + row( Arrays.asList( ColumnNode.builder().alias("dateValue").type(Schema.Type.DATE).build(), ColumnNode.builder().alias("gender").type(Schema.Type.TEXT).build()), Arrays.asList( BindingTuple.from(ImmutableMap.of("dateValue", 1529712000000L, "gender", "m")))), - containsInAnyOrder( - rowContents(allOf(hasEntry("dateValue", "2018-06-23 00:00:00.000"), hasEntry("gender", (Object) "m"))))); - } + containsInAnyOrder( + rowContents( + allOf( + hasEntry("dateValue", "2018-06-23 00:00:00.000"), + hasEntry("gender", (Object) "m"))))); + } - private static Matcher rowContents(Matcher> matcher) { - return featureValueOf("DataRows.Row", matcher, DataRows.Row::getContents); - } + private static Matcher rowContents(Matcher> matcher) { + return featureValueOf("DataRows.Row", matcher, DataRows.Row::getContents); + } - private List row(List columnNodes, List bindingTupleList) { - return ImmutableList.copyOf(BindingTupleResultSet.buildDataRows(columnNodes, bindingTupleList).iterator()); - } + private List row( + List 
columnNodes, List bindingTupleList) { + return ImmutableList.copyOf( + BindingTupleResultSet.buildDataRows(columnNodes, bindingTupleList).iterator()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/format/CSVResultsExtractorTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/format/CSVResultsExtractorTest.java index b3afff2ce1..be6029f9af 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/format/CSVResultsExtractorTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/format/CSVResultsExtractorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.executor.format; import static org.hamcrest.MatcherAssert.assertThat; @@ -19,21 +18,25 @@ import org.opensearch.sql.legacy.expression.domain.BindingTuple; public class CSVResultsExtractorTest { - private final CSVResultsExtractor csvResultsExtractor = new CSVResultsExtractor(false, false); - - @Test - public void extractResultsFromBindingTupleListShouldPass() throws CsvExtractorException { - CSVResult csvResult = csv(Arrays.asList(BindingTuple.from(ImmutableMap.of("age", 31, "gender", "m")), - BindingTuple.from(ImmutableMap.of("age", 31, "gender", "f")), - BindingTuple.from(ImmutableMap.of("age", 39, "gender", "m")), - BindingTuple.from(ImmutableMap.of("age", 39, "gender", "f"))), - Arrays.asList("age", "gender")); - - assertThat(csvResult.getHeaders(), contains("age", "gender")); - assertThat(csvResult.getLines(), contains("31,m", "31,f", "39,m", "39,f")); - } - - private CSVResult csv(List bindingTupleList, List fieldNames) throws CsvExtractorException { - return csvResultsExtractor.extractResults(bindingTupleList, false, ",", fieldNames); - } + private final CSVResultsExtractor csvResultsExtractor = new CSVResultsExtractor(false, false); + + @Test + public void extractResultsFromBindingTupleListShouldPass() throws CsvExtractorException { + CSVResult 
csvResult = + csv( + Arrays.asList( + BindingTuple.from(ImmutableMap.of("age", 31, "gender", "m")), + BindingTuple.from(ImmutableMap.of("age", 31, "gender", "f")), + BindingTuple.from(ImmutableMap.of("age", 39, "gender", "m")), + BindingTuple.from(ImmutableMap.of("age", 39, "gender", "f"))), + Arrays.asList("age", "gender")); + + assertThat(csvResult.getHeaders(), contains("age", "gender")); + assertThat(csvResult.getLines(), contains("31,m", "31,f", "39,m", "39,f")); + } + + private CSVResult csv(List bindingTupleList, List fieldNames) + throws CsvExtractorException { + return csvResultsExtractor.extractResults(bindingTupleList, false, ",", fieldNames); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/join/ElasticUtilsTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/join/ElasticUtilsTest.java index 2160affda0..34c9b941d5 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/join/ElasticUtilsTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/executor/join/ElasticUtilsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.executor.join; import java.io.IOException; @@ -23,30 +22,28 @@ @RunWith(MockitoJUnitRunner.class) public class ElasticUtilsTest { - @Mock - MetaSearchResult metaSearchResult; - - /** - * test handling {@link TotalHits} correctly. - */ - @Test - public void hitsAsStringResult() throws IOException { - final SearchHits searchHits = new SearchHits(new SearchHit[]{}, new TotalHits(1, Relation.EQUAL_TO), 0); - final String result = ElasticUtils.hitsAsStringResult(searchHits, metaSearchResult); - - Assert.assertEquals(1, new JSONObject(result).query("/hits/total/value")); - Assert.assertEquals(Relation.EQUAL_TO.toString(), new JSONObject(result).query("/hits/total/relation")); - } - - /** - * test handling {@link TotalHits} with null value correctly. 
- */ - @Test - public void test_hitsAsStringResult_withNullTotalHits() throws IOException { - final SearchHits searchHits = new SearchHits(new SearchHit[]{}, null, 0); - final String result = ElasticUtils.hitsAsStringResult(searchHits, metaSearchResult); - - Assert.assertEquals(0, new JSONObject(result).query("/hits/total/value")); - Assert.assertEquals(Relation.EQUAL_TO.toString(), new JSONObject(result).query("/hits/total/relation")); - } + @Mock MetaSearchResult metaSearchResult; + + /** test handling {@link TotalHits} correctly. */ + @Test + public void hitsAsStringResult() throws IOException { + final SearchHits searchHits = + new SearchHits(new SearchHit[] {}, new TotalHits(1, Relation.EQUAL_TO), 0); + final String result = ElasticUtils.hitsAsStringResult(searchHits, metaSearchResult); + + Assert.assertEquals(1, new JSONObject(result).query("/hits/total/value")); + Assert.assertEquals( + Relation.EQUAL_TO.toString(), new JSONObject(result).query("/hits/total/relation")); + } + + /** test handling {@link TotalHits} with null value correctly. 
*/ + @Test + public void test_hitsAsStringResult_withNullTotalHits() throws IOException { + final SearchHits searchHits = new SearchHits(new SearchHit[] {}, null, 0); + final String result = ElasticUtils.hitsAsStringResult(searchHits, metaSearchResult); + + Assert.assertEquals(0, new JSONObject(result).query("/hits/total/value")); + Assert.assertEquals( + Relation.EQUAL_TO.toString(), new JSONObject(result).query("/hits/total/relation")); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/BinaryExpressionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/BinaryExpressionTest.java index 2f802f4f91..37a0666ad3 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/BinaryExpressionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/BinaryExpressionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.expression.core; import static org.junit.Assert.assertEquals; @@ -21,69 +20,65 @@ @RunWith(MockitoJUnitRunner.class) public class BinaryExpressionTest extends ExpressionTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); - - @Test - public void addIntegerValueShouldPass() { - assertEquals(2, - apply(ScalarOperation.ADD, ref("intValue"), ref("intValue"))); - } - - @Test - public void multipleAddIntegerValueShouldPass() { - assertEquals(3, - apply(ScalarOperation.ADD, ref("intValue"), - of(ScalarOperation.ADD, ref("intValue"), ref("intValue")))); - } - - @Test - public void addDoubleValueShouldPass() { - assertEquals(4d, - apply(ScalarOperation.ADD, ref("doubleValue"), ref("doubleValue"))); - } - - @Test - public void addDoubleAndIntegerShouldPass() { - assertEquals(3d, - apply(ScalarOperation.ADD, ref("doubleValue"), ref("intValue"))); - } - - @Test - public void divideIntegerValueShouldPass() { - assertEquals(0, - apply(ScalarOperation.DIVIDE, 
ref("intValue"), ref("intValue2"))); - } - - @Test - public void divideIntegerAndDoubleShouldPass() { - assertEquals(0.5d, - apply(ScalarOperation.DIVIDE, ref("intValue"), ref("doubleValue"))); - } - - @Test - public void subtractIntAndDoubleShouldPass() { - assertEquals(-1d, - apply(ScalarOperation.SUBTRACT, ref("intValue"), ref("doubleValue"))); - } - - @Test - public void multiplyIntAndDoubleShouldPass() { - assertEquals(2d, - apply(ScalarOperation.MULTIPLY, ref("intValue"), ref("doubleValue"))); - } - - @Test - public void modulesIntAndDoubleShouldPass() { - assertEquals(1d, - apply(ScalarOperation.MODULES, ref("intValue"), ref("doubleValue"))); - } - - @Test - public void addIntAndStringShouldPass() { - exceptionRule.expect(RuntimeException.class); - exceptionRule.expectMessage("unexpected operation type: ADD(INTEGER_VALUE, STRING_VALUE)"); - - assertEquals(2, apply(ScalarOperation.ADD, literal(integerValue(1)), literal(stringValue("stringValue")))); - } + @Rule public ExpectedException exceptionRule = ExpectedException.none(); + + @Test + public void addIntegerValueShouldPass() { + assertEquals(2, apply(ScalarOperation.ADD, ref("intValue"), ref("intValue"))); + } + + @Test + public void multipleAddIntegerValueShouldPass() { + assertEquals( + 3, + apply( + ScalarOperation.ADD, + ref("intValue"), + of(ScalarOperation.ADD, ref("intValue"), ref("intValue")))); + } + + @Test + public void addDoubleValueShouldPass() { + assertEquals(4d, apply(ScalarOperation.ADD, ref("doubleValue"), ref("doubleValue"))); + } + + @Test + public void addDoubleAndIntegerShouldPass() { + assertEquals(3d, apply(ScalarOperation.ADD, ref("doubleValue"), ref("intValue"))); + } + + @Test + public void divideIntegerValueShouldPass() { + assertEquals(0, apply(ScalarOperation.DIVIDE, ref("intValue"), ref("intValue2"))); + } + + @Test + public void divideIntegerAndDoubleShouldPass() { + assertEquals(0.5d, apply(ScalarOperation.DIVIDE, ref("intValue"), ref("doubleValue"))); + } + + @Test + 
public void subtractIntAndDoubleShouldPass() { + assertEquals(-1d, apply(ScalarOperation.SUBTRACT, ref("intValue"), ref("doubleValue"))); + } + + @Test + public void multiplyIntAndDoubleShouldPass() { + assertEquals(2d, apply(ScalarOperation.MULTIPLY, ref("intValue"), ref("doubleValue"))); + } + + @Test + public void modulesIntAndDoubleShouldPass() { + assertEquals(1d, apply(ScalarOperation.MODULES, ref("intValue"), ref("doubleValue"))); + } + + @Test + public void addIntAndStringShouldPass() { + exceptionRule.expect(RuntimeException.class); + exceptionRule.expectMessage("unexpected operation type: ADD(INTEGER_VALUE, STRING_VALUE)"); + + assertEquals( + 2, + apply(ScalarOperation.ADD, literal(integerValue(1)), literal(stringValue("stringValue")))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/CompoundExpressionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/CompoundExpressionTest.java index 2e75ee0c8b..3315024a13 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/CompoundExpressionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/CompoundExpressionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.expression.core; import static org.junit.Assert.assertEquals; @@ -16,10 +15,12 @@ public class CompoundExpressionTest extends ExpressionTest { - @Test - public void absAndAddShouldPass() { - assertEquals(2.0d, apply(ScalarOperation.ABS, of(ScalarOperation.ADD, - literal(doubleValue(-1.0d)), - literal(integerValue(-1))))); - } + @Test + public void absAndAddShouldPass() { + assertEquals( + 2.0d, + apply( + ScalarOperation.ABS, + of(ScalarOperation.ADD, literal(doubleValue(-1.0d)), literal(integerValue(-1))))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/ExpressionTest.java 
b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/ExpressionTest.java index a6b736eca1..08bac51d77 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/ExpressionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/ExpressionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.expression.core; import static org.opensearch.sql.legacy.expression.model.ExprValueUtils.getNumberValue; @@ -15,35 +14,35 @@ import org.opensearch.sql.legacy.expression.core.operator.ScalarOperation; import org.opensearch.sql.legacy.expression.domain.BindingTuple; - public class ExpressionTest { - protected BindingTuple bindingTuple() { - String json = "{\n" + - " \"intValue\": 1,\n" + - " \"intValue2\": 2,\n" + - " \"doubleValue\": 2.0,\n" + - " \"negDoubleValue\": -2.0,\n" + - " \"stringValue\": \"string\",\n" + - " \"booleanValue\": true,\n" + - " \"tupleValue\": {\n" + - " \"intValue\": 1,\n" + - " \"doubleValue\": 2.0,\n" + - " \"stringValue\": \"string\"\n" + - " },\n" + - " \"collectValue\": [\n" + - " 1,\n" + - " 2,\n" + - " 3\n" + - " ]\n" + - "}"; - return BindingTuple.from(new JSONObject(json)); - } - - protected Expression of(ScalarOperation op, Expression... expressions) { - return ExpressionFactory.of(op, Arrays.asList(expressions)); - } - - protected Number apply(ScalarOperation op, Expression... 
expressions) { - return getNumberValue(of(op, expressions).valueOf(bindingTuple())); - } + protected BindingTuple bindingTuple() { + String json = + "{\n" + + " \"intValue\": 1,\n" + + " \"intValue2\": 2,\n" + + " \"doubleValue\": 2.0,\n" + + " \"negDoubleValue\": -2.0,\n" + + " \"stringValue\": \"string\",\n" + + " \"booleanValue\": true,\n" + + " \"tupleValue\": {\n" + + " \"intValue\": 1,\n" + + " \"doubleValue\": 2.0,\n" + + " \"stringValue\": \"string\"\n" + + " },\n" + + " \"collectValue\": [\n" + + " 1,\n" + + " 2,\n" + + " 3\n" + + " ]\n" + + "}"; + return BindingTuple.from(new JSONObject(json)); + } + + protected Expression of(ScalarOperation op, Expression... expressions) { + return ExpressionFactory.of(op, Arrays.asList(expressions)); + } + + protected Number apply(ScalarOperation op, Expression... expressions) { + return getNumberValue(of(op, expressions).valueOf(bindingTuple())); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/RefExpressionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/RefExpressionTest.java index f8607ca889..faefa6d2c1 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/RefExpressionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/RefExpressionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.expression.core; import static org.hamcrest.MatcherAssert.assertThat; @@ -25,36 +24,40 @@ import org.junit.Test; public class RefExpressionTest extends ExpressionTest { - @Test - public void refIntegerValueShouldPass() { - assertEquals(Integer.valueOf(1), getIntegerValue(ref("intValue").valueOf(bindingTuple()))); - } - - @Test - public void refDoubleValueShouldPass() { - assertEquals(Double.valueOf(2d), getDoubleValue(ref("doubleValue").valueOf(bindingTuple()))); - } - - @Test - public void refStringValueShouldPass() { - 
assertEquals("string", getStringValue(ref("stringValue").valueOf(bindingTuple()))); - } - - @Test - public void refBooleanValueShouldPass() { - assertEquals(true, getBooleanValue(ref("booleanValue").valueOf(bindingTuple()))); - } - - @Test - public void refTupleValueShouldPass() { - assertThat(getTupleValue(ref("tupleValue").valueOf(bindingTuple())), - allOf(hasEntry("intValue", integerValue(1)), hasEntry("doubleValue", doubleValue(2d)), - hasEntry("stringValue", stringValue("string")))); - } - - @Test - public void refCollectValueShouldPass() { - assertThat(getCollectionValue(ref("collectValue").valueOf(bindingTuple())), - contains(integerValue(1), integerValue(2), integerValue(3))); - } + @Test + public void refIntegerValueShouldPass() { + assertEquals(Integer.valueOf(1), getIntegerValue(ref("intValue").valueOf(bindingTuple()))); + } + + @Test + public void refDoubleValueShouldPass() { + assertEquals(Double.valueOf(2d), getDoubleValue(ref("doubleValue").valueOf(bindingTuple()))); + } + + @Test + public void refStringValueShouldPass() { + assertEquals("string", getStringValue(ref("stringValue").valueOf(bindingTuple()))); + } + + @Test + public void refBooleanValueShouldPass() { + assertEquals(true, getBooleanValue(ref("booleanValue").valueOf(bindingTuple()))); + } + + @Test + public void refTupleValueShouldPass() { + assertThat( + getTupleValue(ref("tupleValue").valueOf(bindingTuple())), + allOf( + hasEntry("intValue", integerValue(1)), + hasEntry("doubleValue", doubleValue(2d)), + hasEntry("stringValue", stringValue("string")))); + } + + @Test + public void refCollectValueShouldPass() { + assertThat( + getCollectionValue(ref("collectValue").valueOf(bindingTuple())), + contains(integerValue(1), integerValue(2), integerValue(3))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/UnaryExpressionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/UnaryExpressionTest.java index 
04196bab0a..c8582ecb05 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/UnaryExpressionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/core/UnaryExpressionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.expression.core; import static org.junit.Assert.assertEquals; @@ -21,108 +20,98 @@ @RunWith(MockitoJUnitRunner.class) public class UnaryExpressionTest extends ExpressionTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); - - @Test - public void absShouldPass() { - assertEquals(2.0d, apply(ScalarOperation.ABS, literal(doubleValue(-2d)))); - } - - @Test - public void asinShouldPass() { - assertEquals(0.1001674211615598d, apply(ScalarOperation.ASIN, literal(doubleValue(0.1d)))); - } - - @Test - public void atanShouldPass() { - assertEquals(1.1071487177940904d, apply(ScalarOperation.ATAN, literal(doubleValue(2d)))); - } - - @Test - public void tanShouldPass() { - assertEquals(-2.185039863261519, apply(ScalarOperation.TAN, literal(doubleValue(2d)))); - } - - @Test - public void atan2ShouldPass() { - assertEquals(1.1071487177940904d, - apply(ScalarOperation.ATAN2, literal(doubleValue(2d)), literal(doubleValue(1d)))); - } - - @Test - public void cbrtShouldPass() { - assertEquals(1.2599210498948732d, - apply(ScalarOperation.CBRT, literal(doubleValue(2d)))); - } - - @Test - public void ceilShouldPass() { - assertEquals(3.0d, - apply(ScalarOperation.CEIL, literal(doubleValue(2.1d)))); - } - - @Test - public void floorShouldPass() { - assertEquals(2.0d, - apply(ScalarOperation.FLOOR, literal(doubleValue(2.1d)))); - } - - @Test - public void cosShouldPass() { - assertEquals(-0.4161468365471424d, - apply(ScalarOperation.COS, literal(doubleValue(2d)))); - } - - @Test - public void coshShouldPass() { - assertEquals(3.7621956910836314d, - apply(ScalarOperation.COSH, literal(doubleValue(2d)))); - } - - @Test - public 
void expShouldPass() { - assertEquals(7.38905609893065d, - apply(ScalarOperation.EXP, literal(doubleValue(2d)))); - } - - @Test - public void lnShouldPass() { - assertEquals(0.6931471805599453d, - apply(ScalarOperation.LN, literal(doubleValue(2d)))); - } - - @Test - public void logShouldPass() { - assertEquals(0.6931471805599453d, - apply(ScalarOperation.LOG, literal(doubleValue(2d)))); - } - - @Test - public void log2ShouldPass() { - assertEquals(1.0d, - apply(ScalarOperation.LOG2, literal(doubleValue(2d)))); - } - - @Test - public void log10ShouldPass() { - assertEquals(0.3010299956639812, - apply(ScalarOperation.LOG10, literal(doubleValue(2d)))); - } - - @Test - public void absWithStringShouldThrowException() { - exceptionRule.expect(RuntimeException.class); - exceptionRule.expectMessage("unexpected operation type: ABS(STRING_VALUE)"); - - apply(ScalarOperation.ABS, literal(stringValue("stringValue"))); - } - - @Test - public void atan2WithStringShouldThrowException() { - exceptionRule.expect(RuntimeException.class); - exceptionRule.expectMessage("unexpected operation type: ATAN2(DOUBLE_VALUE,STRING_VALUE)"); - - apply(ScalarOperation.ATAN2, literal(doubleValue(2d)), literal(stringValue("stringValue"))); - } + @Rule public ExpectedException exceptionRule = ExpectedException.none(); + + @Test + public void absShouldPass() { + assertEquals(2.0d, apply(ScalarOperation.ABS, literal(doubleValue(-2d)))); + } + + @Test + public void asinShouldPass() { + assertEquals(0.1001674211615598d, apply(ScalarOperation.ASIN, literal(doubleValue(0.1d)))); + } + + @Test + public void atanShouldPass() { + assertEquals(1.1071487177940904d, apply(ScalarOperation.ATAN, literal(doubleValue(2d)))); + } + + @Test + public void tanShouldPass() { + assertEquals(-2.185039863261519, apply(ScalarOperation.TAN, literal(doubleValue(2d)))); + } + + @Test + public void atan2ShouldPass() { + assertEquals( + 1.1071487177940904d, + apply(ScalarOperation.ATAN2, literal(doubleValue(2d)), 
literal(doubleValue(1d)))); + } + + @Test + public void cbrtShouldPass() { + assertEquals(1.2599210498948732d, apply(ScalarOperation.CBRT, literal(doubleValue(2d)))); + } + + @Test + public void ceilShouldPass() { + assertEquals(3.0d, apply(ScalarOperation.CEIL, literal(doubleValue(2.1d)))); + } + + @Test + public void floorShouldPass() { + assertEquals(2.0d, apply(ScalarOperation.FLOOR, literal(doubleValue(2.1d)))); + } + + @Test + public void cosShouldPass() { + assertEquals(-0.4161468365471424d, apply(ScalarOperation.COS, literal(doubleValue(2d)))); + } + + @Test + public void coshShouldPass() { + assertEquals(3.7621956910836314d, apply(ScalarOperation.COSH, literal(doubleValue(2d)))); + } + + @Test + public void expShouldPass() { + assertEquals(7.38905609893065d, apply(ScalarOperation.EXP, literal(doubleValue(2d)))); + } + + @Test + public void lnShouldPass() { + assertEquals(0.6931471805599453d, apply(ScalarOperation.LN, literal(doubleValue(2d)))); + } + + @Test + public void logShouldPass() { + assertEquals(0.6931471805599453d, apply(ScalarOperation.LOG, literal(doubleValue(2d)))); + } + + @Test + public void log2ShouldPass() { + assertEquals(1.0d, apply(ScalarOperation.LOG2, literal(doubleValue(2d)))); + } + + @Test + public void log10ShouldPass() { + assertEquals(0.3010299956639812, apply(ScalarOperation.LOG10, literal(doubleValue(2d)))); + } + + @Test + public void absWithStringShouldThrowException() { + exceptionRule.expect(RuntimeException.class); + exceptionRule.expectMessage("unexpected operation type: ABS(STRING_VALUE)"); + + apply(ScalarOperation.ABS, literal(stringValue("stringValue"))); + } + + @Test + public void atan2WithStringShouldThrowException() { + exceptionRule.expect(RuntimeException.class); + exceptionRule.expectMessage("unexpected operation type: ATAN2(DOUBLE_VALUE,STRING_VALUE)"); + + apply(ScalarOperation.ATAN2, literal(doubleValue(2d)), literal(stringValue("stringValue"))); + } } diff --git 
a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/model/ExprValueUtilsTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/model/ExprValueUtilsTest.java index 150afcacd3..d84543956d 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/model/ExprValueUtilsTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/expression/model/ExprValueUtilsTest.java @@ -3,11 +3,10 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.expression.model; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; import org.junit.Rule; import org.junit.Test; @@ -19,42 +18,41 @@ @RunWith(MockitoJUnitRunner.class) public class ExprValueUtilsTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); - - @Test - public void getIntegerValueWithIntegerExprValueShouldPass() { - assertThat(ExprValueUtils.getIntegerValue(ExprValueFactory.integerValue(1)), equalTo(1)); - } - - @Test - public void getDoubleValueWithIntegerExprValueShouldPass() { - assertThat(ExprValueUtils.getDoubleValue(ExprValueFactory.integerValue(1)), equalTo(1d)); - } - - @Test - public void getIntegerWithDoubleExprValueShouldPass() { - assertThat(ExprValueUtils.getIntegerValue(ExprValueFactory.doubleValue(1d)), equalTo(1)); - } - - @Test - public void getLongValueFromLongExprValueShouldPass() { - assertThat(ExprValueUtils.getLongValue(ExprValueFactory.from(1L)), equalTo(1L)); - } - - @Test - public void getIntegerValueFromStringExprValueShouldThrowException() { - exceptionRule.expect(IllegalStateException.class); - exceptionRule.expectMessage("invalid to get NUMBER_VALUE from expr type of STRING_VALUE"); - - ExprValueUtils.getIntegerValue(ExprValueFactory.stringValue("string")); - } - - @Test - public void getStringValueFromIntegerExprValueShouldThrowException() { - 
exceptionRule.expect(IllegalStateException.class); - exceptionRule.expectMessage("invalid to get STRING_VALUE from expr type of INTEGER_VALUE"); - - ExprValueUtils.getStringValue(ExprValueFactory.integerValue(1)); - } + @Rule public ExpectedException exceptionRule = ExpectedException.none(); + + @Test + public void getIntegerValueWithIntegerExprValueShouldPass() { + assertThat(ExprValueUtils.getIntegerValue(ExprValueFactory.integerValue(1)), equalTo(1)); + } + + @Test + public void getDoubleValueWithIntegerExprValueShouldPass() { + assertThat(ExprValueUtils.getDoubleValue(ExprValueFactory.integerValue(1)), equalTo(1d)); + } + + @Test + public void getIntegerWithDoubleExprValueShouldPass() { + assertThat(ExprValueUtils.getIntegerValue(ExprValueFactory.doubleValue(1d)), equalTo(1)); + } + + @Test + public void getLongValueFromLongExprValueShouldPass() { + assertThat(ExprValueUtils.getLongValue(ExprValueFactory.from(1L)), equalTo(1L)); + } + + @Test + public void getIntegerValueFromStringExprValueShouldThrowException() { + exceptionRule.expect(IllegalStateException.class); + exceptionRule.expectMessage("invalid to get NUMBER_VALUE from expr type of STRING_VALUE"); + + ExprValueUtils.getIntegerValue(ExprValueFactory.stringValue("string")); + } + + @Test + public void getStringValueFromIntegerExprValueShouldThrowException() { + exceptionRule.expect(IllegalStateException.class); + exceptionRule.expectMessage("invalid to get STRING_VALUE from expr type of INTEGER_VALUE"); + + ExprValueUtils.getStringValue(ExprValueFactory.integerValue(1)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/BasicCounterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/BasicCounterTest.java index ebe61109a7..34dc170a37 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/BasicCounterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/BasicCounterTest.java @@ -3,7 +3,6 @@ * 
SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.metrics; import static org.hamcrest.MatcherAssert.assertThat; @@ -14,22 +13,21 @@ public class BasicCounterTest { - @Test - public void increment() { - BasicCounter counter = new BasicCounter(); - for (int i=0; i<5; ++i) { - counter.increment(); - } - - assertThat(counter.getValue(), equalTo(5L)); + @Test + public void increment() { + BasicCounter counter = new BasicCounter(); + for (int i = 0; i < 5; ++i) { + counter.increment(); } - @Test - public void incrementN() { - BasicCounter counter = new BasicCounter(); - counter.add(5); + assertThat(counter.getValue(), equalTo(5L)); + } - assertThat(counter.getValue(), equalTo(5L)); - } + @Test + public void incrementN() { + BasicCounter counter = new BasicCounter(); + counter.add(5); + assertThat(counter.getValue(), equalTo(5L)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/GaugeMetricTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/GaugeMetricTest.java index a818a115fd..1ec499ce9b 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/GaugeMetricTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/GaugeMetricTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.metrics; import static org.hamcrest.MatcherAssert.assertThat; @@ -14,19 +13,17 @@ public class GaugeMetricTest { - private static long x = 0; - - @Test - public void getValue() { - GaugeMetric gaugeMetric = new GaugeMetric<>("test", this::getSeq); - - assertThat(gaugeMetric.getValue(), equalTo(1L)); - assertThat(gaugeMetric.getValue(), equalTo(2L)); + private static long x = 0; - } + @Test + public void getValue() { + GaugeMetric gaugeMetric = new GaugeMetric<>("test", this::getSeq); - private long getSeq() { - return ++x; - } + assertThat(gaugeMetric.getValue(), equalTo(1L)); + 
assertThat(gaugeMetric.getValue(), equalTo(2L)); + } + private long getSeq() { + return ++x; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/MetricsTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/MetricsTest.java index ff6d8e0c49..885ce6a7cd 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/MetricsTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/MetricsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.metrics; import static org.hamcrest.MatcherAssert.assertThat; @@ -20,55 +19,53 @@ public class MetricsTest { - @Test - public void registerMetric() { - Metrics.getInstance().clear(); - Metrics.getInstance().registerMetric(new NumericMetric("test", new BasicCounter())); - - assertThat(Metrics.getInstance().getAllMetrics().size(), equalTo(1)); - } - - @Test - public void unRegisterMetric() { - Metrics.getInstance().clear(); - Metrics.getInstance().registerMetric(new NumericMetric("test1", new BasicCounter())); - Metrics.getInstance().registerMetric(new NumericMetric("test2", new BasicCounter())); - assertThat(Metrics.getInstance().getAllMetrics().size(), equalTo(2)); - - Metrics.getInstance().unregisterMetric("test2"); - assertThat(Metrics.getInstance().getAllMetrics().size(), equalTo(1)); - } - - @Test - public void getMetric() { - Metrics.getInstance().clear(); - Metrics.getInstance().registerMetric(new NumericMetric("test1", new BasicCounter())); - Metric metric = Metrics.getInstance().getMetric("test1"); - - assertThat(metric, notNullValue()); - } - - - @Test - public void getAllMetric() { - Metrics.getInstance().clear(); - Metrics.getInstance().registerMetric(new NumericMetric("test1", new BasicCounter())); - Metrics.getInstance().registerMetric(new NumericMetric("test2", new BasicCounter())); - List list = Metrics.getInstance().getAllMetrics(); - - assertThat(list.size(), equalTo(2)); 
- } - - @Test - public void collectToJSON() { - Metrics.getInstance().clear(); - Metrics.getInstance().registerMetric(new NumericMetric("test1", new BasicCounter())); - Metrics.getInstance().registerMetric(new NumericMetric("test2", new BasicCounter())); - String res = Metrics.getInstance().collectToJSON(); - JSONObject jsonObject = new JSONObject(res); - - assertThat(jsonObject.getLong("test1"), equalTo(0L)); - assertThat(jsonObject.getInt("test2"), equalTo(0)); - } - + @Test + public void registerMetric() { + Metrics.getInstance().clear(); + Metrics.getInstance().registerMetric(new NumericMetric("test", new BasicCounter())); + + assertThat(Metrics.getInstance().getAllMetrics().size(), equalTo(1)); + } + + @Test + public void unRegisterMetric() { + Metrics.getInstance().clear(); + Metrics.getInstance().registerMetric(new NumericMetric("test1", new BasicCounter())); + Metrics.getInstance().registerMetric(new NumericMetric("test2", new BasicCounter())); + assertThat(Metrics.getInstance().getAllMetrics().size(), equalTo(2)); + + Metrics.getInstance().unregisterMetric("test2"); + assertThat(Metrics.getInstance().getAllMetrics().size(), equalTo(1)); + } + + @Test + public void getMetric() { + Metrics.getInstance().clear(); + Metrics.getInstance().registerMetric(new NumericMetric("test1", new BasicCounter())); + Metric metric = Metrics.getInstance().getMetric("test1"); + + assertThat(metric, notNullValue()); + } + + @Test + public void getAllMetric() { + Metrics.getInstance().clear(); + Metrics.getInstance().registerMetric(new NumericMetric("test1", new BasicCounter())); + Metrics.getInstance().registerMetric(new NumericMetric("test2", new BasicCounter())); + List list = Metrics.getInstance().getAllMetrics(); + + assertThat(list.size(), equalTo(2)); + } + + @Test + public void collectToJSON() { + Metrics.getInstance().clear(); + Metrics.getInstance().registerMetric(new NumericMetric("test1", new BasicCounter())); + Metrics.getInstance().registerMetric(new 
NumericMetric("test2", new BasicCounter())); + String res = Metrics.getInstance().collectToJSON(); + JSONObject jsonObject = new JSONObject(res); + + assertThat(jsonObject.getLong("test1"), equalTo(0L)); + assertThat(jsonObject.getInt("test2"), equalTo(0)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/NumericMetricTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/NumericMetricTest.java index f2c2c25fab..d76241056f 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/NumericMetricTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/NumericMetricTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.metrics; import static org.hamcrest.MatcherAssert.assertThat; @@ -15,22 +14,21 @@ public class NumericMetricTest { - @Test - public void increment() { - NumericMetric metric = new NumericMetric("test", new BasicCounter()); - for (int i=0; i<5; ++i) { - metric.increment(); - } - - assertThat(metric.getValue(), equalTo(5L)); + @Test + public void increment() { + NumericMetric metric = new NumericMetric("test", new BasicCounter()); + for (int i = 0; i < 5; ++i) { + metric.increment(); } - @Test - public void add() { - NumericMetric metric = new NumericMetric("test", new BasicCounter()); - metric.increment(5); + assertThat(metric.getValue(), equalTo(5L)); + } - assertThat(metric.getValue(), equalTo(5L)); - } + @Test + public void add() { + NumericMetric metric = new NumericMetric("test", new BasicCounter()); + metric.increment(5); + assertThat(metric.getValue(), equalTo(5L)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/RollingCounterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/RollingCounterTest.java index a1651aad6b..0ad333a6e2 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/RollingCounterTest.java +++ 
b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/metrics/RollingCounterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.metrics; import static org.hamcrest.MatcherAssert.assertThat; @@ -20,61 +19,58 @@ @RunWith(MockitoJUnitRunner.class) public class RollingCounterTest { - @Mock - Clock clock; + @Mock Clock clock; - @Test - public void increment() { - RollingCounter counter = new RollingCounter(3, 1, clock); - for (int i=0; i<5; ++i) { - counter.increment(); - } + @Test + public void increment() { + RollingCounter counter = new RollingCounter(3, 1, clock); + for (int i = 0; i < 5; ++i) { + counter.increment(); + } - assertThat(counter.getValue(), equalTo(0L)); + assertThat(counter.getValue(), equalTo(0L)); - when(clock.millis()).thenReturn(1000L); // 1 second passed - assertThat(counter.getValue(), equalTo(5L)); + when(clock.millis()).thenReturn(1000L); // 1 second passed + assertThat(counter.getValue(), equalTo(5L)); - counter.increment(); - counter.increment(); + counter.increment(); + counter.increment(); - when(clock.millis()).thenReturn(2000L); // 1 second passed - assertThat(counter.getValue(), lessThanOrEqualTo(3L)); + when(clock.millis()).thenReturn(2000L); // 1 second passed + assertThat(counter.getValue(), lessThanOrEqualTo(3L)); - when(clock.millis()).thenReturn(3000L); // 1 second passed - assertThat(counter.getValue(), equalTo(0L)); + when(clock.millis()).thenReturn(3000L); // 1 second passed + assertThat(counter.getValue(), equalTo(0L)); + } - } + @Test + public void add() { + RollingCounter counter = new RollingCounter(3, 1, clock); - @Test - public void add() { - RollingCounter counter = new RollingCounter(3, 1, clock); + counter.add(6); + assertThat(counter.getValue(), equalTo(0L)); - counter.add(6); - assertThat(counter.getValue(), equalTo(0L)); + when(clock.millis()).thenReturn(1000L); // 1 second passed + assertThat(counter.getValue(), equalTo(6L)); - 
when(clock.millis()).thenReturn(1000L); // 1 second passed - assertThat(counter.getValue(), equalTo(6L)); + counter.add(4); + when(clock.millis()).thenReturn(2000L); // 1 second passed + assertThat(counter.getValue(), equalTo(4L)); - counter.add(4); - when(clock.millis()).thenReturn(2000L); // 1 second passed - assertThat(counter.getValue(), equalTo(4L)); + when(clock.millis()).thenReturn(3000L); // 1 second passed + assertThat(counter.getValue(), equalTo(0L)); + } - when(clock.millis()).thenReturn(3000L); // 1 second passed - assertThat(counter.getValue(), equalTo(0L)); - } + @Test + public void trim() { + RollingCounter counter = new RollingCounter(2, 1, clock); - @Test - public void trim() { - RollingCounter counter = new RollingCounter(2, 1, clock); - - for (int i=1; i<6; ++i) { - counter.increment(); - assertThat(counter.size(), equalTo(i)); - when(clock.millis()).thenReturn(i * 1000L); // i seconds passed - } - counter.increment(); - assertThat(counter.size(), lessThanOrEqualTo(3)); + for (int i = 1; i < 6; ++i) { + counter.increment(); + assertThat(counter.size(), equalTo(i)); + when(clock.millis()).thenReturn(i * 1000L); // i seconds passed } - + counter.increment(); + assertThat(counter.size(), lessThanOrEqualTo(3)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/BucketPathTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/BucketPathTest.java index 067143716d..c26740a04c 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/BucketPathTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/BucketPathTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.parser; import static org.junit.Assert.assertEquals; @@ -16,46 +15,45 @@ public class BucketPathTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = 
ExpectedException.none(); - private final Path agg1 = Path.getAggPath("projects@NESTED"); - private final Path agg2 = Path.getAggPath("projects@FILTERED"); - private final Path metric = Path.getMetricPath("c"); + private final Path agg1 = Path.getAggPath("projects@NESTED"); + private final Path agg2 = Path.getAggPath("projects@FILTERED"); + private final Path metric = Path.getMetricPath("c"); - @Test - public void bucketPath() { - BucketPath bucketPath = new BucketPath(); - bucketPath.add(metric); - bucketPath.add(agg2); - bucketPath.add(agg1); + @Test + public void bucketPath() { + BucketPath bucketPath = new BucketPath(); + bucketPath.add(metric); + bucketPath.add(agg2); + bucketPath.add(agg1); - assertEquals("projects@NESTED>projects@FILTERED.c", bucketPath.getBucketPath()); - } + assertEquals("projects@NESTED>projects@FILTERED.c", bucketPath.getBucketPath()); + } - @Test - public void bucketPathEmpty() { - BucketPath bucketPath = new BucketPath(); + @Test + public void bucketPathEmpty() { + BucketPath bucketPath = new BucketPath(); - assertEquals("", bucketPath.getBucketPath()); - } + assertEquals("", bucketPath.getBucketPath()); + } - @Test - public void theLastMustBeMetric() { - BucketPath bucketPath = new BucketPath(); + @Test + public void theLastMustBeMetric() { + BucketPath bucketPath = new BucketPath(); - exceptionRule.expect(AssertionError.class); - exceptionRule.expectMessage("The last path in the bucket path must be Metric"); - bucketPath.add(agg1); - } + exceptionRule.expect(AssertionError.class); + exceptionRule.expectMessage("The last path in the bucket path must be Metric"); + bucketPath.add(agg1); + } - @Test - public void allTheOtherMustBeAgg() { - BucketPath bucketPath = new BucketPath(); + @Test + public void allTheOtherMustBeAgg() { + BucketPath bucketPath = new BucketPath(); - exceptionRule.expect(AssertionError.class); - exceptionRule.expectMessage("All the other path in the bucket path must be Agg"); - bucketPath.add(metric); - 
bucketPath.add(metric); - } + exceptionRule.expect(AssertionError.class); + exceptionRule.expectMessage("All the other path in the bucket path must be Agg"); + bucketPath.add(metric); + bucketPath.add(metric); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/FieldMakerTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/FieldMakerTest.java index 5115757c9c..c33e768f43 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/FieldMakerTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/FieldMakerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.parser; import static org.junit.Assert.assertEquals; @@ -19,36 +18,40 @@ public class FieldMakerTest { - private static final String ALIAS = "a"; - - private static final String TABLE_ALIAS = "t"; - - private FieldMaker fieldMaker; - - @Before - public void init() { - fieldMaker = new FieldMaker(); - } - - @Test - public void makeFieldAssign() throws SqlParseException { - final SQLIntegerExpr sqlExpr = new SQLIntegerExpr(10); - final MethodField field = (MethodField) fieldMaker.makeField(sqlExpr, ALIAS, TABLE_ALIAS); - - assertEquals("script", field.getName()); - assertEquals(ALIAS, field.getParams().get(0).value); - assertTrue(((String)field.getParams().get(1).value).matches("def assign_[0-9]+ = 10;return assign_[0-9]+;")); - assertEquals(ALIAS, field.getAlias()); - } - - @Test - public void makeFieldAssignDouble() throws SqlParseException { - final SQLNumberExpr sqlExpr = new SQLNumberExpr(10.0); - final MethodField field = (MethodField) fieldMaker.makeField(sqlExpr, ALIAS, TABLE_ALIAS); - - assertEquals("script", field.getName()); - assertEquals(ALIAS, field.getParams().get(0).value); - assertTrue(((String)field.getParams().get(1).value).matches("def assign_[0-9]+ = 10.0;return assign_[0-9]+;")); - assertEquals(ALIAS, field.getAlias()); - } + private static final 
String ALIAS = "a"; + + private static final String TABLE_ALIAS = "t"; + + private FieldMaker fieldMaker; + + @Before + public void init() { + fieldMaker = new FieldMaker(); + } + + @Test + public void makeFieldAssign() throws SqlParseException { + final SQLIntegerExpr sqlExpr = new SQLIntegerExpr(10); + final MethodField field = (MethodField) fieldMaker.makeField(sqlExpr, ALIAS, TABLE_ALIAS); + + assertEquals("script", field.getName()); + assertEquals(ALIAS, field.getParams().get(0).value); + assertTrue( + ((String) field.getParams().get(1).value) + .matches("def assign_[0-9]+ = 10;return assign_[0-9]+;")); + assertEquals(ALIAS, field.getAlias()); + } + + @Test + public void makeFieldAssignDouble() throws SqlParseException { + final SQLNumberExpr sqlExpr = new SQLNumberExpr(10.0); + final MethodField field = (MethodField) fieldMaker.makeField(sqlExpr, ALIAS, TABLE_ALIAS); + + assertEquals("script", field.getName()); + assertEquals(ALIAS, field.getParams().get(0).value); + assertTrue( + ((String) field.getParams().get(1).value) + .matches("def assign_[0-9]+ = 10.0;return assign_[0-9]+;")); + assertEquals(ALIAS, field.getAlias()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/SqlParserTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/SqlParserTest.java index 354c6ff8a1..38eefaaec1 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/SqlParserTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/SqlParserTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.parser; import static org.hamcrest.Matchers.equalTo; @@ -56,1366 +55,1460 @@ public class SqlParserTest { - private SqlParser parser; - - @Before - public void init() { - parser = new SqlParser(); - } - - @Rule - public ExpectedException thrown= ExpectedException.none(); - - @Test - public void whereConditionLeftFunctionRightPropertyGreatTest() 
throws Exception { - - String query = "SELECT " + - " * from " + - TEST_INDEX_ACCOUNT + "/account " + - " where floor(split(address,' ')[0]+0) > b limit 1000 "; - - Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); - Where where = select.getWhere(); - Assert.assertTrue((where.getWheres().size() == 1)); - Assert.assertTrue(((Condition) (where.getWheres().get(0))).getValue() instanceof ScriptFilter); - ScriptFilter scriptFilter = (ScriptFilter) (((Condition) (where.getWheres().get(0))).getValue()); - - Assert.assertTrue(scriptFilter.getScript().contains("doc['address'].value.split(' ')[0]")); - Pattern pattern = Pattern.compile("floor_\\d+ > doc\\['b'].value"); - java.util.regex.Matcher matcher = pattern.matcher(scriptFilter.getScript()); - Assert.assertTrue(matcher.find()); - } - - @Test() - public void failingQueryTest() throws SqlParseException { - thrown.expect(SqlFeatureNotImplementedException.class); - thrown.expectMessage( - "The complex aggregate expressions are not implemented yet: MAX(FlightDelayMin) - MIN(FlightDelayMin)"); - - Select select = - parser.parseSelect((SQLQueryExpr) queryToExpr( - "SELECT DestCountry, dayOfWeek, max(FlightDelayMin) - min(FlightDelayMin)" + - " FROM opensearch_dashboards_sample_data_flights\n" + - " GROUP BY DestCountry, dayOfWeek\n")); - - AggregationQueryAction queryAction = new AggregationQueryAction(mock(Client.class), select); - String elasticDsl = queryAction.explain().explain(); - } - - @Test() - public void failingQueryTest2() throws SqlParseException { - thrown.expect(SqlFeatureNotImplementedException.class); - thrown.expectMessage( - "Function calls of form 'log(MAX(...))' are not implemented yet"); - - Select select = - parser.parseSelect((SQLQueryExpr) queryToExpr( - "SELECT DestCountry, dayOfWeek, log(max(FlightDelayMin))" + - " FROM opensearch_dashboards_sample_data_flights\n" + - " GROUP BY DestCountry, dayOfWeek\n")); - - AggregationQueryAction queryAction = new 
AggregationQueryAction(mock(Client.class), select); - String elasticDsl = queryAction.explain().explain(); - } - - @Test() - public void failingQueryWithHavingTest() throws SqlParseException { - thrown.expect(SqlFeatureNotImplementedException.class); - thrown.expectMessage( - "The complex aggregate expressions are not implemented yet: MAX(FlightDelayMin) - MIN(FlightDelayMin)"); - - Select select = - parser.parseSelect((SQLQueryExpr) queryToExpr( - "SELECT DestCountry, dayOfWeek, max(FlightDelayMin) - min(FlightDelayMin) " + - " FROM opensearch_dashboards_sample_data_flights\n" + - " GROUP BY DestCountry, dayOfWeek\n" + - " HAVING max(FlightDelayMin) - min(FlightDelayMin)) * count(FlightDelayMin) + 14 > 100")); - - AggregationQueryAction queryAction = new AggregationQueryAction(mock(Client.class), select); - String elasticDsl = queryAction.explain().explain(); - } - - @Test() - @Ignore("Github issues: https://github.com/opendistro-for-elasticsearch/sql/issues/194, " + - "https://github.com/opendistro-for-elasticsearch/sql/issues/234") - public void failingQueryWithHavingTest2() throws SqlParseException { - Select select = - parser.parseSelect((SQLQueryExpr) queryToExpr( - "SELECT DestCountry, dayOfWeek, max(FlightDelayMin) " + - " FROM opensearch_dashboards_sample_data_flights\n" + - " GROUP BY DestCountry, dayOfWeek\n" + - " HAVING max(FlightDelayMin) - min(FlightDelayMin) > 100")); - - AggregationQueryAction queryAction = new AggregationQueryAction(mock(Client.class), select); - - String elasticDsl = queryAction.explain().explain(); - } - - @Test - public void whereConditionLeftFunctionRightFunctionEqualTest() throws Exception { - - String query = "SELECT " + - " * from " + - TEST_INDEX_ACCOUNT + "/account " + - " where floor(split(address,' ')[0]+0) = floor(split(address,' ')[0]+0) limit 1000 "; - - Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); - Where where = select.getWhere(); - 
Assert.assertTrue((where.getWheres().size() == 1)); - Assert.assertTrue(((Condition) (where.getWheres().get(0))).getValue() instanceof ScriptFilter); - ScriptFilter scriptFilter = (ScriptFilter) (((Condition) (where.getWheres().get(0))).getValue()); - Assert.assertTrue(scriptFilter.getScript().contains("doc['address'].value.split(' ')[0]")); - Pattern pattern = Pattern.compile("floor_\\d+ == floor_\\d+"); - java.util.regex.Matcher matcher = pattern.matcher(scriptFilter.getScript()); - Assert.assertTrue(matcher.find()); - } - - @Test - public void whereConditionVariableRightVariableEqualTest() throws Exception { - - String query = "SELECT " + - " * from " + - TEST_INDEX_ACCOUNT + "/account " + - " where a = b limit 1000 "; - - Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); - Where where = select.getWhere(); - Assert.assertTrue((where.getWheres().size() == 1)); - Assert.assertTrue(((Condition) (where.getWheres().get(0))).getValue() instanceof ScriptFilter); - ScriptFilter scriptFilter = (ScriptFilter) (((Condition) (where.getWheres().get(0))).getValue()); - Assert.assertTrue(scriptFilter.getScript().contains("doc['a'].value == doc['b'].value")); - } - - @Test - public void joinParseCheckSelectedFieldsSplit() throws SqlParseException { - String query = "SELECT a.firstname ,a.lastname , a.gender , d.holdersName ,d.name FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + - "/account a " + - "LEFT JOIN " + - TEST_INDEX_DOG + - "/dog d on d.holdersName = a.firstname " + - " AND d.age < a.age " + - " WHERE a.firstname = 'eliran' AND " + - " (a.age > 10 OR a.balance > 2000)" + - " AND d.age > 1"; - - JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); - - List t1Fields = joinSelect.getFirstTable().getSelectedFields(); - Assert.assertEquals(t1Fields.size(), 3); - Assert.assertTrue(fieldExist(t1Fields, "firstname")); - Assert.assertTrue(fieldExist(t1Fields, "lastname")); - Assert.assertTrue(fieldExist(t1Fields, "gender")); - - 
List t2Fields = joinSelect.getSecondTable().getSelectedFields(); - Assert.assertEquals(t2Fields.size(), 2); - Assert.assertTrue(fieldExist(t2Fields, "holdersName")); - Assert.assertTrue(fieldExist(t2Fields, "name")); - } - - @Test - public void joinParseCheckConnectedFields() throws SqlParseException { - String query = "SELECT a.firstname ,a.lastname , a.gender , d.holdersName ,d.name FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + - "/account a " + - "LEFT JOIN " + - TEST_INDEX_DOG + - "/dog d on d.holdersName = a.firstname " + - " AND d.age < a.age " + - " WHERE a.firstname = 'eliran' AND " + - " (a.age > 10 OR a.balance > 2000)" + - " AND d.age > 1"; - - JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); - - List t1Fields = joinSelect.getFirstTable().getConnectedFields(); - Assert.assertEquals(t1Fields.size(), 2); - Assert.assertTrue(fieldExist(t1Fields, "firstname")); - Assert.assertTrue(fieldExist(t1Fields, "age")); - - List t2Fields = joinSelect.getSecondTable().getConnectedFields(); - Assert.assertEquals(t2Fields.size(), 2); - Assert.assertTrue(fieldExist(t2Fields, "holdersName")); - Assert.assertTrue(fieldExist(t2Fields, "age")); - } - - private boolean fieldExist(List fields, String fieldName) { - for (Field field : fields) - if (field.getName().equals(fieldName)) return true; - - return false; - } - - - @Test - public void joinParseFromsAreSplitedCorrectly() throws SqlParseException { - String query = "SELECT a.firstname ,a.lastname , a.gender , d.holdersName ,d.name FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + - " a " + - "LEFT JOIN " + - TEST_INDEX_DOG + - " d on d.holdersName = a.firstname" + - " WHERE a.firstname = 'eliran' AND " + - " (a.age > 10 OR a.balance > 2000)" + - " AND d.age > 1"; - - JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); - List t1From = joinSelect.getFirstTable().getFrom(); - - Assert.assertNotNull(t1From); - Assert.assertEquals(1, t1From.size()); - 
Assert.assertTrue(checkFrom(t1From.get(0), TestsConstants.TEST_INDEX_ACCOUNT, "a")); - - List t2From = joinSelect.getSecondTable().getFrom(); - Assert.assertNotNull(t2From); - Assert.assertEquals(1, t2From.size()); - Assert.assertTrue(checkFrom(t2From.get(0), TEST_INDEX_DOG, "d")); - } - - private boolean checkFrom(From from, String index, String alias) { - return from.getAlias().equals(alias) && from.getIndex().equals(index); - } - - @Test - public void joinParseConditionsTestOneCondition() throws SqlParseException { - String query = "SELECT a.*, a.firstname ,a.lastname , a.gender , d.holdersName ,d.name FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + - "/account a " + - "LEFT JOIN " + - TEST_INDEX_DOG + - "/dog d on d.holdersName = a.firstname" + - " WHERE a.firstname = 'eliran' AND " + - " (a.age > 10 OR a.balance > 2000)" + - " AND d.age > 1"; - - JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); - List conditions = joinSelect.getConnectedConditions(); - Assert.assertNotNull(conditions); - Assert.assertEquals(1, conditions.size()); - Assert.assertTrue("condition not exist: d.holdersName = a.firstname", - conditionExist(conditions, "d.holdersName", "a.firstname", Condition.OPERATOR.EQ)); - } - - @Test - public void joinParseConditionsTestTwoConditions() throws SqlParseException { - String query = "SELECT a.*, a.firstname ,a.lastname , a.gender , d.holdersName ,d.name FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + - "/account a " + - "LEFT JOIN " + - TEST_INDEX_DOG + - "/dog d on d.holdersName = a.firstname " + - " AND d.age < a.age " + - " WHERE a.firstname = 'eliran' AND " + - " (a.age > 10 OR a.balance > 2000)" + - " AND d.age > 1"; - - JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); - List conditions = joinSelect.getConnectedConditions(); - Assert.assertNotNull(conditions); - Assert.assertEquals(2, conditions.size()); - Assert.assertTrue("condition not exist: d.holdersName = a.firstname", - 
conditionExist(conditions, "d.holdersName", "a.firstname", Condition.OPERATOR.EQ)); - Assert.assertTrue("condition not exist: d.age < a.age", - conditionExist(conditions, "d.age", "a.age", Condition.OPERATOR.LT)); - } - - - @Test - public void joinSplitWhereCorrectly() throws SqlParseException { - String query = "SELECT a.*, a.firstname ,a.lastname , a.gender , d.holdersName ,d.name FROM " + - TestsConstants.TEST_INDEX_ACCOUNT + - "/account a " + - "LEFT JOIN " + - TEST_INDEX_DOG + - "/dog d on d.holdersName = a.firstname" + - " WHERE a.firstname = 'eliran' AND " + - " (a.age > 10 OR a.balance > 2000)" + - " AND d.age > 1"; - - JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); - String s1Where = joinSelect.getFirstTable().getWhere().toString(); - Assert.assertEquals("AND ( AND firstname EQ eliran, AND ( OR age GT 10, OR balance GT 2000 ) ) ", s1Where); - String s2Where = joinSelect.getSecondTable().getWhere().toString(); - Assert.assertEquals("AND age GT 1", s2Where); - } - - @Test - public void joinConditionWithComplexObjectComparisonRightSide() throws SqlParseException { - String query = String.format(Locale.ROOT, "select c.name.firstname,c.parents.father , h.name,h.words " + - "from %s/gotCharacters c " + - "JOIN %s/gotCharacters h " + - "on h.name = c.name.lastname " + - "where c.name.firstname='Daenerys'", TEST_INDEX_GAME_OF_THRONES, TEST_INDEX_GAME_OF_THRONES); - JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); - List conditions = joinSelect.getConnectedConditions(); - Assert.assertNotNull(conditions); - Assert.assertEquals(1, conditions.size()); - Assert.assertTrue("condition not exist: h.name = c.name.lastname", - conditionExist(conditions, "h.name", "c.name.lastname", Condition.OPERATOR.EQ)); - } - - @Test - public void joinConditionWithComplexObjectComparisonLeftSide() throws SqlParseException { - String query = String.format(Locale.ROOT, - "select c.name.firstname,c.parents.father , 
h.name,h.words from %s/gotCharacters c " + - "JOIN %s/gotCharacters h " + - "on c.name.lastname = h.name " + - "where c.name.firstname='Daenerys'", TEST_INDEX_GAME_OF_THRONES, TEST_INDEX_GAME_OF_THRONES); - JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); - List conditions = joinSelect.getConnectedConditions(); - Assert.assertNotNull(conditions); - Assert.assertEquals(1, conditions.size()); - Assert.assertTrue("condition not exist: c.name.lastname = h.name", - conditionExist(conditions, "c.name.lastname", "h.name", Condition.OPERATOR.EQ)); - } - - - @Test - public void limitHintsOnJoin() throws SqlParseException { - String query = String.format(Locale.ROOT,"select /*! JOIN_TABLES_LIMIT(1000,null) */ " + - "c.name.firstname,c.parents.father , h.name,h.words from %s/gotCharacters c " + - "use KEY (termsFilter) " + - "JOIN %s/gotCharacters h " + - "on c.name.lastname = h.name " + - "where c.name.firstname='Daenerys'", TEST_INDEX_GAME_OF_THRONES, TEST_INDEX_GAME_OF_THRONES); - JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); - List hints = joinSelect.getHints(); - Assert.assertNotNull(hints); - Assert.assertEquals("hints size was not 1", 1, hints.size()); - Hint hint = hints.get(0); - Assert.assertEquals(HintType.JOIN_LIMIT, hint.getType()); - Object[] params = hint.getParams(); - Assert.assertNotNull(params); - Assert.assertEquals("params size was not 2", 2, params.length); - Assert.assertEquals(1000, params[0]); - Assert.assertNull(params[1]); - } - - @Test - public void hashTermsFilterHint() throws SqlParseException { - String query = String.format(Locale.ROOT, "select /*! 
HASH_WITH_TERMS_FILTER*/ " + - "c.name.firstname,c.parents.father , h.name,h.words from %s/gotCharacters c " + - "use KEY (termsFilter) " + - "JOIN %s/gotCharacters h " + - "on c.name.lastname = h.name " + - "where c.name.firstname='Daenerys'", TEST_INDEX_GAME_OF_THRONES, TEST_INDEX_GAME_OF_THRONES); - JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); - List hints = joinSelect.getHints(); - Assert.assertNotNull(hints); - Assert.assertEquals("hints size was not 1", 1, hints.size()); - Hint hint = hints.get(0); - Assert.assertEquals(HintType.HASH_WITH_TERMS_FILTER, hint.getType()); - } - - @Test - public void multipleHints() throws SqlParseException { - String query = String.format(Locale.ROOT, "select /*! HASH_WITH_TERMS_FILTER*/ " + - "/*! JOIN_TABLES_LIMIT(1000,null) */ " + - " /*! JOIN_TABLES_LIMIT(100,200) */ " + - "c.name.firstname,c.parents.father , h.name,h.words from %s/gotCharacters c " + - "use KEY (termsFilter) " + - "JOIN %s/gotCharacters h " + - "on c.name.lastname = h.name " + - "where c.name.firstname='Daenerys'", TEST_INDEX_GAME_OF_THRONES, TEST_INDEX_GAME_OF_THRONES); - - JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); - List hints = joinSelect.getHints(); - - Assert.assertNotNull(hints); - Assert.assertEquals("hints size was not 3", 3, hints.size()); - Hint firstHint = hints.get(0); - Assert.assertEquals(HintType.HASH_WITH_TERMS_FILTER, firstHint.getType()); - Hint secondHint = hints.get(1); - Assert.assertEquals(HintType.JOIN_LIMIT, secondHint.getType()); - Assert.assertEquals(1000, secondHint.getParams()[0]); - Assert.assertNull(secondHint.getParams()[1]); - Hint thirdHint = hints.get(2); - Assert.assertEquals(100, thirdHint.getParams()[0]); - Assert.assertEquals(200, thirdHint.getParams()[1]); - Assert.assertEquals(HintType.JOIN_LIMIT, thirdHint.getType()); - } - - @Test - public void searchWithOdbcTimeFormatParse() throws SqlParseException { - String query = 
String.format(Locale.ROOT, "SELECT insert_time FROM %s/odbc " + - "WHERE insert_time < {ts '2015-03-15 00:00:00.000'}", TEST_INDEX_ODBC); - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - LinkedList wheres = select.getWhere().getWheres(); - Assert.assertEquals(1, wheres.size()); - Condition condition = (Condition) wheres.get(0); - Assert.assertEquals("{ts '2015-03-15 00:00:00.000'}", condition.getValue().toString()); - - } - - @Test - public void indexWithSpacesWithinBrackets() throws SqlParseException { - String query = "SELECT insert_time FROM [Test Index] WHERE age > 3"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List fromList = select.getFrom(); - Assert.assertEquals(1, fromList.size()); - From from = fromList.get(0); - Assert.assertEquals("Test Index", from.getIndex()); - } - - @Test - public void indexWithSpacesWithTypeWithinBrackets() throws SqlParseException { - String query = "SELECT insert_time FROM [Test Index] WHERE age > 3"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List fromList = select.getFrom(); - Assert.assertEquals(1, fromList.size()); - From from = fromList.get(0); - Assert.assertEquals("Test Index", from.getIndex()); - } - - - @Test - public void fieldWithSpacesWithinBrackets() throws SqlParseException { - String query = "SELECT insert_time FROM name/type1 WHERE [first name] = 'Name'"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List where = select.getWhere().getWheres(); - Assert.assertEquals(1, where.size()); - Condition condition = (Condition) where.get(0); - Assert.assertEquals("first name", condition.getName()); - Assert.assertEquals("Name", condition.getValue()); - } - - @Test - public void twoIndices() throws SqlParseException { - String query = "SELECT insert_time FROM index1, index2 WHERE age > 3"; - 
SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List fromList = select.getFrom(); - Assert.assertEquals(2, fromList.size()); - From from1 = fromList.get(0); - From from2 = fromList.get(1); - boolean preservedOrder = from1.getIndex().equals("index1") - && from2.getIndex().equals("index2"); - boolean notPreservedOrder = from1.getIndex().equals("index2") - && from2.getIndex().equals("index1"); - Assert.assertTrue(preservedOrder || notPreservedOrder); - } - - @Test - public void fieldWithATcharAtWhere() throws SqlParseException { - String query = "SELECT * FROM index/type where @field = 6 "; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - LinkedList wheres = select.getWhere().getWheres(); - Assert.assertEquals(1, wheres.size()); - Condition condition = (Condition) wheres.get(0); - Assert.assertEquals("@field", condition.getName()); - } - - @Test - public void fieldWithATcharAtSelect() throws SqlParseException { - String query = "SELECT @field FROM index/type where field2 = 6 "; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List fields = select.getFields(); - Assert.assertEquals(1, fields.size()); - Field field = fields.get(0); - Assert.assertEquals(field.getName(), "@field"); - } - - @Test - public void fieldWithATcharAtSelectOnAgg() throws SqlParseException { - String query = "SELECT max(@field) FROM index/type where field2 = 6 "; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List fields = select.getFields(); - Assert.assertEquals(1, fields.size()); - Field field = fields.get(0); - Assert.assertEquals("MAX(@field)", field.toString()); - } - - @Test - public void fieldWithColonCharAtSelect() throws SqlParseException { - String query = "SELECT a:b FROM index/type where field2 = 6 "; - SQLExpr sqlExpr = queryToExpr(query); - Select select = 
parser.parseSelect((SQLQueryExpr) sqlExpr); - List fields = select.getFields(); - Assert.assertEquals(1, fields.size()); - Field field = fields.get(0); - Assert.assertEquals(field.getName(), "a:b"); - } - - @Test - public void fieldWithColonCharAtWhere() throws SqlParseException { - String query = "SELECT * FROM index/type where a:b = 6 "; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - LinkedList wheres = select.getWhere().getWheres(); - Assert.assertEquals(1, wheres.size()); - Condition condition = (Condition) wheres.get(0); - Assert.assertEquals("a:b", condition.getName()); - } - - @Test - public void fieldIsNull() throws SqlParseException { - String query = "SELECT * FROM index/type where a IS NOT NULL"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - LinkedList wheres = select.getWhere().getWheres(); - Assert.assertEquals(1, wheres.size()); - Condition condition = (Condition) wheres.get(0); - Assert.assertEquals("a", condition.getName()); - Assert.assertNull(condition.getValue()); - } - - @Test - public void innerQueryTest() throws SqlParseException { - String query = String.format(Locale.ROOT, "select * from %s/dog where holdersName " + - "IN (select firstname from %s/account where firstname = 'eliran')", - TEST_INDEX_DOG, TestsConstants.TEST_INDEX_ACCOUNT); - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Assert.assertTrue(select.containsSubQueries()); - Assert.assertEquals(1, select.getSubQueries().size()); - } - - @Test - public void inTermsSubQueryTest() throws SqlParseException { - String query = String.format(Locale.ROOT, "select * from %s/dog where " + - "holdersName = IN_TERMS (select firstname from %s/account where firstname = 'eliran')", - TEST_INDEX_DOG, TestsConstants.TEST_INDEX_ACCOUNT); - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) 
sqlExpr); - Assert.assertTrue(select.containsSubQueries()); - Assert.assertEquals(1, select.getSubQueries().size()); - } - - - @Test - public void innerQueryTestTwoQueries() throws SqlParseException { - String query = String.format(Locale.ROOT, "select * from %s/dog where holdersName IN " + - "(select firstname from %s/account where firstname = 'eliran') and " + - "age IN (select name.ofHisName from %s/gotCharacters) ", - TEST_INDEX_DOG, TestsConstants.TEST_INDEX_ACCOUNT, TEST_INDEX_GAME_OF_THRONES); - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Assert.assertTrue(select.containsSubQueries()); - Assert.assertEquals(2, select.getSubQueries().size()); - } - - @Test - public void indexWithDotsAndHyphen() throws SqlParseException { - String query = "select * from data-2015.08.22"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Assert.assertEquals(1, select.getFrom().size()); - Assert.assertEquals("data-2015.08.22", select.getFrom().get(0).getIndex()); - } - - @Test - public void indexNameWithDotAtTheStart() throws SqlParseException { - String query = "SELECT * FROM .opensearch_dashboards"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Assert.assertEquals(".opensearch_dashboards", select.getFrom().get(0).getIndex()); - } - - @Test - public void indexWithSemiColons() throws SqlParseException { - String query = "select * from some;index"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Assert.assertEquals(1, select.getFrom().size()); - Assert.assertEquals("some;index", select.getFrom().get(0).getIndex()); - } - - @Test - public void scriptFiledPlusLiteralTest() throws SqlParseException { - String query = "SELECT field1 + 3 FROM index/type"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List 
fields = select.getFields(); - Assert.assertEquals(1, fields.size()); - Field field = fields.get(0); - Assert.assertTrue(field instanceof MethodField); - MethodField scriptMethod = (MethodField) field; - Assert.assertEquals("script", scriptMethod.getName().toLowerCase()); - Assert.assertEquals(2, scriptMethod.getParams().size()); - Assert.assertTrue(scriptMethod.getParams().get(1).toString().contains("doc['field1'].value + 3")); - } - - @Test - public void scriptFieldPlusFieldTest() throws SqlParseException { - String query = "SELECT field1 + field2 FROM index/type"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List fields = select.getFields(); - Assert.assertEquals(1, fields.size()); - Field field = fields.get(0); - Assert.assertTrue(field instanceof MethodField); - MethodField scriptMethod = (MethodField) field; - Assert.assertEquals("script", scriptMethod.getName().toLowerCase()); - Assert.assertEquals(2, scriptMethod.getParams().size()); - Assert.assertTrue(scriptMethod.getParams().get(1).toString() - .contains("doc['field1'].value + doc['field2'].value")); - } - - - @Test - public void scriptLiteralPlusLiteralTest() throws SqlParseException { - String query = "SELECT 1 + 2 FROM index/type"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List fields = select.getFields(); - Assert.assertEquals(1, fields.size()); - Field field = fields.get(0); - Assert.assertTrue(field instanceof MethodField); - MethodField scriptMethod = (MethodField) field; - Assert.assertEquals("script", scriptMethod.getName().toLowerCase()); - Assert.assertEquals(2, scriptMethod.getParams().size()); - Assert.assertTrue(scriptMethod.getParams().get(1).toString().contains("1 + 2")); - } - - - @Test - public void explicitScriptOnAggregation() throws SqlParseException { - String query = "SELECT avg( script('add','doc[\\'field1\\'].value + doc[\\'field2\\'].value') )" + - " FROM 
index/type"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List fields = select.getFields(); - Assert.assertEquals(1, fields.size()); - Field field = fields.get(0); - Assert.assertTrue(field instanceof MethodField); - MethodField avgMethodField = (MethodField) field; - Assert.assertEquals("avg", avgMethodField.getName().toLowerCase()); - Assert.assertEquals(1, avgMethodField.getParams().size()); - MethodField scriptMethod = (MethodField) avgMethodField.getParams().get(0).value; - Assert.assertEquals("script", scriptMethod.getName().toLowerCase()); - Assert.assertEquals(2, scriptMethod.getParams().size()); - Assert.assertEquals("doc['field1'].value + doc['field2'].value", - scriptMethod.getParams().get(1).toString()); - } - - @Test - public void implicitScriptOnAggregation() throws SqlParseException { - String query = "SELECT avg(field(field1) + field(field2)) FROM index/type"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List fields = select.getFields(); - Assert.assertEquals(1, fields.size()); - Field field = fields.get(0); - Assert.assertTrue(field instanceof MethodField); - MethodField avgMethodField = (MethodField) field; - Assert.assertEquals("avg", avgMethodField.getName().toLowerCase()); - Assert.assertEquals(1, avgMethodField.getParams().size()); - Assert.assertTrue(avgMethodField.getParams().get(0).value.toString().contains("doc['field1'].value")); - Assert.assertTrue(avgMethodField.getParams().get(0).value.toString().contains("doc['field2'].value")); - - } - - @Test - public void nestedFieldOnWhereNoPathSimpleField() throws SqlParseException { - String query = "select * from myIndex where nested(message.name) = 'hey'"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Where where = select.getWhere().getWheres().get(0); - Assert.assertTrue("where should be condition", where instanceof 
Condition); - Condition condition = (Condition) where; - Assert.assertTrue("condition should be nested", condition.isNested()); - Assert.assertEquals("message", condition.getNestedPath()); - Assert.assertEquals("message.name", condition.getName()); - } - - - @Test - public void nestedFieldOnWhereNoPathComplexField() throws SqlParseException { - String query = "select * from myIndex where nested(message.moreNested.name) = 'hey'"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Where where = select.getWhere().getWheres().get(0); - Assert.assertTrue("where should be condition", where instanceof Condition); - Condition condition = (Condition) where; - Assert.assertTrue("condition should be nested", condition.isNested()); - Assert.assertEquals("message.moreNested", condition.getNestedPath()); - Assert.assertEquals("message.moreNested.name", condition.getName()); - } - - - @Test - public void aggFieldWithAliasTableAliasShouldBeRemoved() throws SqlParseException { - String query = "select count(t.*) as counts,sum(t.size) from xxx/locs as t group by t.kk"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List fields = select.getFields(); - Assert.assertThat(fields.size(), equalTo(2)); - Assert.assertEquals("COUNT(*)", fields.get(0).toString()); - Assert.assertEquals("SUM(size)", fields.get(1).toString()); - List> groups = select.getGroupBys(); - Assert.assertThat(groups.size(), equalTo(1)); - Assert.assertThat(groups.get(0).size(), equalTo(1)); - Assert.assertEquals("kk", groups.get(0).get(0).getName()); - } - - @Test - public void nestedFieldOnWhereGivenPath() throws SqlParseException { - String query = "select * from myIndex where nested(message.name,message) = 'hey'"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Where where = select.getWhere().getWheres().get(0); - Assert.assertTrue("where should be 
condition", where instanceof Condition); - Condition condition = (Condition) where; - Assert.assertTrue("condition should be nested", condition.isNested()); - Assert.assertEquals("message", condition.getNestedPath()); - Assert.assertEquals("message.name", condition.getName()); - } - - @Test - public void nestedFieldOnGroupByNoPath() throws SqlParseException { - String query = "select * from myIndex group by nested(message.name)"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Field field = select.getGroupBys().get(0).get(0); - Assert.assertTrue("condition should be nested", field.isNested()); - Assert.assertEquals("message", field.getNestedPath()); - Assert.assertEquals("message.name", field.getName()); - } - - @Test - public void nestedFieldOnGroupByWithPath() throws SqlParseException { - String query = "select * from myIndex group by nested(message.name,message)"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Field field = select.getGroupBys().get(0).get(0); - Assert.assertTrue("condition should be nested", field.isNested()); - Assert.assertEquals("message", field.getNestedPath()); - Assert.assertEquals("message.name", field.getName()); - } - - @Test - public void filterAggTestNoAlias() throws SqlParseException { - String query = "select * from myIndex group by a , filter( a > 3 AND b='3' )"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List> groupBys = select.getGroupBys(); - Assert.assertEquals(1, groupBys.size()); - Field aAgg = groupBys.get(0).get(0); - Assert.assertEquals("a", aAgg.getName()); - Field field = groupBys.get(0).get(1); - Assert.assertTrue("filter field should be method field", field instanceof MethodField); - MethodField filterAgg = (MethodField) field; - Assert.assertEquals("filter", filterAgg.getName()); - Map params = filterAgg.getParamsAsMap(); - Assert.assertEquals(2, 
params.size()); - Object alias = params.get("alias"); - Assert.assertEquals("filter(a > 3 AND b = '3')@FILTER", alias); - - Assert.assertTrue(params.get("where") instanceof Where); - Where where = (Where) params.get("where"); - Assert.assertEquals(2, where.getWheres().size()); - } - - @Test - public void filterAggTestWithAlias() throws SqlParseException { - String query = "select * from myIndex group by a , filter(myFilter, a > 3 AND b='3' )"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List> groupBys = select.getGroupBys(); - Assert.assertEquals(1, groupBys.size()); - Field aAgg = groupBys.get(0).get(0); - Assert.assertEquals("a", aAgg.getName()); - Field field = groupBys.get(0).get(1); - Assert.assertTrue("filter field should be method field", field instanceof MethodField); - MethodField filterAgg = (MethodField) field; - Assert.assertEquals("filter", filterAgg.getName()); - Map params = filterAgg.getParamsAsMap(); - Assert.assertEquals(2, params.size()); - Object alias = params.get("alias"); - Assert.assertEquals("myFilter@FILTER", alias); - - Assert.assertTrue(params.get("where") instanceof Where); - Where where = (Where) params.get("where"); - Assert.assertEquals(2, where.getWheres().size()); - } - - - @Test - public void filterAggTestWithAliasAsString() throws SqlParseException { - String query = "select * from myIndex group by a , filter('my filter', a > 3 AND b='3' )"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List> groupBys = select.getGroupBys(); - Assert.assertEquals(1, groupBys.size()); - Field aAgg = groupBys.get(0).get(0); - Assert.assertEquals("a", aAgg.getName()); - Field field = groupBys.get(0).get(1); - Assert.assertTrue("filter field should be method field", field instanceof MethodField); - MethodField filterAgg = (MethodField) field; - Assert.assertEquals("filter", filterAgg.getName()); - Map params = 
filterAgg.getParamsAsMap(); - Assert.assertEquals(2, params.size()); - Object alias = params.get("alias"); - Assert.assertEquals("my filter@FILTER", alias); - - Assert.assertTrue(params.get("where") instanceof Where); - Where where = (Where) params.get("where"); - Assert.assertEquals(2, where.getWheres().size()); - } - - @Test - public void doubleOrderByTest() throws SqlParseException { - String query = "select * from indexName order by a asc, b desc"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List orderBys = select.getOrderBys(); - Assert.assertEquals(2, orderBys.size()); - Assert.assertEquals("a", orderBys.get(0).getName()); - Assert.assertEquals("ASC", orderBys.get(0).getType()); - - Assert.assertEquals("b", orderBys.get(1).getName()); - Assert.assertEquals("DESC", orderBys.get(1).getType()); - } - - @Test - public void parseJoinWithOneTableOrderByAttachToCorrectTable() throws SqlParseException { - String query = String.format(Locale.ROOT, "select c.name.firstname , d.words from %s/gotCharacters c " + - "JOIN %s/gotCharacters d on d.name = c.house " + - "order by c.name.firstname" - , TEST_INDEX_GAME_OF_THRONES, TEST_INDEX_GAME_OF_THRONES); - - JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); - Assert.assertTrue("first table should be ordered", joinSelect.getFirstTable().isOrderdSelect()); - Assert.assertFalse("second table should not be ordered", joinSelect.getSecondTable().isOrderdSelect()); - - } - - @Test - public void parseJoinWithOneTableOrderByRemoveAlias() throws SqlParseException { - String query = String.format(Locale.ROOT, "select c.name.firstname , d.words from %s/gotCharacters c " + - "JOIN %s/gotCharacters d on d.name = c.house " + - "order by c.name.firstname" - , TEST_INDEX_GAME_OF_THRONES, TEST_INDEX_GAME_OF_THRONES); - - JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); - List orderBys = 
joinSelect.getFirstTable().getOrderBys(); - Assert.assertEquals(1, orderBys.size()); - Order order = orderBys.get(0); - Assert.assertEquals("name.firstname", order.getName()); - - } - - @Test - public void termsWithStringTest() throws SqlParseException { - String query = "select * from x where y = IN_TERMS('a','b')"; - Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); - Condition condition = (Condition) select.getWhere().getWheres().get(0); - Object[] values = (Object[]) condition.getValue(); - Assert.assertEquals("a", values[0]); - Assert.assertEquals("b", values[1]); - } - - @Test - public void termWithStringTest() throws SqlParseException { - String query = "select * from x where y = TERM('a')"; - Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); - Condition condition = (Condition) select.getWhere().getWheres().get(0); - Object[] values = (Object[]) condition.getValue(); - Assert.assertEquals("a", values[0]); - } - - @Test - public void complexNestedTest() throws SqlParseException { - String query = "select * from x where nested('y',y.b = 'a' and y.c = 'd') "; - Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); - Condition condition = (Condition) select.getWhere().getWheres().get(0); - Assert.assertEquals(Condition.OPERATOR.NESTED_COMPLEX, condition.getOPERATOR()); - Assert.assertEquals("y", condition.getName()); - Assert.assertTrue(condition.getValue() instanceof Where); - Where where = (Where) condition.getValue(); - Assert.assertEquals(2, where.getWheres().size()); - } - - @Test - public void scriptOnFilterNoParams() throws SqlParseException { - String query = "select * from x where script('doc[\\'field\\'].date.hourOfDay == 3') "; - Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); - Condition condition = (Condition) select.getWhere().getWheres().get(0); - Assert.assertEquals(Condition.OPERATOR.SCRIPT, condition.getOPERATOR()); - Assert.assertNull(condition.getName()); - 
Assert.assertTrue(condition.getValue() instanceof ScriptFilter); - ScriptFilter scriptFilter = (ScriptFilter) condition.getValue(); - Assert.assertEquals("doc['field'].date.hourOfDay == 3", scriptFilter.getScript()); - Assert.assertFalse(scriptFilter.containsParameters()); - } - - @Test - public void scriptOnFilterWithParams() throws SqlParseException { - String query = "select * from x where script('doc[\\'field\\'].date.hourOfDay == x','x'=3) "; - Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); - Condition condition = (Condition) select.getWhere().getWheres().get(0); - Assert.assertEquals(Condition.OPERATOR.SCRIPT, condition.getOPERATOR()); - Assert.assertNull(condition.getName()); - Assert.assertTrue(condition.getValue() instanceof ScriptFilter); - ScriptFilter scriptFilter = (ScriptFilter) condition.getValue(); - Assert.assertEquals("doc['field'].date.hourOfDay == x", scriptFilter.getScript()); - Assert.assertTrue(scriptFilter.containsParameters()); - Map args = scriptFilter.getArgs(); - Assert.assertEquals(1, args.size()); - Assert.assertTrue(args.containsKey("x")); - Assert.assertEquals(3, args.get("x")); - - } - - @Test - public void fieldsAsNumbersOnWhere() throws SqlParseException { - String query = "select * from x where ['3'] > 2"; - Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); - LinkedList wheres = select.getWhere().getWheres(); - Assert.assertEquals(1, wheres.size()); - Where where = wheres.get(0); - Assert.assertEquals(Condition.class, where.getClass()); - Condition condition = (Condition) where; - Assert.assertEquals("3", condition.getName()); - } - - @Test - public void likeTestWithEscaped() throws SqlParseException { - String query = "select * from x where name like '&UNDERSCOREhey_%&PERCENT'"; - Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); - BoolQueryBuilder explan = QueryMaker.explain(select.getWhere()); - String filterAsString = explan.toString(); - 
Assert.assertTrue(filterAsString.contains("_hey?*%")); - } - - - @Test - public void complexNestedAndOtherQuery() throws SqlParseException { - String query = "select * from x where nested('path',path.x=3) and y=3"; - Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); - LinkedList wheres = select.getWhere().getWheres(); - Assert.assertEquals(2, wheres.size()); - Assert.assertEquals("AND path NESTED_COMPLEX AND ( AND path.x EQ 3 ) ", wheres.get(0).toString()); - Assert.assertEquals("AND y EQ 3", wheres.get(1).toString()); - } - - - @Test - public void numberEqualConditionWithoutProperty() throws SqlParseException { - SQLExpr sqlExpr = queryToExpr("select * from xxx/locs where 1 = 1"); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List wheres = select.getWhere().getWheres(); - Assert.assertThat(wheres.size(), equalTo(1)); - Condition condition = (Condition) wheres.get(0); - Assert.assertTrue(condition.getValue() instanceof ScriptFilter); - ScriptFilter sf = (ScriptFilter) condition.getValue(); - Assert.assertEquals(sf.getScript(), "1 == 1"); - } - - @Test - public void numberGreatConditionWithoutProperty() throws SqlParseException { - SQLExpr sqlExpr = queryToExpr("select * from xxx/locs where 1 > 1"); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List wheres = select.getWhere().getWheres(); - Assert.assertThat(wheres.size(), equalTo(1)); - Condition condition = (Condition) wheres.get(0); - Assert.assertTrue(condition.getValue() instanceof ScriptFilter); - ScriptFilter sf = (ScriptFilter) condition.getValue(); - Assert.assertEquals(sf.getScript(), "1 > 1"); - } - - @Test - public void stringEqualConditionWithoutProperty() throws SqlParseException { - SQLExpr sqlExpr = queryToExpr("select * from xxx/locs where 'a' = 'b'"); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List wheres = select.getWhere().getWheres(); - Assert.assertThat(wheres.size(), equalTo(1)); - Condition condition = (Condition) 
wheres.get(0); - Assert.assertTrue(condition.getValue() instanceof ScriptFilter); - ScriptFilter sf = (ScriptFilter) condition.getValue(); - Assert.assertEquals(sf.getScript(), "'a' == 'b'"); - } - - @Test - public void propertyEqualCondition() throws SqlParseException { - SQLExpr sqlExpr = queryToExpr("select * from xxx/locs where a = b"); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List wheres = select.getWhere().getWheres(); - Assert.assertThat(wheres.size(), equalTo(1)); - Condition condition = (Condition) wheres.get(0); - Assert.assertTrue(condition.getValue() instanceof ScriptFilter); - ScriptFilter sf = (ScriptFilter) condition.getValue(); - Assert.assertEquals(sf.getScript(), "doc['a'].value == doc['b'].value"); - } - - - @Test - public void propertyWithTableAliasEqualCondition() throws SqlParseException { - SQLExpr sqlExpr = queryToExpr("select t.* from xxx/locs where t.a = t.b"); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List wheres = select.getWhere().getWheres(); - Assert.assertThat(wheres.size(), equalTo(1)); - Condition condition = (Condition) wheres.get(0); - Assert.assertTrue(condition.getValue() instanceof ScriptFilter); - ScriptFilter sf = (ScriptFilter) condition.getValue(); - Assert.assertEquals(sf.getScript(), "doc['a'].value == doc['b'].value"); - } - - @Test - public void propertyGreatCondition() throws SqlParseException { - SQLExpr sqlExpr = queryToExpr("select * from xxx/locs where a > b"); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List wheres = select.getWhere().getWheres(); - Assert.assertThat(wheres.size(), equalTo(1)); - Condition condition = (Condition) wheres.get(0); - Assert.assertTrue(condition.getValue() instanceof ScriptFilter); - ScriptFilter sf = (ScriptFilter) condition.getValue(); - Assert.assertEquals(sf.getScript(), "doc['a'].value > doc['b'].value"); - } - - @Test - public void stringAndNumberEqualConditionWithoutProperty() throws SqlParseException { - SQLExpr 
sqlExpr = queryToExpr("select * from xxx/locs where 'a' = 1"); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - List wheres = select.getWhere().getWheres(); - Assert.assertThat(wheres.size(), equalTo(1)); - Condition condition = (Condition) wheres.get(0); - Assert.assertTrue(condition.getValue() instanceof ScriptFilter); - ScriptFilter sf = (ScriptFilter) condition.getValue(); - Assert.assertEquals(sf.getScript(), "'a' == 1"); - } - - - @Test - public void caseWhenTest() throws SqlParseException { - String query = "Select k,\n" + - "Case \n" + - "When floor(testBase)>=90 then 'A'\n" + - "When testBase = '80' then 'B'\n" + - "Else 'E' end as testBaseLevel\n" + - "from t"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - for (Field field : select.getFields()) { - if (field instanceof MethodField) { - MethodField methodField = (MethodField) field; - String alias = (String) methodField.getParams().get(0).value; - String scriptCode = (String) methodField.getParams().get(1).value; - Assert.assertEquals(alias, "testBaseLevel"); - Matcher docValue = Pattern.compile("doc\\['testBase'].value").matcher(scriptCode); - Matcher number = Pattern.compile(" (\\s+90) | (\\s+'80')").matcher(scriptCode); - - AtomicInteger docValueCounter = new AtomicInteger(); - - while (docValue.find()) { - docValueCounter.incrementAndGet(); - } - - Assert.assertThat(docValueCounter.get(), equalTo(2)); - Assert.assertThat(number.groupCount(), equalTo(2)); - - } - } - - } - - @Test - public void caseWhenTestWithFieldElseExpr() throws SqlParseException { - String query = "Select k,\n" + - "Case \n" + - "When floor(testBase)>=90 then 'A'\n" + - "When testBase = '80' then 'B'\n" + - "Else testBase end as testBaseLevel\n" + - "from t"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - for (Field field : select.getFields()) { - if (field instanceof MethodField) { - MethodField 
methodField = (MethodField) field; - String alias = (String) methodField.getParams().get(0).value; - String scriptCode = (String) methodField.getParams().get(1).value; - Assert.assertEquals(alias, "testBaseLevel"); - Matcher docValue = Pattern.compile("doc\\['testBase'].value").matcher(scriptCode); - Matcher number = Pattern.compile(" (\\s+90) | (\\s+'80')").matcher(scriptCode); - - AtomicInteger docValueCounter = new AtomicInteger(); - - while (docValue.find()) { - docValueCounter.incrementAndGet(); - } - - Assert.assertThat(docValueCounter.get(), equalTo(3)); - Assert.assertThat(number.groupCount(), equalTo(2)); - - } - } - - } - - @Test - public void caseWhenTestWithouhtElseExpr() throws SqlParseException { - String query = "Select k,\n" + - "Case \n" + - "When floor(testBase)>=90 then 'A'\n" + - "When testBase = '80' then 'B'\n" + - "end as testBaseLevel\n" + - "from t"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - for (Field field : select.getFields()) { - if (field instanceof MethodField) { - MethodField methodField = (MethodField) field; - String alias = (String) methodField.getParams().get(0).value; - String scriptCode = (String) methodField.getParams().get(1).value; - Assert.assertEquals(alias, "testBaseLevel"); - - Matcher docValue = Pattern.compile("\\{\\s+null\\s+}").matcher(scriptCode); - - AtomicInteger docValueCounter = new AtomicInteger(); - - while (docValue.find()) { - docValueCounter.incrementAndGet(); - } - - Assert.assertThat(docValueCounter.get(), equalTo(1)); - - } - } - - } - - @Test - public void caseWhenSwitchTest() { - String query = "SELECT CASE weather " - + "WHEN 'Sunny' THEN '0' " - + "WHEN 'Rainy' THEN '1' " - + "ELSE 'NA' END AS case " - + "FROM t"; - ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); - Assert.assertTrue( - CheckScriptContents.scriptContainsString( - scriptField, - "doc['weather'].value=='Sunny'" - ) - ); - } - - @Test - public 
void castToIntTest() throws Exception { - String query = "select cast(age as int) from "+ TestsConstants.TEST_INDEX_ACCOUNT + "/account limit 10"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Field castField = select.getFields().get(0); - Assert.assertTrue(castField instanceof MethodField); - - MethodField methodField = (MethodField) castField; - Assert.assertEquals("script",castField.getName()); - + private SqlParser parser; + + @Before + public void init() { + parser = new SqlParser(); + } + + @Rule public ExpectedException thrown = ExpectedException.none(); + + @Test + public void whereConditionLeftFunctionRightPropertyGreatTest() throws Exception { + + String query = + "SELECT " + + " * from " + + TEST_INDEX_ACCOUNT + + "/account " + + " where floor(split(address,' ')[0]+0) > b limit 1000 "; + + Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); + Where where = select.getWhere(); + Assert.assertTrue((where.getWheres().size() == 1)); + Assert.assertTrue(((Condition) (where.getWheres().get(0))).getValue() instanceof ScriptFilter); + ScriptFilter scriptFilter = + (ScriptFilter) (((Condition) (where.getWheres().get(0))).getValue()); + + Assert.assertTrue(scriptFilter.getScript().contains("doc['address'].value.split(' ')[0]")); + Pattern pattern = Pattern.compile("floor_\\d+ > doc\\['b'].value"); + java.util.regex.Matcher matcher = pattern.matcher(scriptFilter.getScript()); + Assert.assertTrue(matcher.find()); + } + + @Test() + public void failingQueryTest() throws SqlParseException { + thrown.expect(SqlFeatureNotImplementedException.class); + thrown.expectMessage( + "The complex aggregate expressions are not implemented yet: MAX(FlightDelayMin) -" + + " MIN(FlightDelayMin)"); + + Select select = + parser.parseSelect( + (SQLQueryExpr) + queryToExpr( + "SELECT DestCountry, dayOfWeek, max(FlightDelayMin) - min(FlightDelayMin)" + + " FROM opensearch_dashboards_sample_data_flights\n" + + " 
GROUP BY DestCountry, dayOfWeek\n")); + + AggregationQueryAction queryAction = new AggregationQueryAction(mock(Client.class), select); + String elasticDsl = queryAction.explain().explain(); + } + + @Test() + public void failingQueryTest2() throws SqlParseException { + thrown.expect(SqlFeatureNotImplementedException.class); + thrown.expectMessage("Function calls of form 'log(MAX(...))' are not implemented yet"); + + Select select = + parser.parseSelect( + (SQLQueryExpr) + queryToExpr( + "SELECT DestCountry, dayOfWeek, log(max(FlightDelayMin))" + + " FROM opensearch_dashboards_sample_data_flights\n" + + " GROUP BY DestCountry, dayOfWeek\n")); + + AggregationQueryAction queryAction = new AggregationQueryAction(mock(Client.class), select); + String elasticDsl = queryAction.explain().explain(); + } + + @Test() + public void failingQueryWithHavingTest() throws SqlParseException { + thrown.expect(SqlFeatureNotImplementedException.class); + thrown.expectMessage( + "The complex aggregate expressions are not implemented yet: MAX(FlightDelayMin) -" + + " MIN(FlightDelayMin)"); + + Select select = + parser.parseSelect( + (SQLQueryExpr) + queryToExpr( + "SELECT DestCountry, dayOfWeek, max(FlightDelayMin) - min(FlightDelayMin) " + + " FROM opensearch_dashboards_sample_data_flights\n" + + " GROUP BY DestCountry, dayOfWeek\n" + + " HAVING max(FlightDelayMin) - min(FlightDelayMin)) *" + + " count(FlightDelayMin) + 14 > 100")); + + AggregationQueryAction queryAction = new AggregationQueryAction(mock(Client.class), select); + String elasticDsl = queryAction.explain().explain(); + } + + @Test() + @Ignore( + "Github issues: https://github.com/opendistro-for-elasticsearch/sql/issues/194, " + + "https://github.com/opendistro-for-elasticsearch/sql/issues/234") + public void failingQueryWithHavingTest2() throws SqlParseException { + Select select = + parser.parseSelect( + (SQLQueryExpr) + queryToExpr( + "SELECT DestCountry, dayOfWeek, max(FlightDelayMin) " + + " FROM 
opensearch_dashboards_sample_data_flights\n" + + " GROUP BY DestCountry, dayOfWeek\n" + + " HAVING max(FlightDelayMin) - min(FlightDelayMin) > 100")); + + AggregationQueryAction queryAction = new AggregationQueryAction(mock(Client.class), select); + + String elasticDsl = queryAction.explain().explain(); + } + + @Test + public void whereConditionLeftFunctionRightFunctionEqualTest() throws Exception { + + String query = + "SELECT " + + " * from " + + TEST_INDEX_ACCOUNT + + "/account " + + " where floor(split(address,' ')[0]+0) = floor(split(address,' ')[0]+0) limit 1000 "; + + Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); + Where where = select.getWhere(); + Assert.assertTrue((where.getWheres().size() == 1)); + Assert.assertTrue(((Condition) (where.getWheres().get(0))).getValue() instanceof ScriptFilter); + ScriptFilter scriptFilter = + (ScriptFilter) (((Condition) (where.getWheres().get(0))).getValue()); + Assert.assertTrue(scriptFilter.getScript().contains("doc['address'].value.split(' ')[0]")); + Pattern pattern = Pattern.compile("floor_\\d+ == floor_\\d+"); + java.util.regex.Matcher matcher = pattern.matcher(scriptFilter.getScript()); + Assert.assertTrue(matcher.find()); + } + + @Test + public void whereConditionVariableRightVariableEqualTest() throws Exception { + + String query = + "SELECT " + " * from " + TEST_INDEX_ACCOUNT + "/account " + " where a = b limit 1000 "; + + Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); + Where where = select.getWhere(); + Assert.assertTrue((where.getWheres().size() == 1)); + Assert.assertTrue(((Condition) (where.getWheres().get(0))).getValue() instanceof ScriptFilter); + ScriptFilter scriptFilter = + (ScriptFilter) (((Condition) (where.getWheres().get(0))).getValue()); + Assert.assertTrue(scriptFilter.getScript().contains("doc['a'].value == doc['b'].value")); + } + + @Test + public void joinParseCheckSelectedFieldsSplit() throws SqlParseException { + String query = + "SELECT 
a.firstname ,a.lastname , a.gender , d.holdersName ,d.name FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + "/account a " + + "LEFT JOIN " + + TEST_INDEX_DOG + + "/dog d on d.holdersName = a.firstname " + + " AND d.age < a.age " + + " WHERE a.firstname = 'eliran' AND " + + " (a.age > 10 OR a.balance > 2000)" + + " AND d.age > 1"; + + JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); + + List t1Fields = joinSelect.getFirstTable().getSelectedFields(); + Assert.assertEquals(t1Fields.size(), 3); + Assert.assertTrue(fieldExist(t1Fields, "firstname")); + Assert.assertTrue(fieldExist(t1Fields, "lastname")); + Assert.assertTrue(fieldExist(t1Fields, "gender")); + + List t2Fields = joinSelect.getSecondTable().getSelectedFields(); + Assert.assertEquals(t2Fields.size(), 2); + Assert.assertTrue(fieldExist(t2Fields, "holdersName")); + Assert.assertTrue(fieldExist(t2Fields, "name")); + } + + @Test + public void joinParseCheckConnectedFields() throws SqlParseException { + String query = + "SELECT a.firstname ,a.lastname , a.gender , d.holdersName ,d.name FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + "/account a " + + "LEFT JOIN " + + TEST_INDEX_DOG + + "/dog d on d.holdersName = a.firstname " + + " AND d.age < a.age " + + " WHERE a.firstname = 'eliran' AND " + + " (a.age > 10 OR a.balance > 2000)" + + " AND d.age > 1"; + + JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); + + List t1Fields = joinSelect.getFirstTable().getConnectedFields(); + Assert.assertEquals(t1Fields.size(), 2); + Assert.assertTrue(fieldExist(t1Fields, "firstname")); + Assert.assertTrue(fieldExist(t1Fields, "age")); + + List t2Fields = joinSelect.getSecondTable().getConnectedFields(); + Assert.assertEquals(t2Fields.size(), 2); + Assert.assertTrue(fieldExist(t2Fields, "holdersName")); + Assert.assertTrue(fieldExist(t2Fields, "age")); + } + + private boolean fieldExist(List fields, String fieldName) { + for (Field field : fields) if 
(field.getName().equals(fieldName)) return true; + + return false; + } + + @Test + public void joinParseFromsAreSplitedCorrectly() throws SqlParseException { + String query = + "SELECT a.firstname ,a.lastname , a.gender , d.holdersName ,d.name FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + " a " + + "LEFT JOIN " + + TEST_INDEX_DOG + + " d on d.holdersName = a.firstname" + + " WHERE a.firstname = 'eliran' AND " + + " (a.age > 10 OR a.balance > 2000)" + + " AND d.age > 1"; + + JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); + List t1From = joinSelect.getFirstTable().getFrom(); + + Assert.assertNotNull(t1From); + Assert.assertEquals(1, t1From.size()); + Assert.assertTrue(checkFrom(t1From.get(0), TestsConstants.TEST_INDEX_ACCOUNT, "a")); + + List t2From = joinSelect.getSecondTable().getFrom(); + Assert.assertNotNull(t2From); + Assert.assertEquals(1, t2From.size()); + Assert.assertTrue(checkFrom(t2From.get(0), TEST_INDEX_DOG, "d")); + } + + private boolean checkFrom(From from, String index, String alias) { + return from.getAlias().equals(alias) && from.getIndex().equals(index); + } + + @Test + public void joinParseConditionsTestOneCondition() throws SqlParseException { + String query = + "SELECT a.*, a.firstname ,a.lastname , a.gender , d.holdersName ,d.name FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + "/account a " + + "LEFT JOIN " + + TEST_INDEX_DOG + + "/dog d on d.holdersName = a.firstname" + + " WHERE a.firstname = 'eliran' AND " + + " (a.age > 10 OR a.balance > 2000)" + + " AND d.age > 1"; + + JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); + List conditions = joinSelect.getConnectedConditions(); + Assert.assertNotNull(conditions); + Assert.assertEquals(1, conditions.size()); + Assert.assertTrue( + "condition not exist: d.holdersName = a.firstname", + conditionExist(conditions, "d.holdersName", "a.firstname", Condition.OPERATOR.EQ)); + } + + @Test + public void 
joinParseConditionsTestTwoConditions() throws SqlParseException { + String query = + "SELECT a.*, a.firstname ,a.lastname , a.gender , d.holdersName ,d.name FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + "/account a " + + "LEFT JOIN " + + TEST_INDEX_DOG + + "/dog d on d.holdersName = a.firstname " + + " AND d.age < a.age " + + " WHERE a.firstname = 'eliran' AND " + + " (a.age > 10 OR a.balance > 2000)" + + " AND d.age > 1"; + + JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); + List conditions = joinSelect.getConnectedConditions(); + Assert.assertNotNull(conditions); + Assert.assertEquals(2, conditions.size()); + Assert.assertTrue( + "condition not exist: d.holdersName = a.firstname", + conditionExist(conditions, "d.holdersName", "a.firstname", Condition.OPERATOR.EQ)); + Assert.assertTrue( + "condition not exist: d.age < a.age", + conditionExist(conditions, "d.age", "a.age", Condition.OPERATOR.LT)); + } + + @Test + public void joinSplitWhereCorrectly() throws SqlParseException { + String query = + "SELECT a.*, a.firstname ,a.lastname , a.gender , d.holdersName ,d.name FROM " + + TestsConstants.TEST_INDEX_ACCOUNT + + "/account a " + + "LEFT JOIN " + + TEST_INDEX_DOG + + "/dog d on d.holdersName = a.firstname" + + " WHERE a.firstname = 'eliran' AND " + + " (a.age > 10 OR a.balance > 2000)" + + " AND d.age > 1"; + + JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); + String s1Where = joinSelect.getFirstTable().getWhere().toString(); + Assert.assertEquals( + "AND ( AND firstname EQ eliran, AND ( OR age GT 10, OR balance GT 2000 ) ) ", s1Where); + String s2Where = joinSelect.getSecondTable().getWhere().toString(); + Assert.assertEquals("AND age GT 1", s2Where); + } + + @Test + public void joinConditionWithComplexObjectComparisonRightSide() throws SqlParseException { + String query = + String.format( + Locale.ROOT, + "select c.name.firstname,c.parents.father , h.name,h.words " + + "from %s/gotCharacters 
c " + + "JOIN %s/gotCharacters h " + + "on h.name = c.name.lastname " + + "where c.name.firstname='Daenerys'", + TEST_INDEX_GAME_OF_THRONES, + TEST_INDEX_GAME_OF_THRONES); + JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); + List conditions = joinSelect.getConnectedConditions(); + Assert.assertNotNull(conditions); + Assert.assertEquals(1, conditions.size()); + Assert.assertTrue( + "condition not exist: h.name = c.name.lastname", + conditionExist(conditions, "h.name", "c.name.lastname", Condition.OPERATOR.EQ)); + } + + @Test + public void joinConditionWithComplexObjectComparisonLeftSide() throws SqlParseException { + String query = + String.format( + Locale.ROOT, + "select c.name.firstname,c.parents.father , h.name,h.words from %s/gotCharacters c " + + "JOIN %s/gotCharacters h " + + "on c.name.lastname = h.name " + + "where c.name.firstname='Daenerys'", + TEST_INDEX_GAME_OF_THRONES, + TEST_INDEX_GAME_OF_THRONES); + JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); + List conditions = joinSelect.getConnectedConditions(); + Assert.assertNotNull(conditions); + Assert.assertEquals(1, conditions.size()); + Assert.assertTrue( + "condition not exist: c.name.lastname = h.name", + conditionExist(conditions, "c.name.lastname", "h.name", Condition.OPERATOR.EQ)); + } + + @Test + public void limitHintsOnJoin() throws SqlParseException { + String query = + String.format( + Locale.ROOT, + "select /*! 
JOIN_TABLES_LIMIT(1000,null) */ " + + "c.name.firstname,c.parents.father , h.name,h.words from %s/gotCharacters c " + + "use KEY (termsFilter) " + + "JOIN %s/gotCharacters h " + + "on c.name.lastname = h.name " + + "where c.name.firstname='Daenerys'", + TEST_INDEX_GAME_OF_THRONES, + TEST_INDEX_GAME_OF_THRONES); + JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); + List hints = joinSelect.getHints(); + Assert.assertNotNull(hints); + Assert.assertEquals("hints size was not 1", 1, hints.size()); + Hint hint = hints.get(0); + Assert.assertEquals(HintType.JOIN_LIMIT, hint.getType()); + Object[] params = hint.getParams(); + Assert.assertNotNull(params); + Assert.assertEquals("params size was not 2", 2, params.length); + Assert.assertEquals(1000, params[0]); + Assert.assertNull(params[1]); + } + + @Test + public void hashTermsFilterHint() throws SqlParseException { + String query = + String.format( + Locale.ROOT, + "select /*! HASH_WITH_TERMS_FILTER*/ " + + "c.name.firstname,c.parents.father , h.name,h.words from %s/gotCharacters c " + + "use KEY (termsFilter) " + + "JOIN %s/gotCharacters h " + + "on c.name.lastname = h.name " + + "where c.name.firstname='Daenerys'", + TEST_INDEX_GAME_OF_THRONES, + TEST_INDEX_GAME_OF_THRONES); + JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); + List hints = joinSelect.getHints(); + Assert.assertNotNull(hints); + Assert.assertEquals("hints size was not 1", 1, hints.size()); + Hint hint = hints.get(0); + Assert.assertEquals(HintType.HASH_WITH_TERMS_FILTER, hint.getType()); + } + + @Test + public void multipleHints() throws SqlParseException { + String query = + String.format( + Locale.ROOT, + "select /*! HASH_WITH_TERMS_FILTER*/ " + + "/*! JOIN_TABLES_LIMIT(1000,null) */ " + + " /*! 
JOIN_TABLES_LIMIT(100,200) */ " + + "c.name.firstname,c.parents.father , h.name,h.words from %s/gotCharacters c " + + "use KEY (termsFilter) " + + "JOIN %s/gotCharacters h " + + "on c.name.lastname = h.name " + + "where c.name.firstname='Daenerys'", + TEST_INDEX_GAME_OF_THRONES, + TEST_INDEX_GAME_OF_THRONES); + + JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); + List hints = joinSelect.getHints(); + + Assert.assertNotNull(hints); + Assert.assertEquals("hints size was not 3", 3, hints.size()); + Hint firstHint = hints.get(0); + Assert.assertEquals(HintType.HASH_WITH_TERMS_FILTER, firstHint.getType()); + Hint secondHint = hints.get(1); + Assert.assertEquals(HintType.JOIN_LIMIT, secondHint.getType()); + Assert.assertEquals(1000, secondHint.getParams()[0]); + Assert.assertNull(secondHint.getParams()[1]); + Hint thirdHint = hints.get(2); + Assert.assertEquals(100, thirdHint.getParams()[0]); + Assert.assertEquals(200, thirdHint.getParams()[1]); + Assert.assertEquals(HintType.JOIN_LIMIT, thirdHint.getType()); + } + + @Test + public void searchWithOdbcTimeFormatParse() throws SqlParseException { + String query = + String.format( + Locale.ROOT, + "SELECT insert_time FROM %s/odbc " + + "WHERE insert_time < {ts '2015-03-15 00:00:00.000'}", + TEST_INDEX_ODBC); + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + LinkedList wheres = select.getWhere().getWheres(); + Assert.assertEquals(1, wheres.size()); + Condition condition = (Condition) wheres.get(0); + Assert.assertEquals("{ts '2015-03-15 00:00:00.000'}", condition.getValue().toString()); + } + + @Test + public void indexWithSpacesWithinBrackets() throws SqlParseException { + String query = "SELECT insert_time FROM [Test Index] WHERE age > 3"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List fromList = select.getFrom(); + Assert.assertEquals(1, fromList.size()); + From from = 
fromList.get(0); + Assert.assertEquals("Test Index", from.getIndex()); + } + + @Test + public void indexWithSpacesWithTypeWithinBrackets() throws SqlParseException { + String query = "SELECT insert_time FROM [Test Index] WHERE age > 3"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List fromList = select.getFrom(); + Assert.assertEquals(1, fromList.size()); + From from = fromList.get(0); + Assert.assertEquals("Test Index", from.getIndex()); + } + + @Test + public void fieldWithSpacesWithinBrackets() throws SqlParseException { + String query = "SELECT insert_time FROM name/type1 WHERE [first name] = 'Name'"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List where = select.getWhere().getWheres(); + Assert.assertEquals(1, where.size()); + Condition condition = (Condition) where.get(0); + Assert.assertEquals("first name", condition.getName()); + Assert.assertEquals("Name", condition.getValue()); + } + + @Test + public void twoIndices() throws SqlParseException { + String query = "SELECT insert_time FROM index1, index2 WHERE age > 3"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List fromList = select.getFrom(); + Assert.assertEquals(2, fromList.size()); + From from1 = fromList.get(0); + From from2 = fromList.get(1); + boolean preservedOrder = from1.getIndex().equals("index1") && from2.getIndex().equals("index2"); + boolean notPreservedOrder = + from1.getIndex().equals("index2") && from2.getIndex().equals("index1"); + Assert.assertTrue(preservedOrder || notPreservedOrder); + } + + @Test + public void fieldWithATcharAtWhere() throws SqlParseException { + String query = "SELECT * FROM index/type where @field = 6 "; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + LinkedList wheres = select.getWhere().getWheres(); + Assert.assertEquals(1, 
wheres.size()); + Condition condition = (Condition) wheres.get(0); + Assert.assertEquals("@field", condition.getName()); + } + + @Test + public void fieldWithATcharAtSelect() throws SqlParseException { + String query = "SELECT @field FROM index/type where field2 = 6 "; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List fields = select.getFields(); + Assert.assertEquals(1, fields.size()); + Field field = fields.get(0); + Assert.assertEquals(field.getName(), "@field"); + } + + @Test + public void fieldWithATcharAtSelectOnAgg() throws SqlParseException { + String query = "SELECT max(@field) FROM index/type where field2 = 6 "; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List fields = select.getFields(); + Assert.assertEquals(1, fields.size()); + Field field = fields.get(0); + Assert.assertEquals("MAX(@field)", field.toString()); + } + + @Test + public void fieldWithColonCharAtSelect() throws SqlParseException { + String query = "SELECT a:b FROM index/type where field2 = 6 "; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List fields = select.getFields(); + Assert.assertEquals(1, fields.size()); + Field field = fields.get(0); + Assert.assertEquals(field.getName(), "a:b"); + } + + @Test + public void fieldWithColonCharAtWhere() throws SqlParseException { + String query = "SELECT * FROM index/type where a:b = 6 "; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + LinkedList wheres = select.getWhere().getWheres(); + Assert.assertEquals(1, wheres.size()); + Condition condition = (Condition) wheres.get(0); + Assert.assertEquals("a:b", condition.getName()); + } + + @Test + public void fieldIsNull() throws SqlParseException { + String query = "SELECT * FROM index/type where a IS NOT NULL"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = 
parser.parseSelect((SQLQueryExpr) sqlExpr); + LinkedList wheres = select.getWhere().getWheres(); + Assert.assertEquals(1, wheres.size()); + Condition condition = (Condition) wheres.get(0); + Assert.assertEquals("a", condition.getName()); + Assert.assertNull(condition.getValue()); + } + + @Test + public void innerQueryTest() throws SqlParseException { + String query = + String.format( + Locale.ROOT, + "select * from %s/dog where holdersName " + + "IN (select firstname from %s/account where firstname = 'eliran')", + TEST_INDEX_DOG, + TestsConstants.TEST_INDEX_ACCOUNT); + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Assert.assertTrue(select.containsSubQueries()); + Assert.assertEquals(1, select.getSubQueries().size()); + } + + @Test + public void inTermsSubQueryTest() throws SqlParseException { + String query = + String.format( + Locale.ROOT, + "select * from %s/dog where holdersName = IN_TERMS (select firstname from %s/account" + + " where firstname = 'eliran')", + TEST_INDEX_DOG, + TestsConstants.TEST_INDEX_ACCOUNT); + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Assert.assertTrue(select.containsSubQueries()); + Assert.assertEquals(1, select.getSubQueries().size()); + } + + @Test + public void innerQueryTestTwoQueries() throws SqlParseException { + String query = + String.format( + Locale.ROOT, + "select * from %s/dog where holdersName IN " + + "(select firstname from %s/account where firstname = 'eliran') and " + + "age IN (select name.ofHisName from %s/gotCharacters) ", + TEST_INDEX_DOG, + TestsConstants.TEST_INDEX_ACCOUNT, + TEST_INDEX_GAME_OF_THRONES); + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Assert.assertTrue(select.containsSubQueries()); + Assert.assertEquals(2, select.getSubQueries().size()); + } + + @Test + public void indexWithDotsAndHyphen() throws SqlParseException { + String query 
= "select * from data-2015.08.22"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Assert.assertEquals(1, select.getFrom().size()); + Assert.assertEquals("data-2015.08.22", select.getFrom().get(0).getIndex()); + } + + @Test + public void indexNameWithDotAtTheStart() throws SqlParseException { + String query = "SELECT * FROM .opensearch_dashboards"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Assert.assertEquals(".opensearch_dashboards", select.getFrom().get(0).getIndex()); + } + + @Test + public void indexWithSemiColons() throws SqlParseException { + String query = "select * from some;index"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Assert.assertEquals(1, select.getFrom().size()); + Assert.assertEquals("some;index", select.getFrom().get(0).getIndex()); + } + + @Test + public void scriptFiledPlusLiteralTest() throws SqlParseException { + String query = "SELECT field1 + 3 FROM index/type"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List fields = select.getFields(); + Assert.assertEquals(1, fields.size()); + Field field = fields.get(0); + Assert.assertTrue(field instanceof MethodField); + MethodField scriptMethod = (MethodField) field; + Assert.assertEquals("script", scriptMethod.getName().toLowerCase()); + Assert.assertEquals(2, scriptMethod.getParams().size()); + Assert.assertTrue( + scriptMethod.getParams().get(1).toString().contains("doc['field1'].value + 3")); + } + + @Test + public void scriptFieldPlusFieldTest() throws SqlParseException { + String query = "SELECT field1 + field2 FROM index/type"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List fields = select.getFields(); + Assert.assertEquals(1, fields.size()); + Field field = fields.get(0); + 
Assert.assertTrue(field instanceof MethodField); + MethodField scriptMethod = (MethodField) field; + Assert.assertEquals("script", scriptMethod.getName().toLowerCase()); + Assert.assertEquals(2, scriptMethod.getParams().size()); + Assert.assertTrue( + scriptMethod + .getParams() + .get(1) + .toString() + .contains("doc['field1'].value + doc['field2'].value")); + } + + @Test + public void scriptLiteralPlusLiteralTest() throws SqlParseException { + String query = "SELECT 1 + 2 FROM index/type"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List fields = select.getFields(); + Assert.assertEquals(1, fields.size()); + Field field = fields.get(0); + Assert.assertTrue(field instanceof MethodField); + MethodField scriptMethod = (MethodField) field; + Assert.assertEquals("script", scriptMethod.getName().toLowerCase()); + Assert.assertEquals(2, scriptMethod.getParams().size()); + Assert.assertTrue(scriptMethod.getParams().get(1).toString().contains("1 + 2")); + } + + @Test + public void explicitScriptOnAggregation() throws SqlParseException { + String query = + "SELECT avg( script('add','doc[\\'field1\\'].value + doc[\\'field2\\'].value') )" + + " FROM index/type"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List fields = select.getFields(); + Assert.assertEquals(1, fields.size()); + Field field = fields.get(0); + Assert.assertTrue(field instanceof MethodField); + MethodField avgMethodField = (MethodField) field; + Assert.assertEquals("avg", avgMethodField.getName().toLowerCase()); + Assert.assertEquals(1, avgMethodField.getParams().size()); + MethodField scriptMethod = (MethodField) avgMethodField.getParams().get(0).value; + Assert.assertEquals("script", scriptMethod.getName().toLowerCase()); + Assert.assertEquals(2, scriptMethod.getParams().size()); + Assert.assertEquals( + "doc['field1'].value + doc['field2'].value", 
scriptMethod.getParams().get(1).toString()); + } + + @Test + public void implicitScriptOnAggregation() throws SqlParseException { + String query = "SELECT avg(field(field1) + field(field2)) FROM index/type"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List fields = select.getFields(); + Assert.assertEquals(1, fields.size()); + Field field = fields.get(0); + Assert.assertTrue(field instanceof MethodField); + MethodField avgMethodField = (MethodField) field; + Assert.assertEquals("avg", avgMethodField.getName().toLowerCase()); + Assert.assertEquals(1, avgMethodField.getParams().size()); + Assert.assertTrue( + avgMethodField.getParams().get(0).value.toString().contains("doc['field1'].value")); + Assert.assertTrue( + avgMethodField.getParams().get(0).value.toString().contains("doc['field2'].value")); + } + + @Test + public void nestedFieldOnWhereNoPathSimpleField() throws SqlParseException { + String query = "select * from myIndex where nested(message.name) = 'hey'"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Where where = select.getWhere().getWheres().get(0); + Assert.assertTrue("where should be condition", where instanceof Condition); + Condition condition = (Condition) where; + Assert.assertTrue("condition should be nested", condition.isNested()); + Assert.assertEquals("message", condition.getNestedPath()); + Assert.assertEquals("message.name", condition.getName()); + } + + @Test + public void nestedFieldOnWhereNoPathComplexField() throws SqlParseException { + String query = "select * from myIndex where nested(message.moreNested.name) = 'hey'"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Where where = select.getWhere().getWheres().get(0); + Assert.assertTrue("where should be condition", where instanceof Condition); + Condition condition = (Condition) where; + Assert.assertTrue("condition 
should be nested", condition.isNested()); + Assert.assertEquals("message.moreNested", condition.getNestedPath()); + Assert.assertEquals("message.moreNested.name", condition.getName()); + } + + @Test + public void aggFieldWithAliasTableAliasShouldBeRemoved() throws SqlParseException { + String query = "select count(t.*) as counts,sum(t.size) from xxx/locs as t group by t.kk"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List fields = select.getFields(); + Assert.assertThat(fields.size(), equalTo(2)); + Assert.assertEquals("COUNT(*)", fields.get(0).toString()); + Assert.assertEquals("SUM(size)", fields.get(1).toString()); + List> groups = select.getGroupBys(); + Assert.assertThat(groups.size(), equalTo(1)); + Assert.assertThat(groups.get(0).size(), equalTo(1)); + Assert.assertEquals("kk", groups.get(0).get(0).getName()); + } + + @Test + public void nestedFieldOnWhereGivenPath() throws SqlParseException { + String query = "select * from myIndex where nested(message.name,message) = 'hey'"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Where where = select.getWhere().getWheres().get(0); + Assert.assertTrue("where should be condition", where instanceof Condition); + Condition condition = (Condition) where; + Assert.assertTrue("condition should be nested", condition.isNested()); + Assert.assertEquals("message", condition.getNestedPath()); + Assert.assertEquals("message.name", condition.getName()); + } + + @Test + public void nestedFieldOnGroupByNoPath() throws SqlParseException { + String query = "select * from myIndex group by nested(message.name)"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Field field = select.getGroupBys().get(0).get(0); + Assert.assertTrue("condition should be nested", field.isNested()); + Assert.assertEquals("message", field.getNestedPath()); + 
Assert.assertEquals("message.name", field.getName()); + } + + @Test + public void nestedFieldOnGroupByWithPath() throws SqlParseException { + String query = "select * from myIndex group by nested(message.name,message)"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Field field = select.getGroupBys().get(0).get(0); + Assert.assertTrue("condition should be nested", field.isNested()); + Assert.assertEquals("message", field.getNestedPath()); + Assert.assertEquals("message.name", field.getName()); + } + + @Test + public void filterAggTestNoAlias() throws SqlParseException { + String query = "select * from myIndex group by a , filter( a > 3 AND b='3' )"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List> groupBys = select.getGroupBys(); + Assert.assertEquals(1, groupBys.size()); + Field aAgg = groupBys.get(0).get(0); + Assert.assertEquals("a", aAgg.getName()); + Field field = groupBys.get(0).get(1); + Assert.assertTrue("filter field should be method field", field instanceof MethodField); + MethodField filterAgg = (MethodField) field; + Assert.assertEquals("filter", filterAgg.getName()); + Map params = filterAgg.getParamsAsMap(); + Assert.assertEquals(2, params.size()); + Object alias = params.get("alias"); + Assert.assertEquals("filter(a > 3 AND b = '3')@FILTER", alias); + + Assert.assertTrue(params.get("where") instanceof Where); + Where where = (Where) params.get("where"); + Assert.assertEquals(2, where.getWheres().size()); + } + + @Test + public void filterAggTestWithAlias() throws SqlParseException { + String query = "select * from myIndex group by a , filter(myFilter, a > 3 AND b='3' )"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List> groupBys = select.getGroupBys(); + Assert.assertEquals(1, groupBys.size()); + Field aAgg = groupBys.get(0).get(0); + Assert.assertEquals("a", aAgg.getName()); 
+ Field field = groupBys.get(0).get(1); + Assert.assertTrue("filter field should be method field", field instanceof MethodField); + MethodField filterAgg = (MethodField) field; + Assert.assertEquals("filter", filterAgg.getName()); + Map params = filterAgg.getParamsAsMap(); + Assert.assertEquals(2, params.size()); + Object alias = params.get("alias"); + Assert.assertEquals("myFilter@FILTER", alias); + + Assert.assertTrue(params.get("where") instanceof Where); + Where where = (Where) params.get("where"); + Assert.assertEquals(2, where.getWheres().size()); + } + + @Test + public void filterAggTestWithAliasAsString() throws SqlParseException { + String query = "select * from myIndex group by a , filter('my filter', a > 3 AND b='3' )"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List> groupBys = select.getGroupBys(); + Assert.assertEquals(1, groupBys.size()); + Field aAgg = groupBys.get(0).get(0); + Assert.assertEquals("a", aAgg.getName()); + Field field = groupBys.get(0).get(1); + Assert.assertTrue("filter field should be method field", field instanceof MethodField); + MethodField filterAgg = (MethodField) field; + Assert.assertEquals("filter", filterAgg.getName()); + Map params = filterAgg.getParamsAsMap(); + Assert.assertEquals(2, params.size()); + Object alias = params.get("alias"); + Assert.assertEquals("my filter@FILTER", alias); + + Assert.assertTrue(params.get("where") instanceof Where); + Where where = (Where) params.get("where"); + Assert.assertEquals(2, where.getWheres().size()); + } + + @Test + public void doubleOrderByTest() throws SqlParseException { + String query = "select * from indexName order by a asc, b desc"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List orderBys = select.getOrderBys(); + Assert.assertEquals(2, orderBys.size()); + Assert.assertEquals("a", orderBys.get(0).getName()); + Assert.assertEquals("ASC", 
orderBys.get(0).getType()); + + Assert.assertEquals("b", orderBys.get(1).getName()); + Assert.assertEquals("DESC", orderBys.get(1).getType()); + } + + @Test + public void parseJoinWithOneTableOrderByAttachToCorrectTable() throws SqlParseException { + String query = + String.format( + Locale.ROOT, + "select c.name.firstname , d.words from %s/gotCharacters c " + + "JOIN %s/gotCharacters d on d.name = c.house " + + "order by c.name.firstname", + TEST_INDEX_GAME_OF_THRONES, + TEST_INDEX_GAME_OF_THRONES); + + JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); + Assert.assertTrue("first table should be ordered", joinSelect.getFirstTable().isOrderdSelect()); + Assert.assertFalse( + "second table should not be ordered", joinSelect.getSecondTable().isOrderdSelect()); + } + + @Test + public void parseJoinWithOneTableOrderByRemoveAlias() throws SqlParseException { + String query = + String.format( + Locale.ROOT, + "select c.name.firstname , d.words from %s/gotCharacters c " + + "JOIN %s/gotCharacters d on d.name = c.house " + + "order by c.name.firstname", + TEST_INDEX_GAME_OF_THRONES, + TEST_INDEX_GAME_OF_THRONES); + + JoinSelect joinSelect = parser.parseJoinSelect((SQLQueryExpr) queryToExpr(query)); + List orderBys = joinSelect.getFirstTable().getOrderBys(); + Assert.assertEquals(1, orderBys.size()); + Order order = orderBys.get(0); + Assert.assertEquals("name.firstname", order.getName()); + } + + @Test + public void termsWithStringTest() throws SqlParseException { + String query = "select * from x where y = IN_TERMS('a','b')"; + Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); + Condition condition = (Condition) select.getWhere().getWheres().get(0); + Object[] values = (Object[]) condition.getValue(); + Assert.assertEquals("a", values[0]); + Assert.assertEquals("b", values[1]); + } + + @Test + public void termWithStringTest() throws SqlParseException { + String query = "select * from x where y = TERM('a')"; + Select 
select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); + Condition condition = (Condition) select.getWhere().getWheres().get(0); + Object[] values = (Object[]) condition.getValue(); + Assert.assertEquals("a", values[0]); + } + + @Test + public void complexNestedTest() throws SqlParseException { + String query = "select * from x where nested('y',y.b = 'a' and y.c = 'd') "; + Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); + Condition condition = (Condition) select.getWhere().getWheres().get(0); + Assert.assertEquals(Condition.OPERATOR.NESTED_COMPLEX, condition.getOPERATOR()); + Assert.assertEquals("y", condition.getName()); + Assert.assertTrue(condition.getValue() instanceof Where); + Where where = (Where) condition.getValue(); + Assert.assertEquals(2, where.getWheres().size()); + } + + @Test + public void scriptOnFilterNoParams() throws SqlParseException { + String query = "select * from x where script('doc[\\'field\\'].date.hourOfDay == 3') "; + Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); + Condition condition = (Condition) select.getWhere().getWheres().get(0); + Assert.assertEquals(Condition.OPERATOR.SCRIPT, condition.getOPERATOR()); + Assert.assertNull(condition.getName()); + Assert.assertTrue(condition.getValue() instanceof ScriptFilter); + ScriptFilter scriptFilter = (ScriptFilter) condition.getValue(); + Assert.assertEquals("doc['field'].date.hourOfDay == 3", scriptFilter.getScript()); + Assert.assertFalse(scriptFilter.containsParameters()); + } + + @Test + public void scriptOnFilterWithParams() throws SqlParseException { + String query = "select * from x where script('doc[\\'field\\'].date.hourOfDay == x','x'=3) "; + Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); + Condition condition = (Condition) select.getWhere().getWheres().get(0); + Assert.assertEquals(Condition.OPERATOR.SCRIPT, condition.getOPERATOR()); + Assert.assertNull(condition.getName()); + 
Assert.assertTrue(condition.getValue() instanceof ScriptFilter); + ScriptFilter scriptFilter = (ScriptFilter) condition.getValue(); + Assert.assertEquals("doc['field'].date.hourOfDay == x", scriptFilter.getScript()); + Assert.assertTrue(scriptFilter.containsParameters()); + Map args = scriptFilter.getArgs(); + Assert.assertEquals(1, args.size()); + Assert.assertTrue(args.containsKey("x")); + Assert.assertEquals(3, args.get("x")); + } + + @Test + public void fieldsAsNumbersOnWhere() throws SqlParseException { + String query = "select * from x where ['3'] > 2"; + Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); + LinkedList wheres = select.getWhere().getWheres(); + Assert.assertEquals(1, wheres.size()); + Where where = wheres.get(0); + Assert.assertEquals(Condition.class, where.getClass()); + Condition condition = (Condition) where; + Assert.assertEquals("3", condition.getName()); + } + + @Test + public void likeTestWithEscaped() throws SqlParseException { + String query = "select * from x where name like '&UNDERSCOREhey_%&PERCENT'"; + Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); + BoolQueryBuilder explan = QueryMaker.explain(select.getWhere()); + String filterAsString = explan.toString(); + Assert.assertTrue(filterAsString.contains("_hey?*%")); + } + + @Test + public void complexNestedAndOtherQuery() throws SqlParseException { + String query = "select * from x where nested('path',path.x=3) and y=3"; + Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); + LinkedList wheres = select.getWhere().getWheres(); + Assert.assertEquals(2, wheres.size()); + Assert.assertEquals( + "AND path NESTED_COMPLEX AND ( AND path.x EQ 3 ) ", wheres.get(0).toString()); + Assert.assertEquals("AND y EQ 3", wheres.get(1).toString()); + } + + @Test + public void numberEqualConditionWithoutProperty() throws SqlParseException { + SQLExpr sqlExpr = queryToExpr("select * from xxx/locs where 1 = 1"); + Select select = 
parser.parseSelect((SQLQueryExpr) sqlExpr); + List wheres = select.getWhere().getWheres(); + Assert.assertThat(wheres.size(), equalTo(1)); + Condition condition = (Condition) wheres.get(0); + Assert.assertTrue(condition.getValue() instanceof ScriptFilter); + ScriptFilter sf = (ScriptFilter) condition.getValue(); + Assert.assertEquals(sf.getScript(), "1 == 1"); + } + + @Test + public void numberGreatConditionWithoutProperty() throws SqlParseException { + SQLExpr sqlExpr = queryToExpr("select * from xxx/locs where 1 > 1"); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List wheres = select.getWhere().getWheres(); + Assert.assertThat(wheres.size(), equalTo(1)); + Condition condition = (Condition) wheres.get(0); + Assert.assertTrue(condition.getValue() instanceof ScriptFilter); + ScriptFilter sf = (ScriptFilter) condition.getValue(); + Assert.assertEquals(sf.getScript(), "1 > 1"); + } + + @Test + public void stringEqualConditionWithoutProperty() throws SqlParseException { + SQLExpr sqlExpr = queryToExpr("select * from xxx/locs where 'a' = 'b'"); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List wheres = select.getWhere().getWheres(); + Assert.assertThat(wheres.size(), equalTo(1)); + Condition condition = (Condition) wheres.get(0); + Assert.assertTrue(condition.getValue() instanceof ScriptFilter); + ScriptFilter sf = (ScriptFilter) condition.getValue(); + Assert.assertEquals(sf.getScript(), "'a' == 'b'"); + } + + @Test + public void propertyEqualCondition() throws SqlParseException { + SQLExpr sqlExpr = queryToExpr("select * from xxx/locs where a = b"); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List wheres = select.getWhere().getWheres(); + Assert.assertThat(wheres.size(), equalTo(1)); + Condition condition = (Condition) wheres.get(0); + Assert.assertTrue(condition.getValue() instanceof ScriptFilter); + ScriptFilter sf = (ScriptFilter) condition.getValue(); + Assert.assertEquals(sf.getScript(), "doc['a'].value == 
doc['b'].value"); + } + + @Test + public void propertyWithTableAliasEqualCondition() throws SqlParseException { + SQLExpr sqlExpr = queryToExpr("select t.* from xxx/locs where t.a = t.b"); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List wheres = select.getWhere().getWheres(); + Assert.assertThat(wheres.size(), equalTo(1)); + Condition condition = (Condition) wheres.get(0); + Assert.assertTrue(condition.getValue() instanceof ScriptFilter); + ScriptFilter sf = (ScriptFilter) condition.getValue(); + Assert.assertEquals(sf.getScript(), "doc['a'].value == doc['b'].value"); + } + + @Test + public void propertyGreatCondition() throws SqlParseException { + SQLExpr sqlExpr = queryToExpr("select * from xxx/locs where a > b"); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List wheres = select.getWhere().getWheres(); + Assert.assertThat(wheres.size(), equalTo(1)); + Condition condition = (Condition) wheres.get(0); + Assert.assertTrue(condition.getValue() instanceof ScriptFilter); + ScriptFilter sf = (ScriptFilter) condition.getValue(); + Assert.assertEquals(sf.getScript(), "doc['a'].value > doc['b'].value"); + } + + @Test + public void stringAndNumberEqualConditionWithoutProperty() throws SqlParseException { + SQLExpr sqlExpr = queryToExpr("select * from xxx/locs where 'a' = 1"); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + List wheres = select.getWhere().getWheres(); + Assert.assertThat(wheres.size(), equalTo(1)); + Condition condition = (Condition) wheres.get(0); + Assert.assertTrue(condition.getValue() instanceof ScriptFilter); + ScriptFilter sf = (ScriptFilter) condition.getValue(); + Assert.assertEquals(sf.getScript(), "'a' == 1"); + } + + @Test + public void caseWhenTest() throws SqlParseException { + String query = + "Select k,\n" + + "Case \n" + + "When floor(testBase)>=90 then 'A'\n" + + "When testBase = '80' then 'B'\n" + + "Else 'E' end as testBaseLevel\n" + + "from t"; + SQLExpr sqlExpr = queryToExpr(query); 
+ Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + for (Field field : select.getFields()) { + if (field instanceof MethodField) { + MethodField methodField = (MethodField) field; String alias = (String) methodField.getParams().get(0).value; String scriptCode = (String) methodField.getParams().get(1).value; - Assert.assertEquals("cast_age",alias); - Assert.assertTrue(scriptCode.contains("doc['age'].value")); - Assert.assertTrue(scriptCode.contains("Double.parseDouble(doc['age'].value.toString()).intValue()")); - } + Assert.assertEquals(alias, "testBaseLevel"); + Matcher docValue = Pattern.compile("doc\\['testBase'].value").matcher(scriptCode); + Matcher number = Pattern.compile(" (\\s+90) | (\\s+'80')").matcher(scriptCode); - @Test - public void castToLongTest() throws Exception { - String query = "select cast(insert_time as long) from "+ TestsConstants.TEST_INDEX_ACCOUNT + " limit 10"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Field castField = select.getFields().get(0); - Assert.assertTrue(castField instanceof MethodField); + AtomicInteger docValueCounter = new AtomicInteger(); - MethodField methodField = (MethodField) castField; - Assert.assertEquals("script",castField.getName()); - - String alias = (String) methodField.getParams().get(0).value; - String scriptCode = (String) methodField.getParams().get(1).value; - Assert.assertEquals("cast_insert_time",alias); - Assert.assertTrue(scriptCode.contains("doc['insert_time'].value")); - Assert.assertTrue(scriptCode.contains("Double.parseDouble(doc['insert_time'].value.toString()).longValue()")); - } - - @Test - public void castToFloatTest() throws Exception { - String query = "select cast(age as float) from "+ TestsConstants.TEST_INDEX_ACCOUNT + " limit 10"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Field castField = select.getFields().get(0); - Assert.assertTrue(castField instanceof 
MethodField); - - MethodField methodField = (MethodField) castField; - Assert.assertEquals("script",castField.getName()); - - String alias = (String) methodField.getParams().get(0).value; - String scriptCode = (String) methodField.getParams().get(1).value; - Assert.assertEquals("cast_age",alias); - Assert.assertTrue(scriptCode.contains("doc['age'].value")); - Assert.assertTrue(scriptCode.contains("Double.parseDouble(doc['age'].value.toString()).floatValue()")); - } - - @Test - public void castToDoubleTest() throws Exception { - String query = "select cast(age as double) from "+ TestsConstants.TEST_INDEX_ACCOUNT + "/account limit 10"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Field castField = select.getFields().get(0); - Assert.assertTrue(castField instanceof MethodField); - - MethodField methodField = (MethodField) castField; - Assert.assertEquals("script",castField.getName()); + while (docValue.find()) { + docValueCounter.incrementAndGet(); + } + Assert.assertThat(docValueCounter.get(), equalTo(2)); + Assert.assertThat(number.groupCount(), equalTo(2)); + } + } + } + + @Test + public void caseWhenTestWithFieldElseExpr() throws SqlParseException { + String query = + "Select k,\n" + + "Case \n" + + "When floor(testBase)>=90 then 'A'\n" + + "When testBase = '80' then 'B'\n" + + "Else testBase end as testBaseLevel\n" + + "from t"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + for (Field field : select.getFields()) { + if (field instanceof MethodField) { + MethodField methodField = (MethodField) field; String alias = (String) methodField.getParams().get(0).value; String scriptCode = (String) methodField.getParams().get(1).value; - Assert.assertEquals("cast_age",alias); - Assert.assertTrue(scriptCode.contains("doc['age'].value")); - Assert.assertTrue(scriptCode.contains("Double.parseDouble(doc['age'].value.toString()).doubleValue()")); - } - - @Test - 
public void castToStringTest() throws Exception { - String query = "select cast(age as string) from "+ TestsConstants.TEST_INDEX_ACCOUNT + "/account limit 10"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Field castField = select.getFields().get(0); - Assert.assertTrue(castField instanceof MethodField); - - MethodField methodField = (MethodField) castField; - Assert.assertEquals("script",castField.getName()); + Assert.assertEquals(alias, "testBaseLevel"); + Matcher docValue = Pattern.compile("doc\\['testBase'].value").matcher(scriptCode); + Matcher number = Pattern.compile(" (\\s+90) | (\\s+'80')").matcher(scriptCode); - String alias = (String) methodField.getParams().get(0).value; - String scriptCode = (String) methodField.getParams().get(1).value; - Assert.assertEquals("cast_age",alias); - Assert.assertTrue(scriptCode.contains("doc['age'].value.toString()")); - } + AtomicInteger docValueCounter = new AtomicInteger(); - @Test - public void castToDateTimeTest() throws Exception { - String query = "select cast(age as datetime) from "+ TestsConstants.TEST_INDEX_ACCOUNT + "/account limit 10"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Field castField = select.getFields().get(0); - Assert.assertTrue(castField instanceof MethodField); - - MethodField methodField = (MethodField) castField; - Assert.assertEquals("script",castField.getName()); + while (docValue.find()) { + docValueCounter.incrementAndGet(); + } + Assert.assertThat(docValueCounter.get(), equalTo(3)); + Assert.assertThat(number.groupCount(), equalTo(2)); + } + } + } + + @Test + public void caseWhenTestWithouhtElseExpr() throws SqlParseException { + String query = + "Select k,\n" + + "Case \n" + + "When floor(testBase)>=90 then 'A'\n" + + "When testBase = '80' then 'B'\n" + + "end as testBaseLevel\n" + + "from t"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = 
parser.parseSelect((SQLQueryExpr) sqlExpr); + for (Field field : select.getFields()) { + if (field instanceof MethodField) { + MethodField methodField = (MethodField) field; String alias = (String) methodField.getParams().get(0).value; String scriptCode = (String) methodField.getParams().get(1).value; - Assert.assertEquals("cast_age",alias); - Assert.assertTrue(scriptCode.contains("doc['age'].value")); - Assert.assertTrue(scriptCode.contains("DateTimeFormatter.ofPattern(\"yyyy-MM-dd'T'HH:mm:ss.SSS'Z'\").format(" - + "DateTimeFormatter.ISO_DATE_TIME.parse(doc['age'].value.toString()))")); - } + Assert.assertEquals(alias, "testBaseLevel"); - @Test - public void castToDoubleThenDivideTest() throws Exception { - String query = "select cast(age as double)/2 from "+ TestsConstants.TEST_INDEX_ACCOUNT + "/account limit 10"; - SQLExpr sqlExpr = queryToExpr(query); - Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); - Field castField = select.getFields().get(0); - Assert.assertTrue(castField instanceof MethodField); - - MethodField methodField = (MethodField) castField; - Assert.assertEquals("script",castField.getName()); - - String scriptCode = (String) methodField.getParams().get(1).value; - Assert.assertTrue(scriptCode.contains("doc['age'].value")); - Assert.assertTrue(scriptCode.contains("Double.parseDouble(doc['age'].value.toString()).doubleValue()")); - Assert.assertTrue(scriptCode.contains("/ 2")); - } + Matcher docValue = Pattern.compile("\\{\\s+null\\s+}").matcher(scriptCode); + AtomicInteger docValueCounter = new AtomicInteger(); - @Test - public void multiSelectMinusOperationCheckIndices() throws SqlParseException { - String query = "select pk from firstIndex minus select pk from secondIndex "; - MultiQuerySelect select = parser.parseMultiSelect((SQLUnionQuery) - ((SQLQueryExpr) queryToExpr(query)).getSubQuery().getQuery()); - Assert.assertEquals("firstIndex",select.getFirstSelect().getFrom().get(0).getIndex()); - 
Assert.assertEquals("secondIndex",select.getSecondSelect().getFrom().get(0).getIndex()); - Assert.assertEquals(SQLUnionOperator.MINUS,select.getOperation()); - } - - @Test - public void multiSelectMinusWithAliasCheckAliases() throws SqlParseException { - String query = "select pk as myId from firstIndex minus select myId from secondIndex "; - MultiQuerySelect select = parser.parseMultiSelect((com.alibaba.druid.sql.ast.statement.SQLUnionQuery) - ((SQLQueryExpr) queryToExpr(query)).getSubQuery().getQuery()); - Assert.assertEquals("myId",select.getFirstSelect().getFields().get(0).getAlias()); - Assert.assertEquals("myId",select.getSecondSelect().getFields().get(0).getName()); - Assert.assertEquals(SQLUnionOperator.MINUS,select.getOperation()); - } - @Test - public void multiSelectMinusTestMinusHints() throws SqlParseException { - String query = "select /*! MINUS_SCROLL_FETCH_AND_RESULT_LIMITS(1000,50,100)*/ " + - "/*! MINUS_USE_TERMS_OPTIMIZATION(true)*/ pk from firstIndex minus select pk from secondIndex "; - MultiQuerySelect select = parser.parseMultiSelect((SQLUnionQuery) - ((SQLQueryExpr) queryToExpr(query)).getSubQuery().getQuery()); - List hints = select.getFirstSelect().getHints(); - Assert.assertEquals(2,hints.size()); - for(Hint hint : hints) { - if (hint.getType() == HintType.MINUS_FETCH_AND_RESULT_LIMITS) { - Object[] params = hint.getParams(); - Assert.assertEquals(1000,params[0]); - Assert.assertEquals(50,params[1]); - Assert.assertEquals(100,params[2]); - } - if(hint.getType() == HintType.MINUS_USE_TERMS_OPTIMIZATION){ - Assert.assertEquals(true,hint.getParams()[0]); - } + while (docValue.find()) { + docValueCounter.incrementAndGet(); } - } - @Test - public void multiSelectMinusScrollCheckDefaultsAllDefaults() throws SqlParseException { - String query = "select /*! 
MINUS_SCROLL_FETCH_AND_RESULT_LIMITS*/ pk from firstIndex " + - "minus select pk from secondIndex "; - MultiQuerySelect select = parser.parseMultiSelect((com.alibaba.druid.sql.ast.statement.SQLUnionQuery) + Assert.assertThat(docValueCounter.get(), equalTo(1)); + } + } + } + + @Test + public void caseWhenSwitchTest() { + String query = + "SELECT CASE weather " + + "WHEN 'Sunny' THEN '0' " + + "WHEN 'Rainy' THEN '1' " + + "ELSE 'NA' END AS case " + + "FROM t"; + ScriptField scriptField = CheckScriptContents.getScriptFieldFromQuery(query); + Assert.assertTrue( + CheckScriptContents.scriptContainsString(scriptField, "doc['weather'].value=='Sunny'")); + } + + @Test + public void castToIntTest() throws Exception { + String query = + "select cast(age as int) from " + TestsConstants.TEST_INDEX_ACCOUNT + "/account limit 10"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Field castField = select.getFields().get(0); + Assert.assertTrue(castField instanceof MethodField); + + MethodField methodField = (MethodField) castField; + Assert.assertEquals("script", castField.getName()); + + String alias = (String) methodField.getParams().get(0).value; + String scriptCode = (String) methodField.getParams().get(1).value; + Assert.assertEquals("cast_age", alias); + Assert.assertTrue(scriptCode.contains("doc['age'].value")); + Assert.assertTrue( + scriptCode.contains("Double.parseDouble(doc['age'].value.toString()).intValue()")); + } + + @Test + public void castToLongTest() throws Exception { + String query = + "select cast(insert_time as long) from " + TestsConstants.TEST_INDEX_ACCOUNT + " limit 10"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Field castField = select.getFields().get(0); + Assert.assertTrue(castField instanceof MethodField); + + MethodField methodField = (MethodField) castField; + Assert.assertEquals("script", castField.getName()); + + String alias = 
(String) methodField.getParams().get(0).value; + String scriptCode = (String) methodField.getParams().get(1).value; + Assert.assertEquals("cast_insert_time", alias); + Assert.assertTrue(scriptCode.contains("doc['insert_time'].value")); + Assert.assertTrue( + scriptCode.contains("Double.parseDouble(doc['insert_time'].value.toString()).longValue()")); + } + + @Test + public void castToFloatTest() throws Exception { + String query = + "select cast(age as float) from " + TestsConstants.TEST_INDEX_ACCOUNT + " limit 10"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Field castField = select.getFields().get(0); + Assert.assertTrue(castField instanceof MethodField); + + MethodField methodField = (MethodField) castField; + Assert.assertEquals("script", castField.getName()); + + String alias = (String) methodField.getParams().get(0).value; + String scriptCode = (String) methodField.getParams().get(1).value; + Assert.assertEquals("cast_age", alias); + Assert.assertTrue(scriptCode.contains("doc['age'].value")); + Assert.assertTrue( + scriptCode.contains("Double.parseDouble(doc['age'].value.toString()).floatValue()")); + } + + @Test + public void castToDoubleTest() throws Exception { + String query = + "select cast(age as double) from " + + TestsConstants.TEST_INDEX_ACCOUNT + + "/account limit 10"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Field castField = select.getFields().get(0); + Assert.assertTrue(castField instanceof MethodField); + + MethodField methodField = (MethodField) castField; + Assert.assertEquals("script", castField.getName()); + + String alias = (String) methodField.getParams().get(0).value; + String scriptCode = (String) methodField.getParams().get(1).value; + Assert.assertEquals("cast_age", alias); + Assert.assertTrue(scriptCode.contains("doc['age'].value")); + Assert.assertTrue( + 
scriptCode.contains("Double.parseDouble(doc['age'].value.toString()).doubleValue()")); + } + + @Test + public void castToStringTest() throws Exception { + String query = + "select cast(age as string) from " + + TestsConstants.TEST_INDEX_ACCOUNT + + "/account limit 10"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Field castField = select.getFields().get(0); + Assert.assertTrue(castField instanceof MethodField); + + MethodField methodField = (MethodField) castField; + Assert.assertEquals("script", castField.getName()); + + String alias = (String) methodField.getParams().get(0).value; + String scriptCode = (String) methodField.getParams().get(1).value; + Assert.assertEquals("cast_age", alias); + Assert.assertTrue(scriptCode.contains("doc['age'].value.toString()")); + } + + @Test + public void castToDateTimeTest() throws Exception { + String query = + "select cast(age as datetime) from " + + TestsConstants.TEST_INDEX_ACCOUNT + + "/account limit 10"; + SQLExpr sqlExpr = queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Field castField = select.getFields().get(0); + Assert.assertTrue(castField instanceof MethodField); + + MethodField methodField = (MethodField) castField; + Assert.assertEquals("script", castField.getName()); + + String alias = (String) methodField.getParams().get(0).value; + String scriptCode = (String) methodField.getParams().get(1).value; + Assert.assertEquals("cast_age", alias); + Assert.assertTrue(scriptCode.contains("doc['age'].value")); + Assert.assertTrue( + scriptCode.contains( + "DateTimeFormatter.ofPattern(\"yyyy-MM-dd'T'HH:mm:ss.SSS'Z'\").format(" + + "DateTimeFormatter.ISO_DATE_TIME.parse(doc['age'].value.toString()))")); + } + + @Test + public void castToDoubleThenDivideTest() throws Exception { + String query = + "select cast(age as double)/2 from " + + TestsConstants.TEST_INDEX_ACCOUNT + + "/account limit 10"; + SQLExpr sqlExpr = 
queryToExpr(query); + Select select = parser.parseSelect((SQLQueryExpr) sqlExpr); + Field castField = select.getFields().get(0); + Assert.assertTrue(castField instanceof MethodField); + + MethodField methodField = (MethodField) castField; + Assert.assertEquals("script", castField.getName()); + + String scriptCode = (String) methodField.getParams().get(1).value; + Assert.assertTrue(scriptCode.contains("doc['age'].value")); + Assert.assertTrue( + scriptCode.contains("Double.parseDouble(doc['age'].value.toString()).doubleValue()")); + Assert.assertTrue(scriptCode.contains("/ 2")); + } + + @Test + public void multiSelectMinusOperationCheckIndices() throws SqlParseException { + String query = "select pk from firstIndex minus select pk from secondIndex "; + MultiQuerySelect select = + parser.parseMultiSelect( + (SQLUnionQuery) ((SQLQueryExpr) queryToExpr(query)).getSubQuery().getQuery()); + Assert.assertEquals("firstIndex", select.getFirstSelect().getFrom().get(0).getIndex()); + Assert.assertEquals("secondIndex", select.getSecondSelect().getFrom().get(0).getIndex()); + Assert.assertEquals(SQLUnionOperator.MINUS, select.getOperation()); + } + + @Test + public void multiSelectMinusWithAliasCheckAliases() throws SqlParseException { + String query = "select pk as myId from firstIndex minus select myId from secondIndex "; + MultiQuerySelect select = + parser.parseMultiSelect( + (com.alibaba.druid.sql.ast.statement.SQLUnionQuery) ((SQLQueryExpr) queryToExpr(query)).getSubQuery().getQuery()); - List hints = select.getFirstSelect().getHints(); - Assert.assertEquals(1, hints.size()); - Hint hint = hints.get(0); - Assert.assertEquals(HintType.MINUS_FETCH_AND_RESULT_LIMITS,hint.getType()); + Assert.assertEquals("myId", select.getFirstSelect().getFields().get(0).getAlias()); + Assert.assertEquals("myId", select.getSecondSelect().getFields().get(0).getName()); + Assert.assertEquals(SQLUnionOperator.MINUS, select.getOperation()); + } + + @Test + public void 
multiSelectMinusTestMinusHints() throws SqlParseException { + String query = + "select /*! MINUS_SCROLL_FETCH_AND_RESULT_LIMITS(1000,50,100)*/ /*!" + + " MINUS_USE_TERMS_OPTIMIZATION(true)*/ pk from firstIndex minus select pk from" + + " secondIndex "; + MultiQuerySelect select = + parser.parseMultiSelect( + (SQLUnionQuery) ((SQLQueryExpr) queryToExpr(query)).getSubQuery().getQuery()); + List hints = select.getFirstSelect().getHints(); + Assert.assertEquals(2, hints.size()); + for (Hint hint : hints) { + if (hint.getType() == HintType.MINUS_FETCH_AND_RESULT_LIMITS) { Object[] params = hint.getParams(); - Assert.assertEquals(100000, params[0]); - Assert.assertEquals(100000, params[1]); - Assert.assertEquals(1000, params[2]); - } - - @Test - public void multiSelectMinusScrollCheckDefaultsOneDefault() throws SqlParseException { - String query = "select /*! MINUS_SCROLL_FETCH_AND_RESULT_LIMITS(50,100)*/ pk " + - "from firstIndex minus select pk from secondIndex "; - MultiQuerySelect select = parser.parseMultiSelect((com.alibaba.druid.sql.ast.statement.SQLUnionQuery) + Assert.assertEquals(1000, params[0]); + Assert.assertEquals(50, params[1]); + Assert.assertEquals(100, params[2]); + } + if (hint.getType() == HintType.MINUS_USE_TERMS_OPTIMIZATION) { + Assert.assertEquals(true, hint.getParams()[0]); + } + } + } + + @Test + public void multiSelectMinusScrollCheckDefaultsAllDefaults() throws SqlParseException { + String query = + "select /*! 
MINUS_SCROLL_FETCH_AND_RESULT_LIMITS*/ pk from firstIndex " + + "minus select pk from secondIndex "; + MultiQuerySelect select = + parser.parseMultiSelect( + (com.alibaba.druid.sql.ast.statement.SQLUnionQuery) ((SQLQueryExpr) queryToExpr(query)).getSubQuery().getQuery()); - List hints = select.getFirstSelect().getHints(); - Assert.assertEquals(1, hints.size()); - Hint hint = hints.get(0); - Assert.assertEquals(HintType.MINUS_FETCH_AND_RESULT_LIMITS,hint.getType()); - Object[] params = hint.getParams(); - Assert.assertEquals(50, params[0]); - Assert.assertEquals(100, params[1]); - Assert.assertEquals(1000, params[2]); - } - - private SQLExpr queryToExpr(String query) { - return new ElasticSqlExprParser(query).expr(); - } - - private boolean conditionExist(List conditions, String from, String to, Condition.OPERATOR OPERATOR) { - String[] aliasAndField = to.split("\\.", 2); - String toAlias = aliasAndField[0]; - String toField = aliasAndField[1]; - for (Condition condition : conditions) { - if (condition.getOPERATOR() != OPERATOR) continue; - - boolean fromIsEqual = condition.getName().equals(from); - if (!fromIsEqual) continue; - - String[] valueAliasAndField = condition.getValue().toString().split("\\.", 2); - boolean toFieldNameIsEqual = valueAliasAndField[1].equals(toField); - boolean toAliasIsEqual = valueAliasAndField[0].equals(toAlias); - boolean toIsEqual = toAliasIsEqual && toFieldNameIsEqual; - - if (toIsEqual) return true; - } - return false; - } + List hints = select.getFirstSelect().getHints(); + Assert.assertEquals(1, hints.size()); + Hint hint = hints.get(0); + Assert.assertEquals(HintType.MINUS_FETCH_AND_RESULT_LIMITS, hint.getType()); + Object[] params = hint.getParams(); + Assert.assertEquals(100000, params[0]); + Assert.assertEquals(100000, params[1]); + Assert.assertEquals(1000, params[2]); + } + + @Test + public void multiSelectMinusScrollCheckDefaultsOneDefault() throws SqlParseException { + String query = + "select /*! 
MINUS_SCROLL_FETCH_AND_RESULT_LIMITS(50,100)*/ pk " + + "from firstIndex minus select pk from secondIndex "; + MultiQuerySelect select = + parser.parseMultiSelect( + (com.alibaba.druid.sql.ast.statement.SQLUnionQuery) + ((SQLQueryExpr) queryToExpr(query)).getSubQuery().getQuery()); + List hints = select.getFirstSelect().getHints(); + Assert.assertEquals(1, hints.size()); + Hint hint = hints.get(0); + Assert.assertEquals(HintType.MINUS_FETCH_AND_RESULT_LIMITS, hint.getType()); + Object[] params = hint.getParams(); + Assert.assertEquals(50, params[0]); + Assert.assertEquals(100, params[1]); + Assert.assertEquals(1000, params[2]); + } + + private SQLExpr queryToExpr(String query) { + return new ElasticSqlExprParser(query).expr(); + } + + private boolean conditionExist( + List conditions, String from, String to, Condition.OPERATOR OPERATOR) { + String[] aliasAndField = to.split("\\.", 2); + String toAlias = aliasAndField[0]; + String toField = aliasAndField[1]; + for (Condition condition : conditions) { + if (condition.getOPERATOR() != OPERATOR) continue; + + boolean fromIsEqual = condition.getName().equals(from); + if (!fromIsEqual) continue; + + String[] valueAliasAndField = condition.getValue().toString().split("\\.", 2); + boolean toFieldNameIsEqual = valueAliasAndField[1].equals(toField); + boolean toAliasIsEqual = valueAliasAndField[0].equals(toAlias); + boolean toIsEqual = toAliasIsEqual && toFieldNameIsEqual; + + if (toIsEqual) return true; + } + return false; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/SubQueryParserTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/SubQueryParserTest.java index ac614affdb..32885c79fe 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/SubQueryParserTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/parser/SubQueryParserTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package 
org.opensearch.sql.legacy.unittest.parser; import static org.junit.Assert.assertEquals; @@ -20,127 +19,135 @@ public class SubQueryParserTest { - private static SqlParser parser = new SqlParser(); - - @Test - public void selectFromSubqueryShouldPass() throws SqlParseException { - Select select = parseSelect( - StringUtils.format( - "SELECT t.T1 as age1, t.T2 as balance1 " + - "FROM (SELECT age as T1, balance as T2 FROM %s/account) t", - TEST_INDEX_ACCOUNT)); - - assertEquals(2, select.getFields().size()); - assertEquals("age", select.getFields().get(0).getName()); - assertEquals("age1", select.getFields().get(0).getAlias()); - assertEquals("balance", select.getFields().get(1).getName()); - assertEquals("balance1", select.getFields().get(1).getAlias()); - } - - @Test - public void selectFromSubqueryWithoutAliasShouldPass() throws SqlParseException { - Select select = parseSelect( - StringUtils.format( - "SELECT t.age as finalAge, t.balance as finalBalance " + - "FROM (SELECT age, balance FROM %s/account) t", - TEST_INDEX_ACCOUNT)); - - assertEquals(2, select.getFields().size()); - assertEquals("age", select.getFields().get(0).getName()); - assertEquals("finalAge", select.getFields().get(0).getAlias()); - assertEquals("balance", select.getFields().get(1).getName()); - assertEquals("finalBalance", select.getFields().get(1).getAlias()); - } - - @Test - public void selectFromSubqueryShouldIgnoreUnusedField() throws SqlParseException { - Select select = parseSelect( - StringUtils.format( - "SELECT t.T1 as age1 " + - "FROM (SELECT age as T1, balance as T2 FROM %s/account) t", - TEST_INDEX_ACCOUNT)); - - assertEquals(1, select.getFields().size()); - assertEquals("age", select.getFields().get(0).getName()); - assertEquals("age1", select.getFields().get(0).getAlias()); - } - - @Test - public void selectFromSubqueryWithAggShouldPass() throws SqlParseException { - Select select = parseSelect( - StringUtils.format( - "SELECT t.TEMP as count " + - "FROM (SELECT COUNT(*) as TEMP 
FROM %s/account) t", - TEST_INDEX_ACCOUNT)); - assertEquals(1, select.getFields().size()); - assertEquals("COUNT", select.getFields().get(0).getName()); - assertEquals("count", select.getFields().get(0).getAlias()); - } - - @Test - public void selectFromSubqueryWithWhereAndCountShouldPass() throws SqlParseException { - Select select = parseSelect( - StringUtils.format( - "SELECT t.TEMP as count " + - "FROM (SELECT COUNT(*) as TEMP FROM %s/account WHERE age > 30) t", - TEST_INDEX_ACCOUNT)); - - assertEquals(1, select.getFields().size()); - assertEquals("COUNT", select.getFields().get(0).getName()); - assertEquals("count", select.getFields().get(0).getAlias()); - } - - @Test - public void selectFromSubqueryWithCountAndGroupByAndOrderByShouldPass() throws SqlParseException { - Select select = parseSelect( - StringUtils.format( - "SELECT t.TEMP as count " + - "FROM (SELECT COUNT(*) as TEMP FROM %s/account GROUP BY age ORDER BY TEMP) t", - TEST_INDEX_ACCOUNT)); - - assertEquals(1, select.getFields().size()); - assertEquals("COUNT", select.getFields().get(0).getName()); - assertEquals("count", select.getFields().get(0).getAlias()); - assertEquals(1, select.getOrderBys().size()); - assertEquals("count", select.getOrderBys().get(0).getName()); - assertEquals("count", select.getOrderBys().get(0).getSortField().getName()); - } - - @Test - public void selectFromSubqueryWithCountAndGroupByAndHavingShouldPass() throws Exception { - - Select select = parseSelect( - StringUtils.format("SELECT t.T1 as g, t.T2 as c " + - "FROM (SELECT gender as T1, COUNT(*) as T2 " + - " FROM %s/account " + - " GROUP BY gender " + - " HAVING T2 > 500) t", TEST_INDEX_ACCOUNT)); - - assertEquals(2, select.getFields().size()); - assertEquals("gender", select.getFields().get(0).getName()); - assertEquals("g", select.getFields().get(0).getAlias()); - assertEquals("COUNT", select.getFields().get(1).getName()); - assertEquals("c", select.getFields().get(1).getAlias()); - assertEquals(1, 
select.getHaving().getConditions().size()); - assertEquals("c", ((Condition) select.getHaving().getConditions().get(0)).getName()); - } - - @Test - public void selectFromSubqueryCountAndSum() throws Exception { - Select select = parseSelect( - StringUtils.format( - "SELECT t.TEMP1 as count, t.TEMP2 as balance " + - "FROM (SELECT COUNT(*) as TEMP1, SUM(balance) as TEMP2 " + - " FROM %s/account) t", - TEST_INDEX_ACCOUNT)); - assertEquals(2, select.getFields().size()); - assertEquals("COUNT", select.getFields().get(0).getName()); - assertEquals("count", select.getFields().get(0).getAlias()); - assertEquals("SUM", select.getFields().get(1).getName()); - assertEquals("balance", select.getFields().get(1).getAlias()); - } - - private Select parseSelect(String query) throws SqlParseException { - return parser.parseSelect((SQLQueryExpr) new ElasticSqlExprParser(query).expr()); - } + private static SqlParser parser = new SqlParser(); + + @Test + public void selectFromSubqueryShouldPass() throws SqlParseException { + Select select = + parseSelect( + StringUtils.format( + "SELECT t.T1 as age1, t.T2 as balance1 " + + "FROM (SELECT age as T1, balance as T2 FROM %s/account) t", + TEST_INDEX_ACCOUNT)); + + assertEquals(2, select.getFields().size()); + assertEquals("age", select.getFields().get(0).getName()); + assertEquals("age1", select.getFields().get(0).getAlias()); + assertEquals("balance", select.getFields().get(1).getName()); + assertEquals("balance1", select.getFields().get(1).getAlias()); + } + + @Test + public void selectFromSubqueryWithoutAliasShouldPass() throws SqlParseException { + Select select = + parseSelect( + StringUtils.format( + "SELECT t.age as finalAge, t.balance as finalBalance " + + "FROM (SELECT age, balance FROM %s/account) t", + TEST_INDEX_ACCOUNT)); + + assertEquals(2, select.getFields().size()); + assertEquals("age", select.getFields().get(0).getName()); + assertEquals("finalAge", select.getFields().get(0).getAlias()); + assertEquals("balance", 
select.getFields().get(1).getName()); + assertEquals("finalBalance", select.getFields().get(1).getAlias()); + } + + @Test + public void selectFromSubqueryShouldIgnoreUnusedField() throws SqlParseException { + Select select = + parseSelect( + StringUtils.format( + "SELECT t.T1 as age1 " + "FROM (SELECT age as T1, balance as T2 FROM %s/account) t", + TEST_INDEX_ACCOUNT)); + + assertEquals(1, select.getFields().size()); + assertEquals("age", select.getFields().get(0).getName()); + assertEquals("age1", select.getFields().get(0).getAlias()); + } + + @Test + public void selectFromSubqueryWithAggShouldPass() throws SqlParseException { + Select select = + parseSelect( + StringUtils.format( + "SELECT t.TEMP as count " + "FROM (SELECT COUNT(*) as TEMP FROM %s/account) t", + TEST_INDEX_ACCOUNT)); + assertEquals(1, select.getFields().size()); + assertEquals("COUNT", select.getFields().get(0).getName()); + assertEquals("count", select.getFields().get(0).getAlias()); + } + + @Test + public void selectFromSubqueryWithWhereAndCountShouldPass() throws SqlParseException { + Select select = + parseSelect( + StringUtils.format( + "SELECT t.TEMP as count " + + "FROM (SELECT COUNT(*) as TEMP FROM %s/account WHERE age > 30) t", + TEST_INDEX_ACCOUNT)); + + assertEquals(1, select.getFields().size()); + assertEquals("COUNT", select.getFields().get(0).getName()); + assertEquals("count", select.getFields().get(0).getAlias()); + } + + @Test + public void selectFromSubqueryWithCountAndGroupByAndOrderByShouldPass() throws SqlParseException { + Select select = + parseSelect( + StringUtils.format( + "SELECT t.TEMP as count " + + "FROM (SELECT COUNT(*) as TEMP FROM %s/account GROUP BY age ORDER BY TEMP) t", + TEST_INDEX_ACCOUNT)); + + assertEquals(1, select.getFields().size()); + assertEquals("COUNT", select.getFields().get(0).getName()); + assertEquals("count", select.getFields().get(0).getAlias()); + assertEquals(1, select.getOrderBys().size()); + assertEquals("count", 
select.getOrderBys().get(0).getName()); + assertEquals("count", select.getOrderBys().get(0).getSortField().getName()); + } + + @Test + public void selectFromSubqueryWithCountAndGroupByAndHavingShouldPass() throws Exception { + + Select select = + parseSelect( + StringUtils.format( + "SELECT t.T1 as g, t.T2 as c " + + "FROM (SELECT gender as T1, COUNT(*) as T2 " + + " FROM %s/account " + + " GROUP BY gender " + + " HAVING T2 > 500) t", + TEST_INDEX_ACCOUNT)); + + assertEquals(2, select.getFields().size()); + assertEquals("gender", select.getFields().get(0).getName()); + assertEquals("g", select.getFields().get(0).getAlias()); + assertEquals("COUNT", select.getFields().get(1).getName()); + assertEquals("c", select.getFields().get(1).getAlias()); + assertEquals(1, select.getHaving().getConditions().size()); + assertEquals("c", ((Condition) select.getHaving().getConditions().get(0)).getName()); + } + + @Test + public void selectFromSubqueryCountAndSum() throws Exception { + Select select = + parseSelect( + StringUtils.format( + "SELECT t.TEMP1 as count, t.TEMP2 as balance " + + "FROM (SELECT COUNT(*) as TEMP1, SUM(balance) as TEMP2 " + + " FROM %s/account) t", + TEST_INDEX_ACCOUNT)); + assertEquals(2, select.getFields().size()); + assertEquals("COUNT", select.getFields().get(0).getName()); + assertEquals("count", select.getFields().get(0).getAlias()); + assertEquals("SUM", select.getFields().get(1).getName()); + assertEquals("balance", select.getFields().get(1).getAlias()); + } + + private Select parseSelect(String query) throws SqlParseException { + return parser.parseSelect((SQLQueryExpr) new ElasticSqlExprParser(query).expr()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/BindingTupleQueryPlannerExecuteTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/BindingTupleQueryPlannerExecuteTest.java index 9f6fcbcc6d..1260b551fb 100644 --- 
a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/BindingTupleQueryPlannerExecuteTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/BindingTupleQueryPlannerExecuteTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.planner; import static org.hamcrest.MatcherAssert.assertThat; @@ -35,79 +34,78 @@ @RunWith(MockitoJUnitRunner.class) public class BindingTupleQueryPlannerExecuteTest { - @Mock - private Client client; - @Mock - private SearchResponse aggResponse; - @Mock - private ColumnTypeProvider columnTypeProvider; + @Mock private Client client; + @Mock private SearchResponse aggResponse; + @Mock private ColumnTypeProvider columnTypeProvider; - @Before - public void init() { - MockitoAnnotations.initMocks(this); + @Before + public void init() { + MockitoAnnotations.initMocks(this); - ActionFuture mockFuture = mock(ActionFuture.class); - when(client.execute(any(), any())).thenReturn(mockFuture); - when(mockFuture.actionGet()).thenAnswer(invocationOnMock -> aggResponse); - } + ActionFuture mockFuture = mock(ActionFuture.class); + when(client.execute(any(), any())).thenReturn(mockFuture); + when(mockFuture.actionGet()).thenAnswer(invocationOnMock -> aggResponse); + } - @Test - public void testAggregationShouldPass() { - assertThat(query("SELECT gender, MAX(age) as max, MIN(age) as min FROM accounts GROUP BY gender", - mockSearchAggregation()), - containsInAnyOrder( - BindingTuple.from(ImmutableMap.of("gender", "m", "max", 20d, "min", 10d)), - BindingTuple.from(ImmutableMap.of("gender", "f", "max", 40d, "min", 20d)))); - } + @Test + public void testAggregationShouldPass() { + assertThat( + query( + "SELECT gender, MAX(age) as max, MIN(age) as min FROM accounts GROUP BY gender", + mockSearchAggregation()), + containsInAnyOrder( + BindingTuple.from(ImmutableMap.of("gender", "m", "max", 20d, "min", 10d)), + BindingTuple.from(ImmutableMap.of("gender", "f", "max", 40d, 
"min", 20d)))); + } + protected List query(String sql, MockSearchAggregation mockAgg) { + doAnswer(mockAgg).when(aggResponse).getAggregations(); - protected List query(String sql, MockSearchAggregation mockAgg) { - doAnswer(mockAgg).when(aggResponse).getAggregations(); + BindingTupleQueryPlanner queryPlanner = + new BindingTupleQueryPlanner(client, SqlParserUtils.parse(sql), columnTypeProvider); + return queryPlanner.execute(); + } - BindingTupleQueryPlanner queryPlanner = - new BindingTupleQueryPlanner(client, SqlParserUtils.parse(sql), columnTypeProvider); - return queryPlanner.execute(); - } + private MockSearchAggregation mockSearchAggregation() { + return new MockSearchAggregation( + "{\n" + + " \"sterms#gender\": {\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": \"m\",\n" + + " \"doc_count\": 507,\n" + + " \"min#min\": {\n" + + " \"value\": 10\n" + + " },\n" + + " \"max#max\": {\n" + + " \"value\": 20\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key\": \"f\",\n" + + " \"doc_count\": 493,\n" + + " \"min#min\": {\n" + + " \"value\": 20\n" + + " },\n" + + " \"max#max\": {\n" + + " \"value\": 40\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"); + } - private MockSearchAggregation mockSearchAggregation() { - return new MockSearchAggregation("{\n" - + " \"sterms#gender\": {\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": \"m\",\n" - + " \"doc_count\": 507,\n" - + " \"min#min\": {\n" - + " \"value\": 10\n" - + " },\n" - + " \"max#max\": {\n" - + " \"value\": 20\n" - + " }\n" - + " },\n" - + " {\n" - + " \"key\": \"f\",\n" - + " \"doc_count\": 493,\n" - + " \"min#min\": {\n" - + " \"value\": 20\n" - + " },\n" - + " \"max#max\": {\n" - + " \"value\": 40\n" - + " }\n" - + " }\n" - + " ]\n" - + " }\n" - + "}"); - } - - protected static class MockSearchAggregation implements Answer { - private final Aggregations aggregation; + protected static class MockSearchAggregation implements Answer { + private final Aggregations aggregation; - public 
MockSearchAggregation(String agg) { - aggregation = AggregationUtils.fromJson(agg); - } + public MockSearchAggregation(String agg) { + aggregation = AggregationUtils.fromJson(agg); + } - @Override - public Aggregations answer(InvocationOnMock invocationOnMock) throws Throwable { - return aggregation; - } + @Override + public Aggregations answer(InvocationOnMock invocationOnMock) throws Throwable { + return aggregation; } + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/OpenSearchActionFactoryTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/OpenSearchActionFactoryTest.java index 0b7c7f6740..3c3a7b37f5 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/OpenSearchActionFactoryTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/OpenSearchActionFactoryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.planner; import static org.junit.Assert.assertFalse; @@ -15,60 +14,51 @@ import org.opensearch.sql.legacy.util.SqlParserUtils; public class OpenSearchActionFactoryTest { - @Test - public void josnOutputRequestShouldNotMigrateToQueryPlan() { - String sql = "SELECT age, MAX(balance) " + - "FROM account " + - "GROUP BY age"; + @Test + public void josnOutputRequestShouldNotMigrateToQueryPlan() { + String sql = "SELECT age, MAX(balance) " + "FROM account " + "GROUP BY age"; - assertFalse( - OpenSearchActionFactory.shouldMigrateToQueryPlan(SqlParserUtils.parse(sql), Format.JSON)); - } + assertFalse( + OpenSearchActionFactory.shouldMigrateToQueryPlan(SqlParserUtils.parse(sql), Format.JSON)); + } - @Test - public void nestQueryShouldNotMigrateToQueryPlan() { - String sql = "SELECT age, nested(balance) " + - "FROM account " + - "GROUP BY age"; + @Test + public void nestQueryShouldNotMigrateToQueryPlan() { + String sql = "SELECT age, nested(balance) " + "FROM account " + "GROUP BY age"; - assertFalse( - 
OpenSearchActionFactory.shouldMigrateToQueryPlan(SqlParserUtils.parse(sql), Format.JDBC)); - } + assertFalse( + OpenSearchActionFactory.shouldMigrateToQueryPlan(SqlParserUtils.parse(sql), Format.JDBC)); + } - @Test - public void nonAggregationQueryShouldNotMigrateToQueryPlan() { - String sql = "SELECT age " + - "FROM account "; + @Test + public void nonAggregationQueryShouldNotMigrateToQueryPlan() { + String sql = "SELECT age " + "FROM account "; - assertFalse( - OpenSearchActionFactory.shouldMigrateToQueryPlan(SqlParserUtils.parse(sql), Format.JDBC)); - } + assertFalse( + OpenSearchActionFactory.shouldMigrateToQueryPlan(SqlParserUtils.parse(sql), Format.JDBC)); + } - @Test - public void aggregationQueryWithoutGroupByShouldMigrateToQueryPlan() { - String sql = "SELECT age, COUNT(balance) " + - "FROM account "; + @Test + public void aggregationQueryWithoutGroupByShouldMigrateToQueryPlan() { + String sql = "SELECT age, COUNT(balance) " + "FROM account "; - assertTrue( - OpenSearchActionFactory.shouldMigrateToQueryPlan(SqlParserUtils.parse(sql), Format.JDBC)); - } + assertTrue( + OpenSearchActionFactory.shouldMigrateToQueryPlan(SqlParserUtils.parse(sql), Format.JDBC)); + } - @Test - public void aggregationQueryWithExpressionByShouldMigrateToQueryPlan() { - String sql = "SELECT age, MAX(balance) - MIN(balance) " + - "FROM account "; + @Test + public void aggregationQueryWithExpressionByShouldMigrateToQueryPlan() { + String sql = "SELECT age, MAX(balance) - MIN(balance) " + "FROM account "; - assertTrue( - OpenSearchActionFactory.shouldMigrateToQueryPlan(SqlParserUtils.parse(sql), Format.JDBC)); - } + assertTrue( + OpenSearchActionFactory.shouldMigrateToQueryPlan(SqlParserUtils.parse(sql), Format.JDBC)); + } - @Test - public void queryOnlyHasGroupByShouldMigrateToQueryPlan() { - String sql = "SELECT CAST(age AS DOUBLE) as alias " + - "FROM account " + - "GROUP BY alias"; + @Test + public void queryOnlyHasGroupByShouldMigrateToQueryPlan() { + String sql = "SELECT 
CAST(age AS DOUBLE) as alias " + "FROM account " + "GROUP BY alias"; - assertTrue( - OpenSearchActionFactory.shouldMigrateToQueryPlan(SqlParserUtils.parse(sql), Format.JDBC)); - } + assertTrue( + OpenSearchActionFactory.shouldMigrateToQueryPlan(SqlParserUtils.parse(sql), Format.JDBC)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerBatchTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerBatchTest.java index 52f8e2bc6e..0c77550a2f 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerBatchTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerBatchTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.planner; import static org.hamcrest.MatcherAssert.assertThat; @@ -17,7 +16,6 @@ import java.util.Collection; import java.util.List; import org.hamcrest.Matcher; -import org.junit.Ignore; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; @@ -25,221 +23,153 @@ import org.opensearch.search.SearchHits; /** - * Batch prefetch testing. Test against different combination of algorithm block size and scroll page size. + * Batch prefetch testing. Test against different combination of algorithm block size and scroll + * page size. */ @SuppressWarnings("unchecked") @RunWith(Parameterized.class) public class QueryPlannerBatchTest extends QueryPlannerTest { - private static final String TEST_SQL1 = - "SELECT " + - " /*! JOIN_CIRCUIT_BREAK_LIMIT(100) */ " + - " /*! JOIN_ALGORITHM_BLOCK_SIZE(%d) */ " + - " /*! 
JOIN_SCROLL_PAGE_SIZE(%d) */ " + - " e.lastname AS name, d.id AS id, d.name AS dep "; - - private static final String TEST_SQL2_JOIN1 = - "FROM department d " + - " %s employee e "; - - private static final String TEST_SQL2_JOIN2 = - "FROM employee e " + - " %s department d "; - - private static final String TEST_SQL3 = - "ON d.id = e.departmentId " + - " WHERE e.age <= 50"; - - private SearchHit[] employees = { - employee(1, "People 1", "A"), - employee(2, "People 2", "A"), - employee(3, "People 3", "A"), - employee(4, "People 4", "B"), - employee(5, "People 5", "B"), - employee(6, "People 6", "C"), - employee(7, "People 7", "D"), - employee(8, "People 8", "D"), - employee(9, "People 9", "E"), - employee(10, "People 10", "F") - }; - - private SearchHit[] departments = { - department(1, "A", "AWS"), - department(2, "C", "Capital One"), - department(3, "D", "Dell"), - department(4, "F", "Facebook"), - department(5, "G", "Google"), - department(6, "M", "Microsoft"), - department(7, "U", "Uber"), - }; - - private Matcher[] matched = { - hit( - kv("name", "People 1"), - kv("id", "A"), - kv("dep", "AWS") - ), - hit( - kv("name", "People 2"), - kv("id", "A"), - kv("dep", "AWS") - ), - hit( - kv("name", "People 3"), - kv("id", "A"), - kv("dep", "AWS") - ), - hit( - kv("name", "People 6"), - kv("id", "C"), - kv("dep", "Capital One") - ), - hit( - kv("name", "People 7"), - kv("id", "D"), - kv("dep", "Dell") - ), - hit( - kv("name", "People 8"), - kv("id", "D"), - kv("dep", "Dell") - ), - hit( - kv("name", "People 10"), - kv("id", "F"), - kv("dep", "Facebook") - ) - }; - - private Matcher[] mismatched1 = { - hit( - kv("name", null), - kv("id", "G"), - kv("dep", "Google") - ), - hit( - kv("name", null), - kv("id", "M"), - kv("dep", "Microsoft") - ), - hit( - kv("name", null), - kv("id", "U"), - kv("dep", "Uber") - ) - }; - - private Matcher[] mismatched2 = { - hit( - kv("name", "People 4"), - kv("id", null), - kv("dep", null) - ), - hit( - kv("name", "People 5"), - kv("id", 
null), - kv("dep", null) - ), - hit( - kv("name", "People 9"), - kv("id", null), - kv("dep", null) - ) - }; - - private Matcher expectedInnerJoinResult = hits(matched); - - /** Department left join Employee */ - private Matcher expectedLeftOuterJoinResult1 = hits(concat(matched, mismatched1)); - - /** Employee left join Department */ - private Matcher expectedLeftOuterJoinResult2 = hits(concat(matched, mismatched2)); - - /** Parameterized test cases */ - private final int blockSize; - private final int pageSize; - - public QueryPlannerBatchTest(int blockSize, int pageSize) { - this.blockSize = blockSize; - this.pageSize = pageSize; - } - - @Parameters - public static Collection data() { - List params = new ArrayList<>(); - for (int blockSize = 1; blockSize <= 11; blockSize++) { - for (int pageSize = 1; pageSize <= 11; pageSize++) { - params.add(new Object[]{ blockSize, pageSize }); - } - } - return params; - } - - @Test - public void departmentInnerJoinEmployee() { - assertThat( - query( - String.format( - TEST_SQL1 + TEST_SQL2_JOIN1 + TEST_SQL3, - blockSize, pageSize, "INNER JOIN"), - departments(pageSize, departments), - employees(pageSize, employees) - ), - expectedInnerJoinResult - ); - } - - @Test - public void employeeInnerJoinDepartment() { - assertThat( - query( - String.format( - TEST_SQL1 + TEST_SQL2_JOIN2 + TEST_SQL3, - blockSize, pageSize, "INNER JOIN"), - employees(pageSize, employees), - departments(pageSize, departments) - ), - expectedInnerJoinResult - ); - } - - @Test - public void departmentLeftJoinEmployee() { - assertThat( - query( - String.format( - TEST_SQL1 + TEST_SQL2_JOIN1 + TEST_SQL3, - blockSize, pageSize, "LEFT JOIN"), - departments(pageSize, departments), - employees(pageSize, employees) - ), - expectedLeftOuterJoinResult1 - ); - } - - @Test - public void employeeLeftJoinDepartment() { - assertThat( - query( - String.format( - TEST_SQL1 + TEST_SQL2_JOIN2 + TEST_SQL3, - blockSize, pageSize, "LEFT JOIN"), - employees(pageSize, employees), 
- departments(pageSize, departments) - ), - expectedLeftOuterJoinResult2 - ); - } - - private static Matcher[] concat(Matcher[] one, Matcher[] other) { - return concat(one, other, Matcher.class); - } - - /** Copy from OpenSearch ArrayUtils */ - private static T[] concat(T[] one, T[] other, Class clazz) { - T[] target = (T[]) Array.newInstance(clazz, one.length + other.length); - System.arraycopy(one, 0, target, 0, one.length); - System.arraycopy(other, 0, target, one.length, other.length); - return target; + private static final String TEST_SQL1 = + "SELECT " + + " /*! JOIN_CIRCUIT_BREAK_LIMIT(100) */ " + + " /*! JOIN_ALGORITHM_BLOCK_SIZE(%d) */ " + + " /*! JOIN_SCROLL_PAGE_SIZE(%d) */ " + + " e.lastname AS name, d.id AS id, d.name AS dep "; + + private static final String TEST_SQL2_JOIN1 = "FROM department d " + " %s employee e "; + + private static final String TEST_SQL2_JOIN2 = "FROM employee e " + " %s department d "; + + private static final String TEST_SQL3 = "ON d.id = e.departmentId " + " WHERE e.age <= 50"; + + private SearchHit[] employees = { + employee(1, "People 1", "A"), + employee(2, "People 2", "A"), + employee(3, "People 3", "A"), + employee(4, "People 4", "B"), + employee(5, "People 5", "B"), + employee(6, "People 6", "C"), + employee(7, "People 7", "D"), + employee(8, "People 8", "D"), + employee(9, "People 9", "E"), + employee(10, "People 10", "F") + }; + + private SearchHit[] departments = { + department(1, "A", "AWS"), + department(2, "C", "Capital One"), + department(3, "D", "Dell"), + department(4, "F", "Facebook"), + department(5, "G", "Google"), + department(6, "M", "Microsoft"), + department(7, "U", "Uber"), + }; + + private Matcher[] matched = { + hit(kv("name", "People 1"), kv("id", "A"), kv("dep", "AWS")), + hit(kv("name", "People 2"), kv("id", "A"), kv("dep", "AWS")), + hit(kv("name", "People 3"), kv("id", "A"), kv("dep", "AWS")), + hit(kv("name", "People 6"), kv("id", "C"), kv("dep", "Capital One")), + hit(kv("name", "People 7"), 
kv("id", "D"), kv("dep", "Dell")), + hit(kv("name", "People 8"), kv("id", "D"), kv("dep", "Dell")), + hit(kv("name", "People 10"), kv("id", "F"), kv("dep", "Facebook")) + }; + + private Matcher[] mismatched1 = { + hit(kv("name", null), kv("id", "G"), kv("dep", "Google")), + hit(kv("name", null), kv("id", "M"), kv("dep", "Microsoft")), + hit(kv("name", null), kv("id", "U"), kv("dep", "Uber")) + }; + + private Matcher[] mismatched2 = { + hit(kv("name", "People 4"), kv("id", null), kv("dep", null)), + hit(kv("name", "People 5"), kv("id", null), kv("dep", null)), + hit(kv("name", "People 9"), kv("id", null), kv("dep", null)) + }; + + private Matcher expectedInnerJoinResult = hits(matched); + + /** Department left join Employee */ + private Matcher expectedLeftOuterJoinResult1 = hits(concat(matched, mismatched1)); + + /** Employee left join Department */ + private Matcher expectedLeftOuterJoinResult2 = hits(concat(matched, mismatched2)); + + /** Parameterized test cases */ + private final int blockSize; + + private final int pageSize; + + public QueryPlannerBatchTest(int blockSize, int pageSize) { + this.blockSize = blockSize; + this.pageSize = pageSize; + } + + @Parameters + public static Collection data() { + List params = new ArrayList<>(); + for (int blockSize = 1; blockSize <= 11; blockSize++) { + for (int pageSize = 1; pageSize <= 11; pageSize++) { + params.add(new Object[] {blockSize, pageSize}); + } } + return params; + } + + @Test + public void departmentInnerJoinEmployee() { + assertThat( + query( + String.format( + TEST_SQL1 + TEST_SQL2_JOIN1 + TEST_SQL3, blockSize, pageSize, "INNER JOIN"), + departments(pageSize, departments), + employees(pageSize, employees)), + expectedInnerJoinResult); + } + + @Test + public void employeeInnerJoinDepartment() { + assertThat( + query( + String.format( + TEST_SQL1 + TEST_SQL2_JOIN2 + TEST_SQL3, blockSize, pageSize, "INNER JOIN"), + employees(pageSize, employees), + departments(pageSize, departments)), + 
expectedInnerJoinResult); + } + + @Test + public void departmentLeftJoinEmployee() { + assertThat( + query( + String.format( + TEST_SQL1 + TEST_SQL2_JOIN1 + TEST_SQL3, blockSize, pageSize, "LEFT JOIN"), + departments(pageSize, departments), + employees(pageSize, employees)), + expectedLeftOuterJoinResult1); + } + + @Test + public void employeeLeftJoinDepartment() { + assertThat( + query( + String.format( + TEST_SQL1 + TEST_SQL2_JOIN2 + TEST_SQL3, blockSize, pageSize, "LEFT JOIN"), + employees(pageSize, employees), + departments(pageSize, departments)), + expectedLeftOuterJoinResult2); + } + + private static Matcher[] concat(Matcher[] one, Matcher[] other) { + return concat(one, other, Matcher.class); + } + + /** Copy from OpenSearch ArrayUtils */ + private static T[] concat(T[] one, T[] other, Class clazz) { + T[] target = (T[]) Array.newInstance(clazz, one.length + other.length); + System.arraycopy(one, 0, target, 0, one.length); + System.arraycopy(other, 0, target, one.length, other.length); + return target; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerConfigTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerConfigTest.java index 07a84683ce..81d6d718b9 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerConfigTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerConfigTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.planner; import static org.hamcrest.MatcherAssert.assertThat; @@ -23,291 +22,252 @@ import org.opensearch.sql.legacy.query.planner.HashJoinQueryPlanRequestBuilder; import org.opensearch.sql.legacy.query.planner.core.Config; -/** - * Hint & Configuring Ability Test Cases - */ +/** Hint & Configuring Ability Test Cases */ public class QueryPlannerConfigTest extends QueryPlannerTest { - private static final Matcher 
DEFAULT_TOTAL_AND_TABLE_LIMIT_MATCHER = totalAndTableLimit(200, 0, 0); - - @Test - public void algorithmBlockSizeHint() { - assertThat( - parseHint("! JOIN_ALGORITHM_BLOCK_SIZE(100000)"), - hint( - hintType(HintType.JOIN_ALGORITHM_BLOCK_SIZE), - hintValues(100000) - ) - ); - } - - @Test - public void algorithmUseLegacy() { - assertThat( - parseHint("! JOIN_ALGORITHM_USE_LEGACY"), - hint( - hintType(HintType.JOIN_ALGORITHM_USE_LEGACY), - hintValues() - ) - ); - } - - @Test - public void algorithmBlockSizeHintWithSpaces() { - assertThat( - parseHint("! JOIN_ALGORITHM_BLOCK_SIZE ( 200000 ) "), - hint( - hintType(HintType.JOIN_ALGORITHM_BLOCK_SIZE), - hintValues(200000) - ) - ); - } - - @Test - public void scrollPageSizeHint() { - assertThat( - parseHint("! JOIN_SCROLL_PAGE_SIZE(1000) "), - hint( - hintType(HintType.JOIN_SCROLL_PAGE_SIZE), - hintValues(1000) - ) - ); - } - - @Test - public void scrollPageSizeHintWithTwoSizes() { - assertThat( - parseHint("! JOIN_SCROLL_PAGE_SIZE(1000, 2000) "), - hint( - hintType(HintType.JOIN_SCROLL_PAGE_SIZE), - hintValues(1000, 2000) - ) - ); - } - - @Test - public void circuitBreakLimitHint() { - assertThat( - parseHint("! JOIN_CIRCUIT_BREAK_LIMIT(80)"), - hint( - hintType(HintType.JOIN_CIRCUIT_BREAK_LIMIT), - hintValues(80) - ) - ); - } - - @Test - public void backOffRetryIntervalsHint() { - assertThat( - parseHint("! JOIN_BACK_OFF_RETRY_INTERVALS(1, 5)"), - hint( - hintType(HintType.JOIN_BACK_OFF_RETRY_INTERVALS), - hintValues(1, 5) - ) - ); - } - - @Test - public void timeOutHint() { - assertThat( - parseHint("! JOIN_TIME_OUT(120)"), - hint( - hintType(HintType.JOIN_TIME_OUT), - hintValues(120) - ) - ); - } - - @Test - public void blockSizeConfig() { - assertThat(queryPlannerConfig( - "SELECT /*! 
JOIN_ALGORITHM_BLOCK_SIZE(200000) */ " + - " d.name FROM employee e JOIN department d ON d.id = e.departmentId "), - config( - blockSize(200000), - scrollPageSize(Config.DEFAULT_SCROLL_PAGE_SIZE, Config.DEFAULT_SCROLL_PAGE_SIZE), - circuitBreakLimit(Config.DEFAULT_CIRCUIT_BREAK_LIMIT), - backOffRetryIntervals(Config.DEFAULT_BACK_OFF_RETRY_INTERVALS), - DEFAULT_TOTAL_AND_TABLE_LIMIT_MATCHER, - timeOut(Config.DEFAULT_TIME_OUT) - ) - ); - } - - @Test - public void scrollPageSizeConfig() { - assertThat(queryPlannerConfig( - "SELECT /*! JOIN_SCROLL_PAGE_SIZE(50, 20) */ " + - " d.name FROM employee e JOIN department d ON d.id = e.departmentId "), - config( - blockSize(Config.DEFAULT_BLOCK_SIZE), - scrollPageSize(50, 20), - circuitBreakLimit(Config.DEFAULT_CIRCUIT_BREAK_LIMIT), - backOffRetryIntervals(Config.DEFAULT_BACK_OFF_RETRY_INTERVALS), - DEFAULT_TOTAL_AND_TABLE_LIMIT_MATCHER, - timeOut(Config.DEFAULT_TIME_OUT) - ) - ); - } - - @Test - public void circuitBreakLimitConfig() { - assertThat(queryPlannerConfig( - "SELECT /*! JOIN_CIRCUIT_BREAK_LIMIT(60) */ " + - " d.name FROM employee e JOIN department d ON d.id = e.departmentId "), - config( - blockSize(Config.DEFAULT_BLOCK_SIZE), - scrollPageSize(Config.DEFAULT_SCROLL_PAGE_SIZE, Config.DEFAULT_SCROLL_PAGE_SIZE), - circuitBreakLimit(60), - backOffRetryIntervals(Config.DEFAULT_BACK_OFF_RETRY_INTERVALS), - DEFAULT_TOTAL_AND_TABLE_LIMIT_MATCHER, - timeOut(Config.DEFAULT_TIME_OUT) - ) - ); - } - - @Test - public void backOffRetryIntervalsConfig() { - assertThat(queryPlannerConfig( - "SELECT /*! 
JOIN_BACK_OFF_RETRY_INTERVALS(1, 3, 5, 10) */ " + - " d.name FROM employee e JOIN department d ON d.id = e.departmentId "), - config( - blockSize(Config.DEFAULT_BLOCK_SIZE), - scrollPageSize(Config.DEFAULT_SCROLL_PAGE_SIZE, Config.DEFAULT_SCROLL_PAGE_SIZE), - circuitBreakLimit(Config.DEFAULT_CIRCUIT_BREAK_LIMIT), - backOffRetryIntervals(new double[]{1, 3, 5, 10}), - DEFAULT_TOTAL_AND_TABLE_LIMIT_MATCHER, - timeOut(Config.DEFAULT_TIME_OUT) - ) - ); - } - - @Test - public void totalAndTableLimitConfig() { - assertThat(queryPlannerConfig( - "SELECT /*! JOIN_TABLES_LIMIT(10, 20) */ " + - " d.name FROM employee e JOIN department d ON d.id = e.departmentId LIMIT 50"), - config( - blockSize(Config.DEFAULT_BLOCK_SIZE), - scrollPageSize(Config.DEFAULT_SCROLL_PAGE_SIZE, Config.DEFAULT_SCROLL_PAGE_SIZE), - circuitBreakLimit(Config.DEFAULT_CIRCUIT_BREAK_LIMIT), - backOffRetryIntervals(Config.DEFAULT_BACK_OFF_RETRY_INTERVALS), - totalAndTableLimit(50, 10, 20), - timeOut(Config.DEFAULT_TIME_OUT) - ) - ); - } - - @Test - public void timeOutConfig() { - assertThat(queryPlannerConfig( - "SELECT /*! JOIN_TIME_OUT(120) */ " + - " d.name FROM employee e JOIN department d ON d.id = e.departmentId"), - config( - blockSize(Config.DEFAULT_BLOCK_SIZE), - scrollPageSize(Config.DEFAULT_SCROLL_PAGE_SIZE, Config.DEFAULT_SCROLL_PAGE_SIZE), - circuitBreakLimit(Config.DEFAULT_CIRCUIT_BREAK_LIMIT), - backOffRetryIntervals(Config.DEFAULT_BACK_OFF_RETRY_INTERVALS), - DEFAULT_TOTAL_AND_TABLE_LIMIT_MATCHER, - timeOut(120) - ) - ); - } - - @Test - public void multipleConfigCombined() { - assertThat(queryPlannerConfig( - "SELECT " + - " /*! JOIN_ALGORITHM_BLOCK_SIZE(100) */ " + - " /*! JOIN_SCROLL_PAGE_SIZE(50, 20) */ " + - " /*! 
JOIN_CIRCUIT_BREAK_LIMIT(10) */ " + - " d.name FROM employee e JOIN department d ON d.id = e.departmentId "), - config( - blockSize(100), - scrollPageSize(50, 20), - circuitBreakLimit(10), - backOffRetryIntervals(Config.DEFAULT_BACK_OFF_RETRY_INTERVALS), - DEFAULT_TOTAL_AND_TABLE_LIMIT_MATCHER, - timeOut(Config.DEFAULT_TIME_OUT) - ) - ); - } - - private Hint parseHint(String hintStr) { - try { - return HintFactory.getHintFromString(hintStr); - } - catch (SqlParseException e) { - throw new IllegalArgumentException(e); - } - } - - private Config queryPlannerConfig(String sql) { - HashJoinQueryPlanRequestBuilder request = ((HashJoinQueryPlanRequestBuilder) createRequestBuilder(sql)); - request.plan(); - return request.getConfig(); + private static final Matcher DEFAULT_TOTAL_AND_TABLE_LIMIT_MATCHER = + totalAndTableLimit(200, 0, 0); + + @Test + public void algorithmBlockSizeHint() { + assertThat( + parseHint("! JOIN_ALGORITHM_BLOCK_SIZE(100000)"), + hint(hintType(HintType.JOIN_ALGORITHM_BLOCK_SIZE), hintValues(100000))); + } + + @Test + public void algorithmUseLegacy() { + assertThat( + parseHint("! JOIN_ALGORITHM_USE_LEGACY"), + hint(hintType(HintType.JOIN_ALGORITHM_USE_LEGACY), hintValues())); + } + + @Test + public void algorithmBlockSizeHintWithSpaces() { + assertThat( + parseHint("! JOIN_ALGORITHM_BLOCK_SIZE ( 200000 ) "), + hint(hintType(HintType.JOIN_ALGORITHM_BLOCK_SIZE), hintValues(200000))); + } + + @Test + public void scrollPageSizeHint() { + assertThat( + parseHint("! JOIN_SCROLL_PAGE_SIZE(1000) "), + hint(hintType(HintType.JOIN_SCROLL_PAGE_SIZE), hintValues(1000))); + } + + @Test + public void scrollPageSizeHintWithTwoSizes() { + assertThat( + parseHint("! JOIN_SCROLL_PAGE_SIZE(1000, 2000) "), + hint(hintType(HintType.JOIN_SCROLL_PAGE_SIZE), hintValues(1000, 2000))); + } + + @Test + public void circuitBreakLimitHint() { + assertThat( + parseHint("! 
JOIN_CIRCUIT_BREAK_LIMIT(80)"), + hint(hintType(HintType.JOIN_CIRCUIT_BREAK_LIMIT), hintValues(80))); + } + + @Test + public void backOffRetryIntervalsHint() { + assertThat( + parseHint("! JOIN_BACK_OFF_RETRY_INTERVALS(1, 5)"), + hint(hintType(HintType.JOIN_BACK_OFF_RETRY_INTERVALS), hintValues(1, 5))); + } + + @Test + public void timeOutHint() { + assertThat( + parseHint("! JOIN_TIME_OUT(120)"), hint(hintType(HintType.JOIN_TIME_OUT), hintValues(120))); + } + + @Test + public void blockSizeConfig() { + assertThat( + queryPlannerConfig( + "SELECT /*! JOIN_ALGORITHM_BLOCK_SIZE(200000) */ " + + " d.name FROM employee e JOIN department d ON d.id = e.departmentId "), + config( + blockSize(200000), + scrollPageSize(Config.DEFAULT_SCROLL_PAGE_SIZE, Config.DEFAULT_SCROLL_PAGE_SIZE), + circuitBreakLimit(Config.DEFAULT_CIRCUIT_BREAK_LIMIT), + backOffRetryIntervals(Config.DEFAULT_BACK_OFF_RETRY_INTERVALS), + DEFAULT_TOTAL_AND_TABLE_LIMIT_MATCHER, + timeOut(Config.DEFAULT_TIME_OUT))); + } + + @Test + public void scrollPageSizeConfig() { + assertThat( + queryPlannerConfig( + "SELECT /*! JOIN_SCROLL_PAGE_SIZE(50, 20) */ " + + " d.name FROM employee e JOIN department d ON d.id = e.departmentId "), + config( + blockSize(Config.DEFAULT_BLOCK_SIZE), + scrollPageSize(50, 20), + circuitBreakLimit(Config.DEFAULT_CIRCUIT_BREAK_LIMIT), + backOffRetryIntervals(Config.DEFAULT_BACK_OFF_RETRY_INTERVALS), + DEFAULT_TOTAL_AND_TABLE_LIMIT_MATCHER, + timeOut(Config.DEFAULT_TIME_OUT))); + } + + @Test + public void circuitBreakLimitConfig() { + assertThat( + queryPlannerConfig( + "SELECT /*! 
JOIN_CIRCUIT_BREAK_LIMIT(60) */ " + + " d.name FROM employee e JOIN department d ON d.id = e.departmentId "), + config( + blockSize(Config.DEFAULT_BLOCK_SIZE), + scrollPageSize(Config.DEFAULT_SCROLL_PAGE_SIZE, Config.DEFAULT_SCROLL_PAGE_SIZE), + circuitBreakLimit(60), + backOffRetryIntervals(Config.DEFAULT_BACK_OFF_RETRY_INTERVALS), + DEFAULT_TOTAL_AND_TABLE_LIMIT_MATCHER, + timeOut(Config.DEFAULT_TIME_OUT))); + } + + @Test + public void backOffRetryIntervalsConfig() { + assertThat( + queryPlannerConfig( + "SELECT /*! JOIN_BACK_OFF_RETRY_INTERVALS(1, 3, 5, 10) */ " + + " d.name FROM employee e JOIN department d ON d.id = e.departmentId "), + config( + blockSize(Config.DEFAULT_BLOCK_SIZE), + scrollPageSize(Config.DEFAULT_SCROLL_PAGE_SIZE, Config.DEFAULT_SCROLL_PAGE_SIZE), + circuitBreakLimit(Config.DEFAULT_CIRCUIT_BREAK_LIMIT), + backOffRetryIntervals(new double[] {1, 3, 5, 10}), + DEFAULT_TOTAL_AND_TABLE_LIMIT_MATCHER, + timeOut(Config.DEFAULT_TIME_OUT))); + } + + @Test + public void totalAndTableLimitConfig() { + assertThat( + queryPlannerConfig( + "SELECT /*! JOIN_TABLES_LIMIT(10, 20) */ " + + " d.name FROM employee e JOIN department d ON d.id = e.departmentId LIMIT 50"), + config( + blockSize(Config.DEFAULT_BLOCK_SIZE), + scrollPageSize(Config.DEFAULT_SCROLL_PAGE_SIZE, Config.DEFAULT_SCROLL_PAGE_SIZE), + circuitBreakLimit(Config.DEFAULT_CIRCUIT_BREAK_LIMIT), + backOffRetryIntervals(Config.DEFAULT_BACK_OFF_RETRY_INTERVALS), + totalAndTableLimit(50, 10, 20), + timeOut(Config.DEFAULT_TIME_OUT))); + } + + @Test + public void timeOutConfig() { + assertThat( + queryPlannerConfig( + "SELECT /*! 
JOIN_TIME_OUT(120) */ " + + " d.name FROM employee e JOIN department d ON d.id = e.departmentId"), + config( + blockSize(Config.DEFAULT_BLOCK_SIZE), + scrollPageSize(Config.DEFAULT_SCROLL_PAGE_SIZE, Config.DEFAULT_SCROLL_PAGE_SIZE), + circuitBreakLimit(Config.DEFAULT_CIRCUIT_BREAK_LIMIT), + backOffRetryIntervals(Config.DEFAULT_BACK_OFF_RETRY_INTERVALS), + DEFAULT_TOTAL_AND_TABLE_LIMIT_MATCHER, + timeOut(120))); + } + + @Test + public void multipleConfigCombined() { + assertThat( + queryPlannerConfig( + "SELECT " + + " /*! JOIN_ALGORITHM_BLOCK_SIZE(100) */ " + + " /*! JOIN_SCROLL_PAGE_SIZE(50, 20) */ " + + " /*! JOIN_CIRCUIT_BREAK_LIMIT(10) */ " + + " d.name FROM employee e JOIN department d ON d.id = e.departmentId "), + config( + blockSize(100), + scrollPageSize(50, 20), + circuitBreakLimit(10), + backOffRetryIntervals(Config.DEFAULT_BACK_OFF_RETRY_INTERVALS), + DEFAULT_TOTAL_AND_TABLE_LIMIT_MATCHER, + timeOut(Config.DEFAULT_TIME_OUT))); + } + + private Hint parseHint(String hintStr) { + try { + return HintFactory.getHintFromString(hintStr); + } catch (SqlParseException e) { + throw new IllegalArgumentException(e); } - - private Matcher hint(Matcher typeMatcher, Matcher valuesMatcher) { - return both( - featureValueOf("HintType", typeMatcher, Hint::getType) - ).and( - featureValueOf("HintValue", valuesMatcher, Hint::getParams) - ); - } - - private Matcher hintType(HintType type) { - return is(type); - } - - private Matcher hintValues(Object... 
values) { - if (values.length == 0) { - return emptyArray(); - } - return arrayContaining(values); - } - - private Matcher config(Matcher blockSizeMatcher, - Matcher scrollPageSizeMatcher, - Matcher circuitBreakLimitMatcher, - Matcher backOffRetryIntervalsMatcher, - Matcher totalAndTableLimitMatcher, - Matcher timeOutMatcher) { - return allOf( - featureValueOf("Block size", blockSizeMatcher, (cfg -> cfg.blockSize().size())), - featureValueOf("Scroll page size", scrollPageSizeMatcher, Config::scrollPageSize), - featureValueOf("Circuit break limit", circuitBreakLimitMatcher, Config::circuitBreakLimit), - featureValueOf("Back off retry intervals", backOffRetryIntervalsMatcher, Config::backOffRetryIntervals), - featureValueOf("Total and table limit", totalAndTableLimitMatcher, - (cfg -> new Integer[]{cfg.totalLimit(), cfg.tableLimit1(), cfg.tableLimit2()})), - featureValueOf("Time out", timeOutMatcher, Config::timeout) - ); + } + + private Config queryPlannerConfig(String sql) { + HashJoinQueryPlanRequestBuilder request = + ((HashJoinQueryPlanRequestBuilder) createRequestBuilder(sql)); + request.plan(); + return request.getConfig(); + } + + private Matcher hint(Matcher typeMatcher, Matcher valuesMatcher) { + return both(featureValueOf("HintType", typeMatcher, Hint::getType)) + .and(featureValueOf("HintValue", valuesMatcher, Hint::getParams)); + } + + private Matcher hintType(HintType type) { + return is(type); + } + + private Matcher hintValues(Object... 
values) { + if (values.length == 0) { + return emptyArray(); } - - private Matcher blockSize(int size) { - return is(size); - } - - @SuppressWarnings("unchecked") - private Matcher scrollPageSize(int size1, int size2) { - return arrayContaining(is(size1), is(size2)); - } - - private Matcher circuitBreakLimit(int limit) { - return is(limit); - } - - private Matcher backOffRetryIntervals(double[] intervals) { - return is(intervals); - } - - @SuppressWarnings("unchecked") - private static Matcher totalAndTableLimit(int totalLimit, int tableLimit1, int tableLimit2) { - return arrayContaining(is(totalLimit), is(tableLimit1), is(tableLimit2)); - } - - private static Matcher timeOut(int timeout) { - return is(timeout); - } - + return arrayContaining(values); + } + + private Matcher config( + Matcher blockSizeMatcher, + Matcher scrollPageSizeMatcher, + Matcher circuitBreakLimitMatcher, + Matcher backOffRetryIntervalsMatcher, + Matcher totalAndTableLimitMatcher, + Matcher timeOutMatcher) { + return allOf( + featureValueOf("Block size", blockSizeMatcher, (cfg -> cfg.blockSize().size())), + featureValueOf("Scroll page size", scrollPageSizeMatcher, Config::scrollPageSize), + featureValueOf("Circuit break limit", circuitBreakLimitMatcher, Config::circuitBreakLimit), + featureValueOf( + "Back off retry intervals", + backOffRetryIntervalsMatcher, + Config::backOffRetryIntervals), + featureValueOf( + "Total and table limit", + totalAndTableLimitMatcher, + (cfg -> new Integer[] {cfg.totalLimit(), cfg.tableLimit1(), cfg.tableLimit2()})), + featureValueOf("Time out", timeOutMatcher, Config::timeout)); + } + + private Matcher blockSize(int size) { + return is(size); + } + + @SuppressWarnings("unchecked") + private Matcher scrollPageSize(int size1, int size2) { + return arrayContaining(is(size1), is(size2)); + } + + private Matcher circuitBreakLimit(int limit) { + return is(limit); + } + + private Matcher backOffRetryIntervals(double[] intervals) { + return is(intervals); + } + + 
@SuppressWarnings("unchecked") + private static Matcher totalAndTableLimit( + int totalLimit, int tableLimit1, int tableLimit2) { + return arrayContaining(is(totalLimit), is(tableLimit1), is(tableLimit2)); + } + + private static Matcher timeOut(int timeout) { + return is(timeout); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerExecuteTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerExecuteTest.java index 55ea8c390b..dc8e094e2d 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerExecuteTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerExecuteTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.planner; import static org.opensearch.sql.legacy.util.MatcherUtils.hit; @@ -14,767 +13,420 @@ import org.opensearch.search.SearchHit; import org.opensearch.sql.legacy.util.MatcherUtils; -/** - * Query planner execution unit test - */ +/** Query planner execution unit test */ public class QueryPlannerExecuteTest extends QueryPlannerTest { - @Test - public void simpleJoin() { - MatcherAssert.assertThat( - query( - "SELECT d.name, e.lastname FROM employee e " + - " JOIN department d ON d.id = e.departmentId " + - " WHERE d.region = 'US' AND e.age > 30", - employees( - employee(1, "Alice", "1"), - employee(2, "Hank", "1") - ), - departments( - department(1, "1", "AWS") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("e.lastname", "Alice") - ), - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("e.lastname", "Hank") - ) - ) - ); - } - - @Test - public void simpleJoinWithSelectAll() { - MatcherAssert.assertThat( - query( - "SELECT * FROM employee e " + - " JOIN department d ON d.id = e.departmentId ", - employees( - employee(1, "Alice", "1"), - employee(2, "Hank", "1") - ), - departments( - department(1, "1", "AWS") - ) - 
), - hits( - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("d.id", "1"), - MatcherUtils.kv("e.lastname", "Alice"), - MatcherUtils.kv("e.departmentId", "1") - ), - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("d.id", "1"), - MatcherUtils.kv("e.lastname", "Hank"), - MatcherUtils.kv("e.departmentId", "1") - ) - ) - ); - } - - @Test - public void simpleLeftJoinWithSelectAllFromOneTable() { - MatcherAssert.assertThat( - query( - "SELECT e.lastname, d.* FROM employee e " + - " LEFT JOIN department d ON d.id = e.departmentId ", - employees( - employee(1, "Alice", "1"), - employee(2, "Hank", "1"), - employee(3, "Allen", "3") - ), - departments( - department(1, "1", "AWS"), - department(2, "2", "Retail") - ) - ), - hits( - hit( - MatcherUtils.kv("e.lastname", "Alice"), - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("d.id", "1") - ), - hit( - MatcherUtils.kv("e.lastname", "Hank"), - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("d.id", "1") - ), - hit( - MatcherUtils.kv("e.lastname", "Allen") - /* - * Not easy to figure out all column names for d.* without reading metadata - * or look into other rows from d. But in the extreme case, d could be empty table - * which requires metadata read anyway. 
- */ - //kv("d.name", null), - //kv("d.id", null) - ) - ) - ); - } - - @Test - public void simpleJoinWithSelectAllFromBothTables() { - MatcherAssert.assertThat( - query( - "SELECT e.*, d.* FROM employee e " + - " JOIN department d ON d.id = e.departmentId ", - employees( - employee(1, "Alice", "1"), - employee(2, "Hank", "1") - ), - departments( - department(1, "1", "AWS") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("d.id", "1"), - MatcherUtils.kv("e.lastname", "Alice"), - MatcherUtils.kv("e.departmentId", "1") - ), - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("d.id", "1"), - MatcherUtils.kv("e.lastname", "Hank"), - MatcherUtils.kv("e.departmentId", "1") - ) - ) - ); - } - - @Test - public void simpleJoinWithoutMatch() { - MatcherAssert.assertThat( - query( - "SELECT d.name, e.lastname FROM employee e " + - " JOIN department d ON d.id = e.departmentId " + - " WHERE d.region = 'US' AND e.age > 30", - employees( - employee(1, "Alice", "2"), - employee(2, "Hank", "3") - ), - departments( - department(1, "1", "AWS") - ) - ), - hits() - ); - } - - @Test - public void simpleJoinWithSomeMatches() { - MatcherAssert.assertThat( - query( - "SELECT d.name, e.lastname FROM employee e " + - " JOIN department d ON d.id = e.departmentId " + - " WHERE d.region = 'US' AND e.age > 30", - employees( - employee(1, "Alice", "2"), - employee(2, "Hank", "3") - ), - departments( - department(1, "1", "AWS"), - department(2, "2", "Retail") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "Retail"), - MatcherUtils.kv("e.lastname", "Alice") - ) - ) - ); - } - - @Test - public void simpleJoinWithAllMatches() { - MatcherAssert.assertThat( - query( - "SELECT d.name, e.lastname FROM employee e " + - " JOIN department d ON d.id = e.departmentId " + - " WHERE d.region = 'US' AND e.age > 30", - employees( - employee(1, "Alice", "1"), - employee(2, "Hank", "1"), - employee(3, "Mike", "2") - ), - departments( - department(1, "1", "AWS"), - 
department(2, "2", "Retail") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("e.lastname", "Alice") - ), - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("e.lastname", "Hank") - ), - hit( - MatcherUtils.kv("d.name", "Retail"), - MatcherUtils.kv("e.lastname", "Mike") - ) - ) - ); - } - - @Test - public void simpleJoinWithNull() { - MatcherAssert.assertThat( - query( - "SELECT d.name, e.lastname FROM employee e " + - " JOIN department d ON d.id = e.departmentId " + - " WHERE d.region = 'US' AND e.age > 30", - employees( - employee(1, "Alice", "1"), - employee(2, "Hank", null), - employee(3, "Mike", "2") - ), - departments( - department(1, "1", "AWS"), - department(2, null, "Retail") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("e.lastname", "Alice") - ) - ) - ); - } - - @Test - public void simpleJoinWithColumnNameConflict() { - // Add a same column 'name' as in department on purpose - SearchHit alice = employee(1, "Alice", "1"); - alice.getSourceAsMap().put("name", "Alice Alice"); - SearchHit hank = employee(2, "Hank", "2"); - hank.getSourceAsMap().put("name", "Hank Hank"); - - MatcherAssert.assertThat( - query( - "SELECT d.name, e.name FROM employee e " + - " JOIN department d ON d.id = e.departmentId " + - " WHERE d.region = 'US' AND e.age > 30", - employees( - alice, hank - ), - departments( - department(1, "1", "AWS") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("e.name", "Alice Alice") - ) - ) - ); - } - - @Test - public void simpleJoinWithAliasInSelect() { - MatcherAssert.assertThat( - query( - "SELECT d.name AS dname, e.lastname AS ename FROM employee e " + - " JOIN department d ON d.id = e.departmentId " + - " WHERE d.region = 'US' AND e.age > 30", - employees( - employee(1, "Alice", "2"), - employee(2, "Hank", "3") - ), - departments( - department(1, "1", "AWS"), - department(2, "2", "Retail") - ) - ), - hits( - hit( - MatcherUtils.kv("dname", 
"Retail"), - MatcherUtils.kv("ename", "Alice") - ) - ) - ); - } - - @Test - public void simpleLeftJoinWithoutMatchInLeft() { - MatcherAssert.assertThat( - query( - "SELECT d.name, e.lastname FROM employee e " + - " LEFT JOIN department d ON d.id = e.departmentId " + - " WHERE d.region = 'US' AND e.age > 30", - employees( - employee(1, "Alice", "2"), - employee(2, "Hank", "3") - ), - departments( - department(1, "1", "AWS") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", null), - MatcherUtils.kv("e.lastname", "Alice") - ), - hit( - MatcherUtils.kv("d.name", null), - MatcherUtils.kv("e.lastname", "Hank") - ) - ) - ); - } - - @Test - public void simpleLeftJoinWithSomeMismatchesInLeft() { - MatcherAssert.assertThat( - query( - "SELECT d.name, e.lastname FROM employee e " + - " LEFT JOIN department d ON d.id = e.departmentId " + - " WHERE d.region = 'US' AND e.age > 30", - employees( - employee(1, "Alice", "1"), - employee(2, "Hank", "2") - ), - departments( - department(1, "1", "AWS") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("e.lastname", "Alice") - ), - hit( - MatcherUtils.kv("d.name", null), - MatcherUtils.kv("e.lastname", "Hank") - ) - ) - ); - } - - @Test - public void simpleLeftJoinWithSomeMismatchesInRight() { - MatcherAssert.assertThat( - query( - "SELECT d.name, e.lastname FROM employee e " + - " LEFT JOIN department d ON d.id = e.departmentId " + - " WHERE d.region = 'US' AND e.age > 30", - employees( - employee(1, "Alice", "1"), - employee(2, "Hank", "1") - ), - departments( - department(1, "1", "AWS"), - department(2, "2", "Retail") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("e.lastname", "Alice") - ), - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("e.lastname", "Hank") - ) - ) - ); - } - - @Test - public void simpleQueryWithTotalLimit() { - MatcherAssert.assertThat( - query( - "SELECT d.name, e.lastname FROM employee e JOIN department d ON d.id = e.departmentId LIMIT 
1", - employees( - employee(1, "Alice", "1"), - employee(2, "Hank", "2") - ), - departments( - department(1, "1", "AWS"), - department(1, "2", "Retail") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("e.lastname", "Alice") - ) - ) - ); - } - - @Test - public void simpleQueryWithTableLimit() { - MatcherAssert.assertThat( - query( - "SELECT /*! JOIN_TABLES_LIMIT(1, 5) */ d.name, e.lastname FROM employee e JOIN department d ON d.id = e.departmentId", - employees( - employee(1, "Alice", "1"), - employee(2, "Hank", "1") - ), - departments( - department(1, "1", "AWS"), - department(1, "2", "Retail") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("e.lastname", "Alice") - ) - ) - ); - } - - @Test - public void simpleQueryWithOrderBy() { - MatcherAssert.assertThat( - query( - "SELECT d.name, e.lastname FROM employee e JOIN department d ON d.id = e.departmentId ORDER BY e.lastname", - employees( - employee(1, "Hank", "1"), - employee(2, "Alice", "2"), - employee(3, "Allen", "1"), - employee(4, "Ellis", "2"), - employee(5, "Frank", "2") - ), - departments( - department(1, "1", "AWS"), - department(2, "2", "Retail") - ) - ), - MatcherUtils.hitsInOrder( - hit( - MatcherUtils.kv("d.name", "Retail"), - MatcherUtils.kv("e.lastname", "Alice") - ), - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("e.lastname", "Allen") - ), - hit( - MatcherUtils.kv("d.name", "Retail"), - MatcherUtils.kv("e.lastname", "Ellis") - ), - hit( - MatcherUtils.kv("d.name", "Retail"), - MatcherUtils.kv("e.lastname", "Frank") - ), - hit( - MatcherUtils.kv("d.name", "AWS"), - MatcherUtils.kv("e.lastname", "Hank") - ) - ) - ); - } - - /** Doesn't support muliple columns from both tables (order is missing) */ - @Test - public void simpleQueryWithLeftJoinAndOrderByMultipleColumnsFromOneTableInDesc() { - MatcherAssert.assertThat( - query( - "SELECT d.id AS id, e.lastname AS lastname FROM employee e " + - " LEFT JOIN department d ON d.id = 
e.departmentId " + - " ORDER BY e.departmentId, e.lastname DESC", - employees( - employee(1, "Hank", "1"), - employee(2, "Alice", "2"), - employee(3, "Allen", "1"), - employee(4, "Ellis", "2"), - employee(5, "Gary", "3"), - employee(5, "Frank", "3") - ), - departments( - department(1, "1", "AWS"), - department(2, "2", "Retail") - ) - ), - MatcherUtils.hitsInOrder( - hit( - MatcherUtils.kv("id", null), - MatcherUtils.kv("lastname", "Gary") - ), - hit( - MatcherUtils.kv("id", null), - MatcherUtils.kv("lastname", "Frank") - ), - hit( - MatcherUtils.kv("id", "2"), - MatcherUtils.kv("lastname", "Ellis") - ), - hit( - MatcherUtils.kv("id", "2"), - MatcherUtils.kv("lastname", "Alice") - ), - hit( - MatcherUtils.kv("id", "1"), - MatcherUtils.kv("lastname", "Hank") - ), - hit( - MatcherUtils.kv("id", "1"), - MatcherUtils.kv("lastname", "Allen") - ) - ) - ); - } - - @Test - public void simpleCrossJoin() { - MatcherAssert.assertThat( - query( - "SELECT d.name AS dname, e.lastname AS ename FROM employee e JOIN department d", - employees( - employee(1, "Alice", "2"), - employee(2, "Hank", "3") - ), - departments( - department(1, "1", "AWS"), - department(2, "2", "Retail") - ) - ), - hits( - hit( - MatcherUtils.kv("dname", "AWS"), - MatcherUtils.kv("ename", "Alice") - ), - hit( - MatcherUtils.kv("dname", "AWS"), - MatcherUtils.kv("ename", "Hank") - ), - hit( - MatcherUtils.kv("dname", "Retail"), - MatcherUtils.kv("ename", "Alice") - ), - hit( - MatcherUtils.kv("dname", "Retail"), - MatcherUtils.kv("ename", "Hank") - ) - ) - ); - } - - @Test - public void simpleQueryWithTermsFilterOptimization() { - MatcherAssert.assertThat( - query( - "SELECT /*! 
HASH_WITH_TERMS_FILTER*/ " + // Be careful that no space between ...FILTER and */ - " e.lastname, d.id FROM employee e " + - " JOIN department d ON d.id = e.departmentId AND d.name = e.lastname", - employees( - employee(1, "Johnson", "1"), - employee(2, "Allen", "4"), - employee(3, "Ellis", "2"), - employee(4, "Dell", "1"), - employee(5, "Dell", "4") - ), - departments( - department(1, "1", "Johnson"), - department(1, "4", "Dell") - ) - ), - hits( - hit( - MatcherUtils.kv("e.lastname", "Johnson"), - MatcherUtils.kv("d.id", "1") - ), - hit( - MatcherUtils.kv("e.lastname", "Dell"), - MatcherUtils.kv("d.id", "4") - ) - ) - ); - } - - @Test - public void complexJoinWithMultipleConditions() { - MatcherAssert.assertThat( - query( - "SELECT d.name, e.lastname, d.id " + - " FROM employee e " + - " JOIN department d " + - " ON d.id = e.departmentId AND d.name = e.lastname" + - " WHERE d.region = 'US' AND e.age > 30", - employees( - employee(1, "Dell", "1"), - employee(2, "Hank", "1") - ), - departments( - department(1, "1", "Dell") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "Dell"), - MatcherUtils.kv("e.lastname", "Dell"), - MatcherUtils.kv("d.id", "1") - ) - ) - ); - } - - @Test - public void complexJoinWithOrConditions() { - MatcherAssert.assertThat( - query( - "SELECT d.name, e.lastname " + - " FROM employee e " + - " JOIN department d " + - " ON d.id = e.departmentId OR d.name = e.lastname", - employees( - employee(1, "Alice", "1"), - employee(2, "Dell", "2"), - employee(3, "Hank", "3") - ), - departments( - department(1, "1", "Dell"), - department(2, "4", "AWS") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "Dell"), - MatcherUtils.kv("e.lastname", "Alice") - ), - hit( - MatcherUtils.kv("d.name", "Dell"), - MatcherUtils.kv("e.lastname", "Dell") - ) - ) - ); - } - - @Test - public void complexJoinWithOrConditionsDuplicate() { - MatcherAssert.assertThat( - query( - "SELECT d.name, e.departmentId " + - " FROM employee e " + - " JOIN department d " + - " ON 
d.id = e.departmentId OR d.name = e.lastname", - employees( - employee(1, "Dell", "1") // Match both condition but should only show once in result - ), - departments( - department(1, "1", "Dell"), - department(2, "4", "AWS") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "Dell"), - MatcherUtils.kv("e.departmentId", "1") - ) - ) - ); - } - - @Test - public void complexJoinWithOrConditionsAndTermsFilterOptimization() { - MatcherAssert.assertThat( - query( - "SELECT /*! HASH_WITH_TERMS_FILTER*/ " + - " d.name, e.lastname " + - " FROM employee e " + - " JOIN department d " + - " ON d.id = e.departmentId OR d.name = e.lastname", - employees( - employee(1, "Alice", "1"), - employee(2, "Dell", "2"), - employee(3, "Hank", "3") - ), - departments( - department(1, "1", "Dell"), - department(2, "4", "AWS") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "Dell"), - MatcherUtils.kv("e.lastname", "Alice") - ), - hit( - MatcherUtils.kv("d.name", "Dell"), - MatcherUtils.kv("e.lastname", "Dell") - ) - ) - ); - } - - @Test - public void complexLeftJoinWithOrConditions() { - MatcherAssert.assertThat( - query( - "SELECT d.name, e.lastname " + - " FROM employee e " + - " LEFT JOIN department d " + - " ON d.id = e.departmentId OR d.name = e.lastname", - employees( - employee(1, "Alice", "1"), - employee(2, "Dell", "2"), - employee(3, "Hank", "3") - ), - departments( - department(1, "1", "Dell"), - department(2, "4", "AWS") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "Dell"), - MatcherUtils.kv("e.lastname", "Alice") - ), - hit( - MatcherUtils.kv("d.name", "Dell"), - MatcherUtils.kv("e.lastname", "Dell") - ), - hit( - MatcherUtils.kv("d.name", null), - MatcherUtils.kv("e.lastname", "Hank") - ) - ) - ); - } - - @Test - public void complexJoinWithTableLimitHint() { - MatcherAssert.assertThat( - query( - "SELECT " + - " /*! 
JOIN_TABLES_LIMIT(2, 1)*/" + - " d.name, e.lastname " + - " FROM employee e " + - " JOIN department d " + - " ON d.id = e.departmentId", - employees( - employee(1, "Alice", "1"), // Only this and the second row will be pulled out - employee(2, "Dell", "4"), - employee(3, "Hank", "1") - ), - departments( - department(1, "1", "Dell"), // Only this row will be pulled out - department(2, "4", "AWS") - ) - ), - hits( - hit( - MatcherUtils.kv("d.name", "Dell"), - MatcherUtils.kv("e.lastname", "Alice") - ) - ) - ); - } - + @Test + public void simpleJoin() { + MatcherAssert.assertThat( + query( + "SELECT d.name, e.lastname FROM employee e " + + " JOIN department d ON d.id = e.departmentId " + + " WHERE d.region = 'US' AND e.age > 30", + employees(employee(1, "Alice", "1"), employee(2, "Hank", "1")), + departments(department(1, "1", "AWS"))), + hits( + hit(MatcherUtils.kv("d.name", "AWS"), MatcherUtils.kv("e.lastname", "Alice")), + hit(MatcherUtils.kv("d.name", "AWS"), MatcherUtils.kv("e.lastname", "Hank")))); + } + + @Test + public void simpleJoinWithSelectAll() { + MatcherAssert.assertThat( + query( + "SELECT * FROM employee e " + " JOIN department d ON d.id = e.departmentId ", + employees(employee(1, "Alice", "1"), employee(2, "Hank", "1")), + departments(department(1, "1", "AWS"))), + hits( + hit( + MatcherUtils.kv("d.name", "AWS"), + MatcherUtils.kv("d.id", "1"), + MatcherUtils.kv("e.lastname", "Alice"), + MatcherUtils.kv("e.departmentId", "1")), + hit( + MatcherUtils.kv("d.name", "AWS"), + MatcherUtils.kv("d.id", "1"), + MatcherUtils.kv("e.lastname", "Hank"), + MatcherUtils.kv("e.departmentId", "1")))); + } + + @Test + public void simpleLeftJoinWithSelectAllFromOneTable() { + MatcherAssert.assertThat( + query( + "SELECT e.lastname, d.* FROM employee e " + + " LEFT JOIN department d ON d.id = e.departmentId ", + employees( + employee(1, "Alice", "1"), employee(2, "Hank", "1"), employee(3, "Allen", "3")), + departments(department(1, "1", "AWS"), department(2, "2", 
"Retail"))), + hits( + hit( + MatcherUtils.kv("e.lastname", "Alice"), + MatcherUtils.kv("d.name", "AWS"), + MatcherUtils.kv("d.id", "1")), + hit( + MatcherUtils.kv("e.lastname", "Hank"), + MatcherUtils.kv("d.name", "AWS"), + MatcherUtils.kv("d.id", "1")), + hit( + MatcherUtils.kv("e.lastname", "Allen") + /* + * Not easy to figure out all column names for d.* without reading metadata + * or look into other rows from d. But in the extreme case, d could be empty table + * which requires metadata read anyway. + */ + // kv("d.name", null), + // kv("d.id", null) + ))); + } + + @Test + public void simpleJoinWithSelectAllFromBothTables() { + MatcherAssert.assertThat( + query( + "SELECT e.*, d.* FROM employee e " + " JOIN department d ON d.id = e.departmentId ", + employees(employee(1, "Alice", "1"), employee(2, "Hank", "1")), + departments(department(1, "1", "AWS"))), + hits( + hit( + MatcherUtils.kv("d.name", "AWS"), + MatcherUtils.kv("d.id", "1"), + MatcherUtils.kv("e.lastname", "Alice"), + MatcherUtils.kv("e.departmentId", "1")), + hit( + MatcherUtils.kv("d.name", "AWS"), + MatcherUtils.kv("d.id", "1"), + MatcherUtils.kv("e.lastname", "Hank"), + MatcherUtils.kv("e.departmentId", "1")))); + } + + @Test + public void simpleJoinWithoutMatch() { + MatcherAssert.assertThat( + query( + "SELECT d.name, e.lastname FROM employee e " + + " JOIN department d ON d.id = e.departmentId " + + " WHERE d.region = 'US' AND e.age > 30", + employees(employee(1, "Alice", "2"), employee(2, "Hank", "3")), + departments(department(1, "1", "AWS"))), + hits()); + } + + @Test + public void simpleJoinWithSomeMatches() { + MatcherAssert.assertThat( + query( + "SELECT d.name, e.lastname FROM employee e " + + " JOIN department d ON d.id = e.departmentId " + + " WHERE d.region = 'US' AND e.age > 30", + employees(employee(1, "Alice", "2"), employee(2, "Hank", "3")), + departments(department(1, "1", "AWS"), department(2, "2", "Retail"))), + hits(hit(MatcherUtils.kv("d.name", "Retail"), 
MatcherUtils.kv("e.lastname", "Alice")))); + } + + @Test + public void simpleJoinWithAllMatches() { + MatcherAssert.assertThat( + query( + "SELECT d.name, e.lastname FROM employee e " + + " JOIN department d ON d.id = e.departmentId " + + " WHERE d.region = 'US' AND e.age > 30", + employees( + employee(1, "Alice", "1"), employee(2, "Hank", "1"), employee(3, "Mike", "2")), + departments(department(1, "1", "AWS"), department(2, "2", "Retail"))), + hits( + hit(MatcherUtils.kv("d.name", "AWS"), MatcherUtils.kv("e.lastname", "Alice")), + hit(MatcherUtils.kv("d.name", "AWS"), MatcherUtils.kv("e.lastname", "Hank")), + hit(MatcherUtils.kv("d.name", "Retail"), MatcherUtils.kv("e.lastname", "Mike")))); + } + + @Test + public void simpleJoinWithNull() { + MatcherAssert.assertThat( + query( + "SELECT d.name, e.lastname FROM employee e " + + " JOIN department d ON d.id = e.departmentId " + + " WHERE d.region = 'US' AND e.age > 30", + employees( + employee(1, "Alice", "1"), employee(2, "Hank", null), employee(3, "Mike", "2")), + departments(department(1, "1", "AWS"), department(2, null, "Retail"))), + hits(hit(MatcherUtils.kv("d.name", "AWS"), MatcherUtils.kv("e.lastname", "Alice")))); + } + + @Test + public void simpleJoinWithColumnNameConflict() { + // Add a same column 'name' as in department on purpose + SearchHit alice = employee(1, "Alice", "1"); + alice.getSourceAsMap().put("name", "Alice Alice"); + SearchHit hank = employee(2, "Hank", "2"); + hank.getSourceAsMap().put("name", "Hank Hank"); + + MatcherAssert.assertThat( + query( + "SELECT d.name, e.name FROM employee e " + + " JOIN department d ON d.id = e.departmentId " + + " WHERE d.region = 'US' AND e.age > 30", + employees(alice, hank), + departments(department(1, "1", "AWS"))), + hits(hit(MatcherUtils.kv("d.name", "AWS"), MatcherUtils.kv("e.name", "Alice Alice")))); + } + + @Test + public void simpleJoinWithAliasInSelect() { + MatcherAssert.assertThat( + query( + "SELECT d.name AS dname, e.lastname AS ename FROM 
employee e " + + " JOIN department d ON d.id = e.departmentId " + + " WHERE d.region = 'US' AND e.age > 30", + employees(employee(1, "Alice", "2"), employee(2, "Hank", "3")), + departments(department(1, "1", "AWS"), department(2, "2", "Retail"))), + hits(hit(MatcherUtils.kv("dname", "Retail"), MatcherUtils.kv("ename", "Alice")))); + } + + @Test + public void simpleLeftJoinWithoutMatchInLeft() { + MatcherAssert.assertThat( + query( + "SELECT d.name, e.lastname FROM employee e " + + " LEFT JOIN department d ON d.id = e.departmentId " + + " WHERE d.region = 'US' AND e.age > 30", + employees(employee(1, "Alice", "2"), employee(2, "Hank", "3")), + departments(department(1, "1", "AWS"))), + hits( + hit(MatcherUtils.kv("d.name", null), MatcherUtils.kv("e.lastname", "Alice")), + hit(MatcherUtils.kv("d.name", null), MatcherUtils.kv("e.lastname", "Hank")))); + } + + @Test + public void simpleLeftJoinWithSomeMismatchesInLeft() { + MatcherAssert.assertThat( + query( + "SELECT d.name, e.lastname FROM employee e " + + " LEFT JOIN department d ON d.id = e.departmentId " + + " WHERE d.region = 'US' AND e.age > 30", + employees(employee(1, "Alice", "1"), employee(2, "Hank", "2")), + departments(department(1, "1", "AWS"))), + hits( + hit(MatcherUtils.kv("d.name", "AWS"), MatcherUtils.kv("e.lastname", "Alice")), + hit(MatcherUtils.kv("d.name", null), MatcherUtils.kv("e.lastname", "Hank")))); + } + + @Test + public void simpleLeftJoinWithSomeMismatchesInRight() { + MatcherAssert.assertThat( + query( + "SELECT d.name, e.lastname FROM employee e " + + " LEFT JOIN department d ON d.id = e.departmentId " + + " WHERE d.region = 'US' AND e.age > 30", + employees(employee(1, "Alice", "1"), employee(2, "Hank", "1")), + departments(department(1, "1", "AWS"), department(2, "2", "Retail"))), + hits( + hit(MatcherUtils.kv("d.name", "AWS"), MatcherUtils.kv("e.lastname", "Alice")), + hit(MatcherUtils.kv("d.name", "AWS"), MatcherUtils.kv("e.lastname", "Hank")))); + } + + @Test + public void 
simpleQueryWithTotalLimit() { + MatcherAssert.assertThat( + query( + "SELECT d.name, e.lastname FROM employee e JOIN department d ON d.id = e.departmentId" + + " LIMIT 1", + employees(employee(1, "Alice", "1"), employee(2, "Hank", "2")), + departments(department(1, "1", "AWS"), department(1, "2", "Retail"))), + hits(hit(MatcherUtils.kv("d.name", "AWS"), MatcherUtils.kv("e.lastname", "Alice")))); + } + + @Test + public void simpleQueryWithTableLimit() { + MatcherAssert.assertThat( + query( + "SELECT /*! JOIN_TABLES_LIMIT(1, 5) */ d.name, e.lastname FROM employee e JOIN" + + " department d ON d.id = e.departmentId", + employees(employee(1, "Alice", "1"), employee(2, "Hank", "1")), + departments(department(1, "1", "AWS"), department(1, "2", "Retail"))), + hits(hit(MatcherUtils.kv("d.name", "AWS"), MatcherUtils.kv("e.lastname", "Alice")))); + } + + @Test + public void simpleQueryWithOrderBy() { + MatcherAssert.assertThat( + query( + "SELECT d.name, e.lastname FROM employee e JOIN department d ON d.id = e.departmentId" + + " ORDER BY e.lastname", + employees( + employee(1, "Hank", "1"), + employee(2, "Alice", "2"), + employee(3, "Allen", "1"), + employee(4, "Ellis", "2"), + employee(5, "Frank", "2")), + departments(department(1, "1", "AWS"), department(2, "2", "Retail"))), + MatcherUtils.hitsInOrder( + hit(MatcherUtils.kv("d.name", "Retail"), MatcherUtils.kv("e.lastname", "Alice")), + hit(MatcherUtils.kv("d.name", "AWS"), MatcherUtils.kv("e.lastname", "Allen")), + hit(MatcherUtils.kv("d.name", "Retail"), MatcherUtils.kv("e.lastname", "Ellis")), + hit(MatcherUtils.kv("d.name", "Retail"), MatcherUtils.kv("e.lastname", "Frank")), + hit(MatcherUtils.kv("d.name", "AWS"), MatcherUtils.kv("e.lastname", "Hank")))); + } + + /** Doesn't support multiple columns from both tables (order is missing) */ + @Test + public void simpleQueryWithLeftJoinAndOrderByMultipleColumnsFromOneTableInDesc() { + MatcherAssert.assertThat( + query( + "SELECT d.id AS id, e.lastname AS lastname FROM 
employee e " + + " LEFT JOIN department d ON d.id = e.departmentId " + + " ORDER BY e.departmentId, e.lastname DESC", + employees( + employee(1, "Hank", "1"), + employee(2, "Alice", "2"), + employee(3, "Allen", "1"), + employee(4, "Ellis", "2"), + employee(5, "Gary", "3"), + employee(5, "Frank", "3")), + departments(department(1, "1", "AWS"), department(2, "2", "Retail"))), + MatcherUtils.hitsInOrder( + hit(MatcherUtils.kv("id", null), MatcherUtils.kv("lastname", "Gary")), + hit(MatcherUtils.kv("id", null), MatcherUtils.kv("lastname", "Frank")), + hit(MatcherUtils.kv("id", "2"), MatcherUtils.kv("lastname", "Ellis")), + hit(MatcherUtils.kv("id", "2"), MatcherUtils.kv("lastname", "Alice")), + hit(MatcherUtils.kv("id", "1"), MatcherUtils.kv("lastname", "Hank")), + hit(MatcherUtils.kv("id", "1"), MatcherUtils.kv("lastname", "Allen")))); + } + + @Test + public void simpleCrossJoin() { + MatcherAssert.assertThat( + query( + "SELECT d.name AS dname, e.lastname AS ename FROM employee e JOIN department d", + employees(employee(1, "Alice", "2"), employee(2, "Hank", "3")), + departments(department(1, "1", "AWS"), department(2, "2", "Retail"))), + hits( + hit(MatcherUtils.kv("dname", "AWS"), MatcherUtils.kv("ename", "Alice")), + hit(MatcherUtils.kv("dname", "AWS"), MatcherUtils.kv("ename", "Hank")), + hit(MatcherUtils.kv("dname", "Retail"), MatcherUtils.kv("ename", "Alice")), + hit(MatcherUtils.kv("dname", "Retail"), MatcherUtils.kv("ename", "Hank")))); + } + + @Test + public void simpleQueryWithTermsFilterOptimization() { + MatcherAssert.assertThat( + query( + "SELECT /*! 
HASH_WITH_TERMS_FILTER*/ " + + // Be careful that no space between ...FILTER and */ + " e.lastname, d.id FROM employee e " + + " JOIN department d ON d.id = e.departmentId AND d.name = e.lastname", + employees( + employee(1, "Johnson", "1"), + employee(2, "Allen", "4"), + employee(3, "Ellis", "2"), + employee(4, "Dell", "1"), + employee(5, "Dell", "4")), + departments(department(1, "1", "Johnson"), department(1, "4", "Dell"))), + hits( + hit(MatcherUtils.kv("e.lastname", "Johnson"), MatcherUtils.kv("d.id", "1")), + hit(MatcherUtils.kv("e.lastname", "Dell"), MatcherUtils.kv("d.id", "4")))); + } + + @Test + public void complexJoinWithMultipleConditions() { + MatcherAssert.assertThat( + query( + "SELECT d.name, e.lastname, d.id " + + " FROM employee e " + + " JOIN department d " + + " ON d.id = e.departmentId AND d.name = e.lastname" + + " WHERE d.region = 'US' AND e.age > 30", + employees(employee(1, "Dell", "1"), employee(2, "Hank", "1")), + departments(department(1, "1", "Dell"))), + hits( + hit( + MatcherUtils.kv("d.name", "Dell"), + MatcherUtils.kv("e.lastname", "Dell"), + MatcherUtils.kv("d.id", "1")))); + } + + @Test + public void complexJoinWithOrConditions() { + MatcherAssert.assertThat( + query( + "SELECT d.name, e.lastname " + + " FROM employee e " + + " JOIN department d " + + " ON d.id = e.departmentId OR d.name = e.lastname", + employees( + employee(1, "Alice", "1"), employee(2, "Dell", "2"), employee(3, "Hank", "3")), + departments(department(1, "1", "Dell"), department(2, "4", "AWS"))), + hits( + hit(MatcherUtils.kv("d.name", "Dell"), MatcherUtils.kv("e.lastname", "Alice")), + hit(MatcherUtils.kv("d.name", "Dell"), MatcherUtils.kv("e.lastname", "Dell")))); + } + + @Test + public void complexJoinWithOrConditionsDuplicate() { + MatcherAssert.assertThat( + query( + "SELECT d.name, e.departmentId " + + " FROM employee e " + + " JOIN department d " + + " ON d.id = e.departmentId OR d.name = e.lastname", + employees( + employee(1, "Dell", "1") // Match both 
condition but should only show once in result + ), + departments(department(1, "1", "Dell"), department(2, "4", "AWS"))), + hits(hit(MatcherUtils.kv("d.name", "Dell"), MatcherUtils.kv("e.departmentId", "1")))); + } + + @Test + public void complexJoinWithOrConditionsAndTermsFilterOptimization() { + MatcherAssert.assertThat( + query( + "SELECT /*! HASH_WITH_TERMS_FILTER*/ " + + " d.name, e.lastname " + + " FROM employee e " + + " JOIN department d " + + " ON d.id = e.departmentId OR d.name = e.lastname", + employees( + employee(1, "Alice", "1"), employee(2, "Dell", "2"), employee(3, "Hank", "3")), + departments(department(1, "1", "Dell"), department(2, "4", "AWS"))), + hits( + hit(MatcherUtils.kv("d.name", "Dell"), MatcherUtils.kv("e.lastname", "Alice")), + hit(MatcherUtils.kv("d.name", "Dell"), MatcherUtils.kv("e.lastname", "Dell")))); + } + + @Test + public void complexLeftJoinWithOrConditions() { + MatcherAssert.assertThat( + query( + "SELECT d.name, e.lastname " + + " FROM employee e " + + " LEFT JOIN department d " + + " ON d.id = e.departmentId OR d.name = e.lastname", + employees( + employee(1, "Alice", "1"), employee(2, "Dell", "2"), employee(3, "Hank", "3")), + departments(department(1, "1", "Dell"), department(2, "4", "AWS"))), + hits( + hit(MatcherUtils.kv("d.name", "Dell"), MatcherUtils.kv("e.lastname", "Alice")), + hit(MatcherUtils.kv("d.name", "Dell"), MatcherUtils.kv("e.lastname", "Dell")), + hit(MatcherUtils.kv("d.name", null), MatcherUtils.kv("e.lastname", "Hank")))); + } + + @Test + public void complexJoinWithTableLimitHint() { + MatcherAssert.assertThat( + query( + "SELECT " + + " /*! 
JOIN_TABLES_LIMIT(2, 1)*/" + + " d.name, e.lastname " + + " FROM employee e " + + " JOIN department d " + + " ON d.id = e.departmentId", + employees( + employee(1, "Alice", "1"), // Only this and the second row will be pulled out + employee(2, "Dell", "4"), + employee(3, "Hank", "1")), + departments( + department(1, "1", "Dell"), // Only this row will be pulled out + department(2, "4", "AWS"))), + hits(hit(MatcherUtils.kv("d.name", "Dell"), MatcherUtils.kv("e.lastname", "Alice")))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerExplainTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerExplainTest.java index 2c92c91666..7f495935ca 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerExplainTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerExplainTest.java @@ -3,45 +3,41 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.planner; import org.junit.Test; import org.opensearch.sql.legacy.query.planner.core.QueryPlanner; -/** - * Query planner explanation unit test - */ +/** Query planner explanation unit test */ public class QueryPlannerExplainTest extends QueryPlannerTest { - @Test - public void explainInJson() { - QueryPlanner planner = plan( - "SELECT d.name, e.lastname FROM employee e " + - " JOIN department d ON d.id = e.departmentId " + - " WHERE d.region = 'US' AND e.age > 30" - ); - planner.explain(); - } - - @Test - public void explainInJsonWithComplicatedOn() { - QueryPlanner planner = plan( - "SELECT d.name, e.lastname FROM employee e " + - " JOIN department d ON d.id = e.departmentId AND d.location = e.region " + - " WHERE d.region = 'US' AND e.age > 30" - ); - planner.explain(); - } - - @Test - public void explainInJsonWithDuplicateColumnsPushedDown() { - QueryPlanner planner = plan( - "SELECT d.id, e.departmentId FROM employee e " + - " JOIN department d ON 
d.id = e.departmentId AND d.location = e.region " + - " WHERE d.region = 'US' AND e.age > 30" - ); - planner.explain(); - } - + @Test + public void explainInJson() { + QueryPlanner planner = + plan( + "SELECT d.name, e.lastname FROM employee e " + + " JOIN department d ON d.id = e.departmentId " + + " WHERE d.region = 'US' AND e.age > 30"); + planner.explain(); + } + + @Test + public void explainInJsonWithComplicatedOn() { + QueryPlanner planner = + plan( + "SELECT d.name, e.lastname FROM employee e " + + " JOIN department d ON d.id = e.departmentId AND d.location = e.region " + + " WHERE d.region = 'US' AND e.age > 30"); + planner.explain(); + } + + @Test + public void explainInJsonWithDuplicateColumnsPushedDown() { + QueryPlanner planner = + plan( + "SELECT d.id, e.departmentId FROM employee e " + + " JOIN department d ON d.id = e.departmentId AND d.location = e.region " + + " WHERE d.region = 'US' AND e.age > 30"); + planner.explain(); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerMonitorTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerMonitorTest.java index 66ce2411f4..9b1d307ebc 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerMonitorTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerMonitorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.planner; import static org.mockito.Mockito.doAnswer; @@ -18,109 +17,95 @@ import org.opensearch.sql.legacy.query.planner.resource.Stats; import org.opensearch.sql.legacy.query.planner.resource.Stats.MemStats; -/** - * Circuit breaker component test - */ +/** Circuit breaker component test */ @Ignore public class QueryPlannerMonitorTest extends QueryPlannerTest { - /** Configure back off strategy 1s, 1s and 1s - retry 4 times at most */ - private static final String TEST_SQL1 = - "SELECT /*! 
JOIN_BACK_OFF_RETRY_INTERVALS(1, 1, 1) */ " + - " /*! JOIN_CIRCUIT_BREAK_LIMIT("; - - private static final String TEST_SQL2 = - ") */ d.name, e.lastname FROM employee e " + - " JOIN department d ON d.id = e.departmentId " + - " WHERE d.region = 'US' AND e.age > 30"; - - private static final long[] PERCENT_USAGE_15 = freeAndTotalMem(85, 100); - private static final long[] PERCENT_USAGE_24 = freeAndTotalMem(76, 100); - private static final long[] PERCENT_USAGE_50 = freeAndTotalMem(50, 100); - - @Spy - private Stats stats = new Stats(client); - - @Test - public void reachedLimitAndRecoverAt1stAttempt() { - mockMemUsage(PERCENT_USAGE_15, PERCENT_USAGE_50, PERCENT_USAGE_24); - queryWithLimit(25); // TODO: assert if final result set is correct after recovery - } - - @Test - public void reachedLimitAndRecoverAt2ndAttempt() { - mockMemUsage(PERCENT_USAGE_15, PERCENT_USAGE_50, PERCENT_USAGE_50, PERCENT_USAGE_15); - queryWithLimit(25); - } - - @Test - public void reachedLimitAndRecoverAt3rdAttempt() { - mockMemUsage(PERCENT_USAGE_15, PERCENT_USAGE_50, PERCENT_USAGE_50, PERCENT_USAGE_50, PERCENT_USAGE_15); - queryWithLimit(25); - } - - @Test(expected = IllegalStateException.class) - public void reachedLimitAndFailFinally() { - mockMemUsage(PERCENT_USAGE_15, PERCENT_USAGE_50); - queryWithLimit(25); - } - - @Test(expected = IllegalStateException.class) - public void reachedLimitAndRejectNewRequest() { - mockMemUsage(PERCENT_USAGE_50); - queryWithLimit(25); - } - - @Test(expected = IllegalStateException.class) - public void timeOut() { - query( - "SELECT /*! JOIN_TIME_OUT(0) */ " + - " d.name FROM employee e JOIN department d ON d.id = e.departmentId", - employees( - employee(1, "Dell", "1") - ), - departments( - department(1, "1", "Dell") - ) - ); - } - - private void mockMemUsage(long[]... 
memUsages) { - doAnswer(new Answer() { - private int callCnt = -1; - - @Override - public MemStats answer(InvocationOnMock invocation) { + /** Configure back off strategy 1s, 1s and 1s - retry 4 times at most */ + private static final String TEST_SQL1 = + "SELECT /*! JOIN_BACK_OFF_RETRY_INTERVALS(1, 1, 1) */ " + " /*! JOIN_CIRCUIT_BREAK_LIMIT("; + + private static final String TEST_SQL2 = + ") */ d.name, e.lastname FROM employee e " + + " JOIN department d ON d.id = e.departmentId " + + " WHERE d.region = 'US' AND e.age > 30"; + + private static final long[] PERCENT_USAGE_15 = freeAndTotalMem(85, 100); + private static final long[] PERCENT_USAGE_24 = freeAndTotalMem(76, 100); + private static final long[] PERCENT_USAGE_50 = freeAndTotalMem(50, 100); + + @Spy private Stats stats = new Stats(client); + + @Test + public void reachedLimitAndRecoverAt1stAttempt() { + mockMemUsage(PERCENT_USAGE_15, PERCENT_USAGE_50, PERCENT_USAGE_24); + queryWithLimit(25); // TODO: assert if final result set is correct after recovery + } + + @Test + public void reachedLimitAndRecoverAt2ndAttempt() { + mockMemUsage(PERCENT_USAGE_15, PERCENT_USAGE_50, PERCENT_USAGE_50, PERCENT_USAGE_15); + queryWithLimit(25); + } + + @Test + public void reachedLimitAndRecoverAt3rdAttempt() { + mockMemUsage( + PERCENT_USAGE_15, PERCENT_USAGE_50, PERCENT_USAGE_50, PERCENT_USAGE_50, PERCENT_USAGE_15); + queryWithLimit(25); + } + + @Test(expected = IllegalStateException.class) + public void reachedLimitAndFailFinally() { + mockMemUsage(PERCENT_USAGE_15, PERCENT_USAGE_50); + queryWithLimit(25); + } + + @Test(expected = IllegalStateException.class) + public void reachedLimitAndRejectNewRequest() { + mockMemUsage(PERCENT_USAGE_50); + queryWithLimit(25); + } + + @Test(expected = IllegalStateException.class) + public void timeOut() { + query( + "SELECT /*! 
JOIN_TIME_OUT(0) */ " + + " d.name FROM employee e JOIN department d ON d.id = e.departmentId", + employees(employee(1, "Dell", "1")), + departments(department(1, "1", "Dell"))); + } + + private void mockMemUsage(long[]... memUsages) { + doAnswer( + new Answer() { + private int callCnt = -1; + + @Override + public MemStats answer(InvocationOnMock invocation) { callCnt = Math.min(callCnt + 1, memUsages.length - 1); - return new MemStats( - memUsages[callCnt][0], memUsages[callCnt][1] - ); - } - }).when(stats).collectMemStats(); - } - - private static long[] freeAndTotalMem(long free, long total) { - return new long[]{ free, total }; - } - - private SearchHits queryWithLimit(int limit) { - return query( - TEST_SQL1 + limit + TEST_SQL2, - employees( - employee(1, "Dell", "1"), - employee(2, "Hank", "1") - ), - departments( - department(1, "1", "Dell") - ) - ); - } - - @Override - protected QueryPlanner plan(String sql) { - QueryPlanner planner = super.plan(sql); - planner.setStats(stats); - return planner; - } - + return new MemStats(memUsages[callCnt][0], memUsages[callCnt][1]); + } + }) + .when(stats) + .collectMemStats(); + } + + private static long[] freeAndTotalMem(long free, long total) { + return new long[] {free, total}; + } + + private SearchHits queryWithLimit(int limit) { + return query( + TEST_SQL1 + limit + TEST_SQL2, + employees(employee(1, "Dell", "1"), employee(2, "Hank", "1")), + departments(department(1, "1", "Dell"))); + } + + @Override + protected QueryPlanner plan(String sql) { + QueryPlanner planner = super.plan(sql); + planner.setStats(stats); + return planner; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerTest.java index 8401733529..4cda101ae4 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerTest.java +++ 
b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/QueryPlannerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.planner; import static java.util.Collections.emptyList; @@ -19,7 +18,6 @@ import com.alibaba.druid.sql.parser.ParserException; import com.alibaba.druid.sql.parser.SQLExprParser; import com.alibaba.druid.sql.parser.Token; -import com.google.common.collect.ImmutableList; import java.util.Arrays; import java.util.List; import org.apache.lucene.search.TotalHits; @@ -39,9 +37,9 @@ import org.opensearch.client.Client; import org.opensearch.cluster.ClusterName; import org.opensearch.common.action.ActionFuture; -import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.unit.TimeValue; +import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; import org.opensearch.sql.legacy.domain.JoinSelect; @@ -59,246 +57,240 @@ import org.opensearch.sql.legacy.request.SqlRequest; import org.opensearch.sql.opensearch.setting.OpenSearchSettings; -/** - * Test base class for all query planner tests. - */ +/** Test base class for all query planner tests. 
*/ @Ignore public abstract class QueryPlannerTest { - @Mock - protected Client client; + @Mock protected Client client; - @Mock - private SearchResponse response1; - private static final String SCROLL_ID1 = "1"; + @Mock private SearchResponse response1; + private static final String SCROLL_ID1 = "1"; - @Mock - private SearchResponse response2; - private static final String SCROLL_ID2 = "2"; + @Mock private SearchResponse response2; + private static final String SCROLL_ID2 = "2"; - @Mock - private ClusterSettings clusterSettings; + @Mock private ClusterSettings clusterSettings; - /* - @BeforeClass - public static void initLogger() { - ConfigurationBuilder builder = newConfigurationBuilder(); - AppenderComponentBuilder appender = builder.newAppender("stdout", "Console"); + /* + @BeforeClass + public static void initLogger() { + ConfigurationBuilder builder = newConfigurationBuilder(); + AppenderComponentBuilder appender = builder.newAppender("stdout", "Console"); - LayoutComponentBuilder standard = builder.newLayout("PatternLayout"); - standard.addAttribute("pattern", "%d [%t] %-5level: %msg%n%throwable"); - appender.add(standard); + LayoutComponentBuilder standard = builder.newLayout("PatternLayout"); + standard.addAttribute("pattern", "%d [%t] %-5level: %msg%n%throwable"); + appender.add(standard); - RootLoggerComponentBuilder rootLogger = builder.newRootLogger(Level.ERROR); - rootLogger.add(builder.newAppenderRef("stdout")); + RootLoggerComponentBuilder rootLogger = builder.newRootLogger(Level.ERROR); + rootLogger.add(builder.newAppenderRef("stdout")); - LoggerComponentBuilder logger = builder.newLogger("org.nlpcn.es4sql.query.planner", Level.TRACE); - logger.add(builder.newAppenderRef("stdout")); - //logger.addAttribute("additivity", false); + LoggerComponentBuilder logger = builder.newLogger("org.nlpcn.es4sql.query.planner", Level.TRACE); + logger.add(builder.newAppenderRef("stdout")); + //logger.addAttribute("additivity", false); - builder.add(logger); + 
builder.add(logger); - Configurator.initialize(builder.build()); - } - */ + Configurator.initialize(builder.build()); + } + */ - @Before - public void init() { - MockitoAnnotations.initMocks(this); - when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); - OpenSearchSettings settings = spy(new OpenSearchSettings(clusterSettings)); + @Before + public void init() { + MockitoAnnotations.initMocks(this); + when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); + OpenSearchSettings settings = spy(new OpenSearchSettings(clusterSettings)); - // Force return empty list to avoid ClusterSettings be invoked which is a final class and hard to mock. - // In this case, default value in Setting will be returned all the time. - doReturn(emptyList()).when(settings).getSettings(); - LocalClusterState.state().setPluginSettings(settings); + // Force return empty list to avoid ClusterSettings be invoked which is a final class and hard + // to mock. + // In this case, default value in Setting will be returned all the time. + doReturn(emptyList()).when(settings).getSettings(); + LocalClusterState.state().setPluginSettings(settings); - ActionFuture mockFuture = mock(ActionFuture.class); - when(client.execute(any(), any())).thenReturn(mockFuture); + ActionFuture mockFuture = mock(ActionFuture.class); + when(client.execute(any(), any())).thenReturn(mockFuture); - // Differentiate response for Scroll-1/2 by call count and scroll ID. - when(mockFuture.actionGet()).thenAnswer(new Answer() { - private int callCnt; + // Differentiate response for Scroll-1/2 by call count and scroll ID. 
+ when(mockFuture.actionGet()) + .thenAnswer( + new Answer() { + private int callCnt; - @Override - public SearchResponse answer(InvocationOnMock invocation) { + @Override + public SearchResponse answer(InvocationOnMock invocation) { /* * This works based on assumption that first call comes from Scroll-1, all the following calls come from Scroll-2. * Because Scroll-1 only open scroll once and must be ahead of Scroll-2 which opens multiple times later. */ return callCnt++ == 0 ? response1 : response2; - } - }); - - doReturn(SCROLL_ID1).when(response1).getScrollId(); - doReturn(SCROLL_ID2).when(response2).getScrollId(); - - // Avoid NPE in empty SearchResponse - doReturn(0).when(response1).getFailedShards(); - doReturn(0).when(response2).getFailedShards(); - doReturn(false).when(response1).isTimedOut(); - doReturn(false).when(response2).isTimedOut(); - - returnMockResponse(SCROLL_ID1, response1); - returnMockResponse(SCROLL_ID2, response2); - - Metrics.getInstance().registerDefaultMetrics(); - } - - private void returnMockResponse(String scrollId, SearchResponse response) { - SearchScrollRequestBuilder mockReqBuilder = mock(SearchScrollRequestBuilder.class); - when(client.prepareSearchScroll(scrollId)).thenReturn(mockReqBuilder); - when(mockReqBuilder.setScroll(any(TimeValue.class))).thenReturn(mockReqBuilder); - when(mockReqBuilder.get()).thenReturn(response); - } - - protected SearchHits query(String sql, MockSearchHits mockHits1, MockSearchHits mockHits2) { - doAnswer(mockHits1).when(response1).getHits(); - doAnswer(mockHits2).when(response2).getHits(); - - try (MockedStatic backOffRetryStrategyMocked = - Mockito.mockStatic(BackOffRetryStrategy.class)) { - backOffRetryStrategyMocked.when(BackOffRetryStrategy::isHealthy).thenReturn(true); + } + }); - ClearScrollRequestBuilder mockReqBuilder = mock(ClearScrollRequestBuilder.class); - when(client.prepareClearScroll()).thenReturn(mockReqBuilder); - when(mockReqBuilder.addScrollId(any())).thenReturn(mockReqBuilder); - 
when(mockReqBuilder.get()).thenAnswer(new Answer() { + doReturn(SCROLL_ID1).when(response1).getScrollId(); + doReturn(SCROLL_ID2).when(response2).getScrollId(); + + // Avoid NPE in empty SearchResponse + doReturn(0).when(response1).getFailedShards(); + doReturn(0).when(response2).getFailedShards(); + doReturn(false).when(response1).isTimedOut(); + doReturn(false).when(response2).isTimedOut(); + + returnMockResponse(SCROLL_ID1, response1); + returnMockResponse(SCROLL_ID2, response2); + + Metrics.getInstance().registerDefaultMetrics(); + } + + private void returnMockResponse(String scrollId, SearchResponse response) { + SearchScrollRequestBuilder mockReqBuilder = mock(SearchScrollRequestBuilder.class); + when(client.prepareSearchScroll(scrollId)).thenReturn(mockReqBuilder); + when(mockReqBuilder.setScroll(any(TimeValue.class))).thenReturn(mockReqBuilder); + when(mockReqBuilder.get()).thenReturn(response); + } + + protected SearchHits query(String sql, MockSearchHits mockHits1, MockSearchHits mockHits2) { + doAnswer(mockHits1).when(response1).getHits(); + doAnswer(mockHits2).when(response2).getHits(); + + try (MockedStatic backOffRetryStrategyMocked = + Mockito.mockStatic(BackOffRetryStrategy.class)) { + backOffRetryStrategyMocked.when(BackOffRetryStrategy::isHealthy).thenReturn(true); + + ClearScrollRequestBuilder mockReqBuilder = mock(ClearScrollRequestBuilder.class); + when(client.prepareClearScroll()).thenReturn(mockReqBuilder); + when(mockReqBuilder.addScrollId(any())).thenReturn(mockReqBuilder); + when(mockReqBuilder.get()) + .thenAnswer( + new Answer() { @Override public ClearScrollResponse answer(InvocationOnMock invocation) throws Throwable { - mockHits2.reset(); - return new ClearScrollResponse(true, 0); + mockHits2.reset(); + return new ClearScrollResponse(true, 0); } - }); + }); - List hits = plan(sql).execute(); - return new SearchHits(hits.toArray(new SearchHit[0]), new TotalHits(hits.size(), Relation.EQUAL_TO), 0); - } + List hits = plan(sql).execute(); 
+ return new SearchHits( + hits.toArray(new SearchHit[0]), new TotalHits(hits.size(), Relation.EQUAL_TO), 0); } + } - protected QueryPlanner plan(String sql) { - SqlElasticRequestBuilder request = createRequestBuilder(sql); - if (request instanceof HashJoinQueryPlanRequestBuilder) { - return ((HashJoinQueryPlanRequestBuilder) request).plan(); - } - throw new IllegalStateException("Not a JOIN query: " + sql); + protected QueryPlanner plan(String sql) { + SqlElasticRequestBuilder request = createRequestBuilder(sql); + if (request instanceof HashJoinQueryPlanRequestBuilder) { + return ((HashJoinQueryPlanRequestBuilder) request).plan(); } - - protected SqlElasticRequestBuilder createRequestBuilder(String sql) { - try { - SQLQueryExpr sqlExpr = (SQLQueryExpr) toSqlExpr(sql); - JoinSelect joinSelect = new SqlParser().parseJoinSelect(sqlExpr); // Ignore handleSubquery() - QueryAction queryAction = OpenSearchJoinQueryActionFactory - .createJoinAction(client, joinSelect); - queryAction.setSqlRequest(new SqlRequest(sql, null)); - return queryAction.explain(); - } - catch (SqlParseException e) { - throw new IllegalStateException("Invalid query: " + sql, e); - } + throw new IllegalStateException("Not a JOIN query: " + sql); + } + + protected SqlElasticRequestBuilder createRequestBuilder(String sql) { + try { + SQLQueryExpr sqlExpr = (SQLQueryExpr) toSqlExpr(sql); + JoinSelect joinSelect = new SqlParser().parseJoinSelect(sqlExpr); // Ignore handleSubquery() + QueryAction queryAction = + OpenSearchJoinQueryActionFactory.createJoinAction(client, joinSelect); + queryAction.setSqlRequest(new SqlRequest(sql, null)); + return queryAction.explain(); + } catch (SqlParseException e) { + throw new IllegalStateException("Invalid query: " + sql, e); } + } - private SQLExpr toSqlExpr(String sql) { - SQLExprParser parser = new ElasticSqlExprParser(sql); - SQLExpr expr = parser.expr(); + private SQLExpr toSqlExpr(String sql) { + SQLExprParser parser = new ElasticSqlExprParser(sql); + SQLExpr 
expr = parser.expr(); - if (parser.getLexer().token() != Token.EOF) { - throw new ParserException("illegal sql expr : " + sql); - } - return expr; + if (parser.getLexer().token() != Token.EOF) { + throw new ParserException("illegal sql expr : " + sql); } + return expr; + } - /** - * Mock SearchHits and slice and return in batch. - */ - protected static class MockSearchHits implements Answer { - - private final SearchHit[] allHits; - - private final int batchSize; //TODO: should be inferred from mock object dynamically - - private int callCnt; - - MockSearchHits(SearchHit[] allHits, int batchSize) { - this.allHits = allHits; - this.batchSize = batchSize; - } - - @Override - public SearchHits answer(InvocationOnMock invocation) { - SearchHit[] curBatch; - if (isNoMoreBatch()) { - curBatch = new SearchHit[0]; - } else { - curBatch = currentBatch(); - callCnt++; - } - return new SearchHits(curBatch, new TotalHits(allHits.length, Relation.EQUAL_TO), 0); - } - - private boolean isNoMoreBatch() { - return callCnt > allHits.length / batchSize; - } - - private SearchHit[] currentBatch() { - return Arrays.copyOfRange(allHits, startIndex(), endIndex()); - } - - private int startIndex() { - return callCnt * batchSize; - } - - private int endIndex() { - return Math.min(startIndex() + batchSize, allHits.length); - } - - private void reset() { - callCnt = 0; - } - } + /** Mock SearchHits and slice and return in batch. */ + protected static class MockSearchHits implements Answer { - protected MockSearchHits employees(SearchHit... mockHits) { - return employees(5, mockHits); + private final SearchHit[] allHits; + + private final int batchSize; // TODO: should be inferred from mock object dynamically + + private int callCnt; + + MockSearchHits(SearchHit[] allHits, int batchSize) { + this.allHits = allHits; + this.batchSize = batchSize; } - protected MockSearchHits employees(int pageSize, SearchHit... 
mockHits) { - return new MockSearchHits(mockHits, pageSize); + @Override + public SearchHits answer(InvocationOnMock invocation) { + SearchHit[] curBatch; + if (isNoMoreBatch()) { + curBatch = new SearchHit[0]; + } else { + curBatch = currentBatch(); + callCnt++; + } + return new SearchHits(curBatch, new TotalHits(allHits.length, Relation.EQUAL_TO), 0); } - protected MockSearchHits departments(SearchHit... mockHits) { - return departments(5, mockHits); + private boolean isNoMoreBatch() { + return callCnt > allHits.length / batchSize; } - protected MockSearchHits departments(int pageSize, SearchHit... mockHits) { - return new MockSearchHits(mockHits, pageSize); + private SearchHit[] currentBatch() { + return Arrays.copyOfRange(allHits, startIndex(), endIndex()); } - protected SearchHit employee(int docId, String lastname, String departmentId) { - SearchHit hit = new SearchHit(docId); - if (lastname == null) { - hit.sourceRef(new BytesArray("{\"departmentId\":\"" + departmentId + "\"}")); - } - else if (departmentId == null) { - hit.sourceRef(new BytesArray("{\"lastname\":\"" + lastname + "\"}")); - } - else { - hit.sourceRef(new BytesArray("{\"lastname\":\"" + lastname + "\",\"departmentId\":\"" + departmentId + "\"}")); - } - return hit; + private int startIndex() { + return callCnt * batchSize; } - protected SearchHit department(int docId, String id, String name) { - SearchHit hit = new SearchHit(docId); - if (id == null) { - hit.sourceRef(new BytesArray("{\"name\":\"" + name + "\"}")); - } - else if (name == null) { - hit.sourceRef(new BytesArray("{\"id\":\"" + id + "\"}")); - } - else { - hit.sourceRef(new BytesArray("{\"id\":\"" + id + "\",\"name\":\"" + name + "\"}")); - } - return hit; + private int endIndex() { + return Math.min(startIndex() + batchSize, allHits.length); } + private void reset() { + callCnt = 0; + } + } + + protected MockSearchHits employees(SearchHit... 
mockHits) { + return employees(5, mockHits); + } + + protected MockSearchHits employees(int pageSize, SearchHit... mockHits) { + return new MockSearchHits(mockHits, pageSize); + } + + protected MockSearchHits departments(SearchHit... mockHits) { + return departments(5, mockHits); + } + + protected MockSearchHits departments(int pageSize, SearchHit... mockHits) { + return new MockSearchHits(mockHits, pageSize); + } + + protected SearchHit employee(int docId, String lastname, String departmentId) { + SearchHit hit = new SearchHit(docId); + if (lastname == null) { + hit.sourceRef(new BytesArray("{\"departmentId\":\"" + departmentId + "\"}")); + } else if (departmentId == null) { + hit.sourceRef(new BytesArray("{\"lastname\":\"" + lastname + "\"}")); + } else { + hit.sourceRef( + new BytesArray( + "{\"lastname\":\"" + lastname + "\",\"departmentId\":\"" + departmentId + "\"}")); + } + return hit; + } + + protected SearchHit department(int docId, String id, String name) { + SearchHit hit = new SearchHit(docId); + if (id == null) { + hit.sourceRef(new BytesArray("{\"name\":\"" + name + "\"}")); + } else if (name == null) { + hit.sourceRef(new BytesArray("{\"id\":\"" + id + "\"}")); + } else { + hit.sourceRef(new BytesArray("{\"id\":\"" + id + "\",\"name\":\"" + name + "\"}")); + } + return hit; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/converter/SQLAggregationParserTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/converter/SQLAggregationParserTest.java index bdf3c64fd8..855ed9e346 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/converter/SQLAggregationParserTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/converter/SQLAggregationParserTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.planner.converter; import static org.hamcrest.MatcherAssert.assertThat; @@ -36,321 +35,367 @@ 
@RunWith(MockitoJUnitRunner.class) public class SQLAggregationParserTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); - - @Test - public void parseAggWithoutExpressionShouldPass() { - String sql = "SELECT dayOfWeek, max(FlightDelayMin), MIN(FlightDelayMin) as min " + - "FROM opensearch_dashboards_sample_data_flights " + - "GROUP BY dayOfWeek"; - SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); - parser.parse(mYSqlSelectQueryBlock(sql)); - List sqlSelectItems = parser.selectItemList(); - List columnNodes = parser.getColumnNodes(); - - assertThat(sqlSelectItems, containsInAnyOrder(group("dayOfWeek", "dayOfWeek"), - agg("MAX", "FlightDelayMin", "MAX_0"), - agg("MIN", "FlightDelayMin", "min"))); - - assertThat(columnNodes, containsInAnyOrder(columnNode("dayOfWeek", null, ExpressionFactory.ref("dayOfWeek")), - columnNode("MAX(FlightDelayMin)", null, ExpressionFactory - .ref("MAX_0")), - columnNode("min", "min", ExpressionFactory.ref("min")))); - } - - @Test - public void parseAggWithFunctioniWithoutExpressionShouldPass() { - String sql = "SELECT dayOfWeek, max(FlightDelayMin), MIN(FlightDelayMin) as min " + - "FROM opensearch_dashboards_sample_data_flights " + - "GROUP BY dayOfWeek"; - SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); - parser.parse(mYSqlSelectQueryBlock(sql)); - List sqlSelectItems = parser.selectItemList(); - List columnNodes = parser.getColumnNodes(); - - assertThat(sqlSelectItems, containsInAnyOrder(group("dayOfWeek", "dayOfWeek"), - agg("MAX", "FlightDelayMin", "MAX_0"), - agg("MIN", "FlightDelayMin", "min"))); - - assertThat(columnNodes, containsInAnyOrder(columnNode("dayOfWeek", null, ExpressionFactory.ref("dayOfWeek")), - columnNode("MAX(FlightDelayMin)", null, ExpressionFactory - .ref("MAX_0")), - columnNode("min", "min", ExpressionFactory.ref("min")))); - } - - @Test - public void parseAggWithExpressionShouldPass() { - String sql = "SELECT 
dayOfWeek, max(FlightDelayMin) + MIN(FlightDelayMin) as sub " + - "FROM opensearch_dashboards_sample_data_flights " + - "GROUP BY dayOfWeek"; - SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); - parser.parse(mYSqlSelectQueryBlock(sql)); - List sqlSelectItems = parser.selectItemList(); - List columnNodes = parser.getColumnNodes(); - - assertThat(sqlSelectItems, containsInAnyOrder(group("dayOfWeek", "dayOfWeek"), - agg("MAX", "FlightDelayMin", "MAX_0"), - agg("MIN", "FlightDelayMin", "MIN_1"))); - - assertThat(columnNodes, containsInAnyOrder(columnNode("dayOfWeek", null, ExpressionFactory.ref("dayOfWeek")), - columnNode("sub", "sub", add(ExpressionFactory.ref("MAX_0"), ExpressionFactory - .ref("MIN_1"))))); - } - - @Test - public void parseWithRawSelectFuncnameShouldPass() { - String sql = "SELECT LOG(FlightDelayMin) " + - "FROM opensearch_dashboards_sample_data_flights " + - "GROUP BY log(FlightDelayMin)"; - SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); - parser.parse(mYSqlSelectQueryBlock(sql)); - List sqlSelectItems = parser.selectItemList(); - List columnNodes = parser.getColumnNodes(); - - assertThat(sqlSelectItems, containsInAnyOrder(group("log(FlightDelayMin)", "log(FlightDelayMin)"))); - - assertThat( - columnNodes, - containsInAnyOrder( - columnNode( - "LOG(FlightDelayMin)", - null, - ExpressionFactory.ref("log(FlightDelayMin)") - ) - ) - ); - } - - @Test - public void functionOverFiledShouldPass() { - String sql = "SELECT dayOfWeek, max(FlightDelayMin) + MIN(FlightDelayMin) as sub " + - "FROM opensearch_dashboards_sample_data_flights " + - "GROUP BY dayOfWeek"; - SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); - parser.parse(mYSqlSelectQueryBlock(sql)); - List sqlSelectItems = parser.selectItemList(); - List columnNodes = parser.getColumnNodes(); - - assertThat(sqlSelectItems, containsInAnyOrder(group("dayOfWeek", "dayOfWeek"), - agg("MAX", 
"FlightDelayMin", "MAX_0"), - agg("MIN", "FlightDelayMin", "MIN_1"))); - - assertThat(columnNodes, containsInAnyOrder(columnNode("dayOfWeek", null, ExpressionFactory.ref("dayOfWeek")), - columnNode("sub", "sub", add(ExpressionFactory.ref("MAX_0"), ExpressionFactory - .ref("MIN_1"))))); - } - - @Test - public void parseCompoundAggWithExpressionShouldPass() { - String sql = "SELECT ASCII(dayOfWeek), log(max(FlightDelayMin) + MIN(FlightDelayMin)) as log " + - "FROM opensearch_dashboards_sample_data_flights " + - "GROUP BY ASCII(dayOfWeek)"; - SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); - parser.parse(mYSqlSelectQueryBlock(sql)); - List sqlSelectItems = parser.selectItemList(); - List columnNodes = parser.getColumnNodes(); - - assertThat(sqlSelectItems, containsInAnyOrder(group("ASCII(dayOfWeek)", "ASCII(dayOfWeek)"), - agg("MAX", "FlightDelayMin", "MAX_0"), - agg("MIN", "FlightDelayMin", "MIN_1"))); - - assertThat(columnNodes, containsInAnyOrder(columnNode("ASCII(dayOfWeek)", null, ExpressionFactory - .ref("ASCII(dayOfWeek)")), - columnNode("log", "log", log(add(ExpressionFactory.ref("MAX_0"), ExpressionFactory - .ref("MIN_1")))))); - } - - @Test - public void parseSingleFunctionOverAggShouldPass() { - String sql = "SELECT log(max(age)) FROM accounts"; - SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); - parser.parse(mYSqlSelectQueryBlock(sql)); - List sqlSelectItems = parser.selectItemList(); - List columnNodes = parser.getColumnNodes(); - - assertThat(sqlSelectItems, containsInAnyOrder(agg("max", "age", "max_0"))); - assertThat(columnNodes, containsInAnyOrder(columnNode("log(max(age))", null, log( - ExpressionFactory.ref("max_0"))))); - } - - @Test - public void parseFunctionGroupColumnOverShouldPass() { - String sql = "SELECT CAST(balance AS FLOAT) FROM accounts GROUP BY balance"; - SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); - 
parser.parse(mYSqlSelectQueryBlock(sql)); - List sqlSelectItems = parser.selectItemList(); - List columnNodes = parser.getColumnNodes(); - - assertThat(sqlSelectItems, containsInAnyOrder(group("balance", "balance"))); - assertThat(columnNodes, containsInAnyOrder(columnNode("CAST(balance AS FLOAT)", null, cast( - ExpressionFactory.ref("balance"))))); - } - - @Test - public void withoutAggregationShouldPass() { - String sql = "SELECT age, gender FROM accounts GROUP BY age, gender"; - SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); - parser.parse(mYSqlSelectQueryBlock(sql)); - List sqlSelectItems = parser.selectItemList(); - List columnNodes = parser.getColumnNodes(); - - assertThat(sqlSelectItems, containsInAnyOrder( - group("age", "age"), - group("gender", "gender"))); - assertThat(columnNodes, containsInAnyOrder( - columnNode("age", null, ExpressionFactory.ref("age")), - columnNode("gender", null, ExpressionFactory.ref("gender")))); - } - - @Test - public void groupKeyInSelectWithFunctionShouldPass() { - String sql = "SELECT log(age), max(balance) FROM accounts GROUP BY age"; - SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); - parser.parse(mYSqlSelectQueryBlock(sql)); - List sqlSelectItems = parser.selectItemList(); - List columnNodes = parser.getColumnNodes(); - - assertThat(sqlSelectItems, containsInAnyOrder( - group("age", "age"), - agg("max", "balance", "max_0"))); - assertThat(columnNodes, containsInAnyOrder( - columnNode("log(age)", null, log(ExpressionFactory.ref("age"))), - columnNode("max(balance)", null, ExpressionFactory.ref("max_0")))); - } - - @Test - public void theDotInFieldNameShouldBeReplaceWithSharp() { - String sql = "SELECT name.lastname, max(balance) FROM accounts GROUP BY name.lastname"; - SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); - parser.parse(mYSqlSelectQueryBlock(sql)); - List sqlSelectItems = parser.selectItemList(); - List 
columnNodes = parser.getColumnNodes(); - - assertThat(sqlSelectItems, containsInAnyOrder( - group("name.lastname", "name#lastname"), - agg("max", "balance", "max_0"))); - assertThat(columnNodes, containsInAnyOrder( - columnNode("name.lastname", null, ExpressionFactory.ref("name#lastname")), - columnNode("max(balance)", null, ExpressionFactory.ref("max_0")))); - } - - @Test - public void noGroupKeyInSelectShouldPass() { - String sql = "SELECT AVG(age) FROM t GROUP BY age"; - SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); - parser.parse(mYSqlSelectQueryBlock(sql)); - List sqlSelectItems = parser.selectItemList(); - List columnNodes = parser.getColumnNodes(); - - assertThat(sqlSelectItems, containsInAnyOrder( - agg("avg", "age", "avg_0"))); - assertThat(columnNodes, containsInAnyOrder( - columnNode("avg(age)", null, ExpressionFactory.ref("avg_0")))); - } - - @Test - public void aggWithDistinctShouldPass() { - String sql = "SELECT count(distinct gender) FROM t GROUP BY age"; - SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); - parser.parse(mYSqlSelectQueryBlock(sql)); - List sqlSelectItems = parser.selectItemList(); - List columnNodes = parser.getColumnNodes(); - - assertThat(sqlSelectItems, containsInAnyOrder( - agg("count", "gender", "count_0"))); - assertThat(columnNodes, containsInAnyOrder( - columnNode("count(distinct gender)", null, ExpressionFactory.ref("count_0")))); - } - - /** - * TermQueryExplainIT.testNestedSingleGroupBy - */ - @Test - public void aggregationWithNestedShouldThrowException() { - exceptionRule.expect(RuntimeException.class); - exceptionRule.expectMessage("unsupported operator: nested"); - - String sql = "SELECT nested(projects.name, 'projects'),id " - + "FROM t " - + "GROUP BY nested(projects.name.keyword, 'projects')"; - SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); - parser.parse(mYSqlSelectQueryBlock(sql)); - } - - private 
MySqlSelectQueryBlock mYSqlSelectQueryBlock(String sql) { - String dbType = JdbcConstants.MYSQL; - SQLQueryExpr sqlQueryExpr = (SQLQueryExpr) SQLUtils.toSQLExpr(sql, dbType); - return ((MySqlSelectQueryBlock) sqlQueryExpr.getSubQuery().getQuery()); - } - - private TypeSafeMatcher columnNode(String name, String alias, Expression expr) { - return new TypeSafeMatcher() { - @Override - public void describeTo(Description description) { - description.appendText(String.format("(name=%s,alias=%s,expression=%s)", name, alias, expr)); - } - - @Override - protected boolean matchesSafely(ColumnNode item) { - if (name == null) { - return false; - } - if (alias == null && item.getAlias() != null) { - return false; - } - - return name.equalsIgnoreCase(item.getName()) && - ((alias == null && item.getAlias() == null) || alias.equals(item.getAlias())) && - expr.toString().equalsIgnoreCase(item.getExpr().toString()); - } - }; - } - - private TypeSafeMatcher agg(String methodName, String name, String alias) { - return new TypeSafeMatcher() { - @Override - public void describeTo(Description description) { - description.appendText(String.format("(methodName=%s, name=%s, alias=%s)", methodName, name, alias)); - } - - @Override - protected boolean matchesSafely(SQLSelectItem item) { - if (item.getExpr() instanceof SQLAggregateExpr) { - return ((SQLAggregateExpr) item.getExpr()).getMethodName().equalsIgnoreCase(methodName) && - ((SQLAggregateExpr) item.getExpr()).getArguments() - .get(0) - .toString() - .equalsIgnoreCase(name) && - ((item.getAlias() == null && alias == null) || item.getAlias().equalsIgnoreCase(alias)); - } else { - return false; - } - } - }; - } - - private TypeSafeMatcher group(String name, String alias) { - return new TypeSafeMatcher() { - @Override - public void describeTo(Description description) { - description.appendText(String.format("(name=%s, alias=%s)", name, alias)); - } - - @Override - protected boolean matchesSafely(SQLSelectItem item) { - boolean b = 
item.getExpr().toString().equalsIgnoreCase(name) && - ((item.getAlias() == null && alias == null) || item.getAlias().equalsIgnoreCase(alias)); - return b; - } - }; - } - - private Expression add(Expression... expressions) { - return of(ADD, Arrays.asList(expressions)); - } - - private Expression log(Expression... expressions) { - return of(LOG, Arrays.asList(expressions)); - } + @Rule public ExpectedException exceptionRule = ExpectedException.none(); + + @Test + public void parseAggWithoutExpressionShouldPass() { + String sql = + "SELECT dayOfWeek, max(FlightDelayMin), MIN(FlightDelayMin) as min " + + "FROM opensearch_dashboards_sample_data_flights " + + "GROUP BY dayOfWeek"; + SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); + parser.parse(mYSqlSelectQueryBlock(sql)); + List sqlSelectItems = parser.selectItemList(); + List columnNodes = parser.getColumnNodes(); + + assertThat( + sqlSelectItems, + containsInAnyOrder( + group("dayOfWeek", "dayOfWeek"), + agg("MAX", "FlightDelayMin", "MAX_0"), + agg("MIN", "FlightDelayMin", "min"))); + + assertThat( + columnNodes, + containsInAnyOrder( + columnNode("dayOfWeek", null, ExpressionFactory.ref("dayOfWeek")), + columnNode("MAX(FlightDelayMin)", null, ExpressionFactory.ref("MAX_0")), + columnNode("min", "min", ExpressionFactory.ref("min")))); + } + + @Test + public void parseAggWithFunctioniWithoutExpressionShouldPass() { + String sql = + "SELECT dayOfWeek, max(FlightDelayMin), MIN(FlightDelayMin) as min " + + "FROM opensearch_dashboards_sample_data_flights " + + "GROUP BY dayOfWeek"; + SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); + parser.parse(mYSqlSelectQueryBlock(sql)); + List sqlSelectItems = parser.selectItemList(); + List columnNodes = parser.getColumnNodes(); + + assertThat( + sqlSelectItems, + containsInAnyOrder( + group("dayOfWeek", "dayOfWeek"), + agg("MAX", "FlightDelayMin", "MAX_0"), + agg("MIN", "FlightDelayMin", "min"))); + + assertThat( + 
columnNodes, + containsInAnyOrder( + columnNode("dayOfWeek", null, ExpressionFactory.ref("dayOfWeek")), + columnNode("MAX(FlightDelayMin)", null, ExpressionFactory.ref("MAX_0")), + columnNode("min", "min", ExpressionFactory.ref("min")))); + } + + @Test + public void parseAggWithExpressionShouldPass() { + String sql = + "SELECT dayOfWeek, max(FlightDelayMin) + MIN(FlightDelayMin) as sub " + + "FROM opensearch_dashboards_sample_data_flights " + + "GROUP BY dayOfWeek"; + SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); + parser.parse(mYSqlSelectQueryBlock(sql)); + List sqlSelectItems = parser.selectItemList(); + List columnNodes = parser.getColumnNodes(); + + assertThat( + sqlSelectItems, + containsInAnyOrder( + group("dayOfWeek", "dayOfWeek"), + agg("MAX", "FlightDelayMin", "MAX_0"), + agg("MIN", "FlightDelayMin", "MIN_1"))); + + assertThat( + columnNodes, + containsInAnyOrder( + columnNode("dayOfWeek", null, ExpressionFactory.ref("dayOfWeek")), + columnNode( + "sub", + "sub", + add(ExpressionFactory.ref("MAX_0"), ExpressionFactory.ref("MIN_1"))))); + } + + @Test + public void parseWithRawSelectFuncnameShouldPass() { + String sql = + "SELECT LOG(FlightDelayMin) " + + "FROM opensearch_dashboards_sample_data_flights " + + "GROUP BY log(FlightDelayMin)"; + SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); + parser.parse(mYSqlSelectQueryBlock(sql)); + List sqlSelectItems = parser.selectItemList(); + List columnNodes = parser.getColumnNodes(); + + assertThat( + sqlSelectItems, containsInAnyOrder(group("log(FlightDelayMin)", "log(FlightDelayMin)"))); + + assertThat( + columnNodes, + containsInAnyOrder( + columnNode("LOG(FlightDelayMin)", null, ExpressionFactory.ref("log(FlightDelayMin)")))); + } + + @Test + public void functionOverFiledShouldPass() { + String sql = + "SELECT dayOfWeek, max(FlightDelayMin) + MIN(FlightDelayMin) as sub " + + "FROM opensearch_dashboards_sample_data_flights " + + "GROUP BY 
dayOfWeek"; + SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); + parser.parse(mYSqlSelectQueryBlock(sql)); + List sqlSelectItems = parser.selectItemList(); + List columnNodes = parser.getColumnNodes(); + + assertThat( + sqlSelectItems, + containsInAnyOrder( + group("dayOfWeek", "dayOfWeek"), + agg("MAX", "FlightDelayMin", "MAX_0"), + agg("MIN", "FlightDelayMin", "MIN_1"))); + + assertThat( + columnNodes, + containsInAnyOrder( + columnNode("dayOfWeek", null, ExpressionFactory.ref("dayOfWeek")), + columnNode( + "sub", + "sub", + add(ExpressionFactory.ref("MAX_0"), ExpressionFactory.ref("MIN_1"))))); + } + + @Test + public void parseCompoundAggWithExpressionShouldPass() { + String sql = + "SELECT ASCII(dayOfWeek), log(max(FlightDelayMin) + MIN(FlightDelayMin)) as log " + + "FROM opensearch_dashboards_sample_data_flights " + + "GROUP BY ASCII(dayOfWeek)"; + SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); + parser.parse(mYSqlSelectQueryBlock(sql)); + List sqlSelectItems = parser.selectItemList(); + List columnNodes = parser.getColumnNodes(); + + assertThat( + sqlSelectItems, + containsInAnyOrder( + group("ASCII(dayOfWeek)", "ASCII(dayOfWeek)"), + agg("MAX", "FlightDelayMin", "MAX_0"), + agg("MIN", "FlightDelayMin", "MIN_1"))); + + assertThat( + columnNodes, + containsInAnyOrder( + columnNode("ASCII(dayOfWeek)", null, ExpressionFactory.ref("ASCII(dayOfWeek)")), + columnNode( + "log", + "log", + log(add(ExpressionFactory.ref("MAX_0"), ExpressionFactory.ref("MIN_1")))))); + } + + @Test + public void parseSingleFunctionOverAggShouldPass() { + String sql = "SELECT log(max(age)) FROM accounts"; + SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); + parser.parse(mYSqlSelectQueryBlock(sql)); + List sqlSelectItems = parser.selectItemList(); + List columnNodes = parser.getColumnNodes(); + + assertThat(sqlSelectItems, containsInAnyOrder(agg("max", "age", "max_0"))); + assertThat( + 
columnNodes, + containsInAnyOrder(columnNode("log(max(age))", null, log(ExpressionFactory.ref("max_0"))))); + } + + @Test + public void parseFunctionGroupColumnOverShouldPass() { + String sql = "SELECT CAST(balance AS FLOAT) FROM accounts GROUP BY balance"; + SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); + parser.parse(mYSqlSelectQueryBlock(sql)); + List sqlSelectItems = parser.selectItemList(); + List columnNodes = parser.getColumnNodes(); + + assertThat(sqlSelectItems, containsInAnyOrder(group("balance", "balance"))); + assertThat( + columnNodes, + containsInAnyOrder( + columnNode("CAST(balance AS FLOAT)", null, cast(ExpressionFactory.ref("balance"))))); + } + + @Test + public void withoutAggregationShouldPass() { + String sql = "SELECT age, gender FROM accounts GROUP BY age, gender"; + SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); + parser.parse(mYSqlSelectQueryBlock(sql)); + List sqlSelectItems = parser.selectItemList(); + List columnNodes = parser.getColumnNodes(); + + assertThat(sqlSelectItems, containsInAnyOrder(group("age", "age"), group("gender", "gender"))); + assertThat( + columnNodes, + containsInAnyOrder( + columnNode("age", null, ExpressionFactory.ref("age")), + columnNode("gender", null, ExpressionFactory.ref("gender")))); + } + + @Test + public void groupKeyInSelectWithFunctionShouldPass() { + String sql = "SELECT log(age), max(balance) FROM accounts GROUP BY age"; + SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); + parser.parse(mYSqlSelectQueryBlock(sql)); + List sqlSelectItems = parser.selectItemList(); + List columnNodes = parser.getColumnNodes(); + + assertThat( + sqlSelectItems, containsInAnyOrder(group("age", "age"), agg("max", "balance", "max_0"))); + assertThat( + columnNodes, + containsInAnyOrder( + columnNode("log(age)", null, log(ExpressionFactory.ref("age"))), + columnNode("max(balance)", null, ExpressionFactory.ref("max_0")))); + } 
+ + @Test + public void theDotInFieldNameShouldBeReplaceWithSharp() { + String sql = "SELECT name.lastname, max(balance) FROM accounts GROUP BY name.lastname"; + SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); + parser.parse(mYSqlSelectQueryBlock(sql)); + List sqlSelectItems = parser.selectItemList(); + List columnNodes = parser.getColumnNodes(); + + assertThat( + sqlSelectItems, + containsInAnyOrder( + group("name.lastname", "name#lastname"), agg("max", "balance", "max_0"))); + assertThat( + columnNodes, + containsInAnyOrder( + columnNode("name.lastname", null, ExpressionFactory.ref("name#lastname")), + columnNode("max(balance)", null, ExpressionFactory.ref("max_0")))); + } + + @Test + public void noGroupKeyInSelectShouldPass() { + String sql = "SELECT AVG(age) FROM t GROUP BY age"; + SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); + parser.parse(mYSqlSelectQueryBlock(sql)); + List sqlSelectItems = parser.selectItemList(); + List columnNodes = parser.getColumnNodes(); + + assertThat(sqlSelectItems, containsInAnyOrder(agg("avg", "age", "avg_0"))); + assertThat( + columnNodes, + containsInAnyOrder(columnNode("avg(age)", null, ExpressionFactory.ref("avg_0")))); + } + + @Test + public void aggWithDistinctShouldPass() { + String sql = "SELECT count(distinct gender) FROM t GROUP BY age"; + SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); + parser.parse(mYSqlSelectQueryBlock(sql)); + List sqlSelectItems = parser.selectItemList(); + List columnNodes = parser.getColumnNodes(); + + assertThat(sqlSelectItems, containsInAnyOrder(agg("count", "gender", "count_0"))); + assertThat( + columnNodes, + containsInAnyOrder( + columnNode("count(distinct gender)", null, ExpressionFactory.ref("count_0")))); + } + + /** TermQueryExplainIT.testNestedSingleGroupBy */ + @Test + public void aggregationWithNestedShouldThrowException() { + exceptionRule.expect(RuntimeException.class); + 
exceptionRule.expectMessage("unsupported operator: nested"); + + String sql = + "SELECT nested(projects.name, 'projects'),id " + + "FROM t " + + "GROUP BY nested(projects.name.keyword, 'projects')"; + SQLAggregationParser parser = new SQLAggregationParser(new ColumnTypeProvider()); + parser.parse(mYSqlSelectQueryBlock(sql)); + } + + private MySqlSelectQueryBlock mYSqlSelectQueryBlock(String sql) { + String dbType = JdbcConstants.MYSQL; + SQLQueryExpr sqlQueryExpr = (SQLQueryExpr) SQLUtils.toSQLExpr(sql, dbType); + return ((MySqlSelectQueryBlock) sqlQueryExpr.getSubQuery().getQuery()); + } + + private TypeSafeMatcher columnNode(String name, String alias, Expression expr) { + return new TypeSafeMatcher() { + @Override + public void describeTo(Description description) { + description.appendText( + String.format("(name=%s,alias=%s,expression=%s)", name, alias, expr)); + } + + @Override + protected boolean matchesSafely(ColumnNode item) { + if (name == null) { + return false; + } + if (alias == null && item.getAlias() != null) { + return false; + } + + return name.equalsIgnoreCase(item.getName()) + && ((alias == null && item.getAlias() == null) || alias.equals(item.getAlias())) + && expr.toString().equalsIgnoreCase(item.getExpr().toString()); + } + }; + } + + private TypeSafeMatcher agg(String methodName, String name, String alias) { + return new TypeSafeMatcher() { + @Override + public void describeTo(Description description) { + description.appendText( + String.format("(methodName=%s, name=%s, alias=%s)", methodName, name, alias)); + } + + @Override + protected boolean matchesSafely(SQLSelectItem item) { + if (item.getExpr() instanceof SQLAggregateExpr) { + return ((SQLAggregateExpr) item.getExpr()).getMethodName().equalsIgnoreCase(methodName) + && ((SQLAggregateExpr) item.getExpr()) + .getArguments() + .get(0) + .toString() + .equalsIgnoreCase(name) + && ((item.getAlias() == null && alias == null) + || item.getAlias().equalsIgnoreCase(alias)); + } else { + return 
false; + } + } + }; + } + + private TypeSafeMatcher group(String name, String alias) { + return new TypeSafeMatcher() { + @Override + public void describeTo(Description description) { + description.appendText(String.format("(name=%s, alias=%s)", name, alias)); + } + + @Override + protected boolean matchesSafely(SQLSelectItem item) { + boolean b = + item.getExpr().toString().equalsIgnoreCase(name) + && ((item.getAlias() == null && alias == null) + || item.getAlias().equalsIgnoreCase(alias)); + return b; + } + }; + } + + private Expression add(Expression... expressions) { + return of(ADD, Arrays.asList(expressions)); + } + + private Expression log(Expression... expressions) { + return of(LOG, Arrays.asList(expressions)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/converter/SQLExprToExpressionConverterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/converter/SQLExprToExpressionConverterTest.java index e297c2c1d4..ac949eb0d7 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/converter/SQLExprToExpressionConverterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/converter/SQLExprToExpressionConverterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.planner.converter; import static org.junit.Assert.assertEquals; @@ -34,118 +33,125 @@ @RunWith(MockitoJUnitRunner.class) public class SQLExprToExpressionConverterTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); - - private SQLExprToExpressionConverter converter; - private SQLAggregationParser.Context context; - private final SQLAggregateExpr maxA = new SQLAggregateExpr("MAX"); - private final SQLAggregateExpr maxB = new SQLAggregateExpr("MAX"); - private final SQLAggregateExpr minA = new SQLAggregateExpr("MIN"); - private final SQLIdentifierExpr groupG = new SQLIdentifierExpr("A"); - private final 
SQLIdentifierExpr aggA = new SQLIdentifierExpr("A"); - private final SQLIdentifierExpr aggB = new SQLIdentifierExpr("B"); - private final SQLIntegerExpr one = new SQLIntegerExpr(1); - - @Before - public void setup() { - maxA.getArguments().add(aggA); - maxB.getArguments().add(aggB); - minA.getArguments().add(aggA); - context = new SQLAggregationParser.Context(ImmutableMap.of()); - converter = new SQLExprToExpressionConverter(context); - } - - @Test - public void identifierShouldReturnVarExpression() { - context.addGroupKeyExpr(groupG); - Expression expression = converter.convert(groupG); - - assertEquals(ref("A").toString(), expression.toString()); - } - - @Test - public void binaryOperatorAddShouldReturnAddExpression() { - context.addAggregationExpr(maxA); - context.addAggregationExpr(minA); - - Expression expression = converter.convert(new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, minA)); - assertEquals(add(ref("MAX_0"), ref("MIN_1")).toString(), expression.toString()); - } - - @Test - public void compoundBinaryOperatorShouldReturnCorrectExpression() { - context.addAggregationExpr(maxA); - context.addAggregationExpr(minA); - - Expression expression = converter.convert(new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, - new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, - minA))); - assertEquals(add(ref("MAX_0"), add(ref("MAX_0"), ref("MIN_1"))).toString(), expression.toString()); - } - - @Test - public void functionOverCompoundBinaryOperatorShouldReturnCorrectExpression() { - context.addAggregationExpr(maxA); - context.addAggregationExpr(minA); - - SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr("LOG"); - methodInvokeExpr.addParameter(new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, - new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, - minA))); - - Expression expression = converter.convert(methodInvokeExpr); - assertEquals(log(add(ref("MAX_0"), add(ref("MAX_0"), ref("MIN_1")))).toString(), expression.toString()); - } - - @Test - public void 
functionOverGroupColumn() { - context.addAggregationExpr(maxA); - context.addAggregationExpr(minA); - - SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr("LOG"); - methodInvokeExpr.addParameter(new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, - new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, - minA))); - - Expression expression = converter.convert(methodInvokeExpr); - assertEquals(log(add(ref("MAX_0"), add(ref("MAX_0"), ref("MIN_1")))).toString(), expression.toString()); - } - - @Test - public void binaryOperatorWithLiteralAddShouldReturnAddExpression() { - context.addAggregationExpr(maxA); - - Expression expression = converter.convert(new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, one)); - assertEquals(add(ref("MAX_0"), literal(integerValue(1))).toString(), expression.toString()); - } - - @Test - public void unknownIdentifierShouldThrowException() { - context.addAggregationExpr(maxA); - context.addAggregationExpr(minA); - - exceptionRule.expect(RuntimeException.class); - exceptionRule.expectMessage("unsupported expr"); - converter.convert(new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, maxB)); - } - - @Test - public void unsupportOperationShouldThrowException() { - exceptionRule.expect(UnsupportedOperationException.class); - exceptionRule.expectMessage("unsupported operator: cot"); - - context.addAggregationExpr(maxA); - SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr("cot"); - methodInvokeExpr.addParameter(maxA); - converter.convert(methodInvokeExpr); - } - - private Expression add(Expression... expressions) { - return of(ADD, Arrays.asList(expressions)); - } - - private Expression log(Expression... 
expressions) { - return of(LOG, Arrays.asList(expressions)); - } + @Rule public ExpectedException exceptionRule = ExpectedException.none(); + + private SQLExprToExpressionConverter converter; + private SQLAggregationParser.Context context; + private final SQLAggregateExpr maxA = new SQLAggregateExpr("MAX"); + private final SQLAggregateExpr maxB = new SQLAggregateExpr("MAX"); + private final SQLAggregateExpr minA = new SQLAggregateExpr("MIN"); + private final SQLIdentifierExpr groupG = new SQLIdentifierExpr("A"); + private final SQLIdentifierExpr aggA = new SQLIdentifierExpr("A"); + private final SQLIdentifierExpr aggB = new SQLIdentifierExpr("B"); + private final SQLIntegerExpr one = new SQLIntegerExpr(1); + + @Before + public void setup() { + maxA.getArguments().add(aggA); + maxB.getArguments().add(aggB); + minA.getArguments().add(aggA); + context = new SQLAggregationParser.Context(ImmutableMap.of()); + converter = new SQLExprToExpressionConverter(context); + } + + @Test + public void identifierShouldReturnVarExpression() { + context.addGroupKeyExpr(groupG); + Expression expression = converter.convert(groupG); + + assertEquals(ref("A").toString(), expression.toString()); + } + + @Test + public void binaryOperatorAddShouldReturnAddExpression() { + context.addAggregationExpr(maxA); + context.addAggregationExpr(minA); + + Expression expression = + converter.convert(new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, minA)); + assertEquals(add(ref("MAX_0"), ref("MIN_1")).toString(), expression.toString()); + } + + @Test + public void compoundBinaryOperatorShouldReturnCorrectExpression() { + context.addAggregationExpr(maxA); + context.addAggregationExpr(minA); + + Expression expression = + converter.convert( + new SQLBinaryOpExpr( + maxA, + SQLBinaryOperator.Add, + new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, minA))); + assertEquals( + add(ref("MAX_0"), add(ref("MAX_0"), ref("MIN_1"))).toString(), expression.toString()); + } + + @Test + public void 
functionOverCompoundBinaryOperatorShouldReturnCorrectExpression() { + context.addAggregationExpr(maxA); + context.addAggregationExpr(minA); + + SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr("LOG"); + methodInvokeExpr.addParameter( + new SQLBinaryOpExpr( + maxA, SQLBinaryOperator.Add, new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, minA))); + + Expression expression = converter.convert(methodInvokeExpr); + assertEquals( + log(add(ref("MAX_0"), add(ref("MAX_0"), ref("MIN_1")))).toString(), expression.toString()); + } + + @Test + public void functionOverGroupColumn() { + context.addAggregationExpr(maxA); + context.addAggregationExpr(minA); + + SQLMethodInvokeExpr methodInvokeExpr = new SQLMethodInvokeExpr("LOG"); + methodInvokeExpr.addParameter( + new SQLBinaryOpExpr( + maxA, SQLBinaryOperator.Add, new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, minA))); + + Expression expression = converter.convert(methodInvokeExpr); + assertEquals( + log(add(ref("MAX_0"), add(ref("MAX_0"), ref("MIN_1")))).toString(), expression.toString()); + } + + @Test + public void binaryOperatorWithLiteralAddShouldReturnAddExpression() { + context.addAggregationExpr(maxA); + + Expression expression = + converter.convert(new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, one)); + assertEquals(add(ref("MAX_0"), literal(integerValue(1))).toString(), expression.toString()); + } + + @Test + public void unknownIdentifierShouldThrowException() { + context.addAggregationExpr(maxA); + context.addAggregationExpr(minA); + + exceptionRule.expect(RuntimeException.class); + exceptionRule.expectMessage("unsupported expr"); + converter.convert(new SQLBinaryOpExpr(maxA, SQLBinaryOperator.Add, maxB)); + } + + @Test + public void unsupportOperationShouldThrowException() { + exceptionRule.expect(UnsupportedOperationException.class); + exceptionRule.expectMessage("unsupported operator: cot"); + + context.addAggregationExpr(maxA); + SQLMethodInvokeExpr methodInvokeExpr = new 
SQLMethodInvokeExpr("cot"); + methodInvokeExpr.addParameter(maxA); + converter.convert(methodInvokeExpr); + } + + private Expression add(Expression... expressions) { + return of(ADD, Arrays.asList(expressions)); + } + + private Expression log(Expression... expressions) { + return of(LOG, Arrays.asList(expressions)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/converter/SQLToOperatorConverterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/converter/SQLToOperatorConverterTest.java index f64a550a13..578fb9bcff 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/converter/SQLToOperatorConverterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/converter/SQLToOperatorConverterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.planner.converter; import static org.junit.Assert.assertTrue; @@ -25,51 +24,53 @@ @RunWith(MockitoJUnitRunner.class) public class SQLToOperatorConverterTest { - @Mock - private Client client; + @Mock private Client client; - private SQLToOperatorConverter converter; + private SQLToOperatorConverter converter; - @Before - public void setup() { - converter = new SQLToOperatorConverter(client, new ColumnTypeProvider()); - } + @Before + public void setup() { + converter = new SQLToOperatorConverter(client, new ColumnTypeProvider()); + } - @Test - public void convertAggShouldPass() { - String sql = "SELECT dayOfWeek, max(FlightDelayMin), MIN(FlightDelayMin) as min " + - "FROM opensearch_dashboards_sample_data_flights " + - "GROUP BY dayOfWeek"; - toExpr(sql).accept(converter); - PhysicalOperator physicalOperator = converter.getPhysicalOperator(); + @Test + public void convertAggShouldPass() { + String sql = + "SELECT dayOfWeek, max(FlightDelayMin), MIN(FlightDelayMin) as min " + + "FROM opensearch_dashboards_sample_data_flights " + + "GROUP BY dayOfWeek"; + 
toExpr(sql).accept(converter); + PhysicalOperator physicalOperator = converter.getPhysicalOperator(); - assertTrue(physicalOperator instanceof PhysicalProject); - } + assertTrue(physicalOperator instanceof PhysicalProject); + } - @Test - public void convertMaxMinusMinShouldPass() { - String sql = "SELECT dayOfWeek, max(FlightDelayMin) - MIN(FlightDelayMin) as diff " + - "FROM opensearch_dashboards_sample_data_flights " + - "GROUP BY dayOfWeek"; - toExpr(sql).accept(converter); - PhysicalOperator physicalOperator = converter.getPhysicalOperator(); + @Test + public void convertMaxMinusMinShouldPass() { + String sql = + "SELECT dayOfWeek, max(FlightDelayMin) - MIN(FlightDelayMin) as diff " + + "FROM opensearch_dashboards_sample_data_flights " + + "GROUP BY dayOfWeek"; + toExpr(sql).accept(converter); + PhysicalOperator physicalOperator = converter.getPhysicalOperator(); - assertTrue(physicalOperator instanceof PhysicalProject); - } + assertTrue(physicalOperator instanceof PhysicalProject); + } - @Test - public void convertDistinctPass() { - String sql = "SELECT dayOfWeek, max(FlightDelayMin) - MIN(FlightDelayMin) as diff " + - "FROM opensearch_dashboards_sample_data_flights " + - "GROUP BY dayOfWeek"; - toExpr(sql).accept(converter); - PhysicalOperator physicalOperator = converter.getPhysicalOperator(); + @Test + public void convertDistinctPass() { + String sql = + "SELECT dayOfWeek, max(FlightDelayMin) - MIN(FlightDelayMin) as diff " + + "FROM opensearch_dashboards_sample_data_flights " + + "GROUP BY dayOfWeek"; + toExpr(sql).accept(converter); + PhysicalOperator physicalOperator = converter.getPhysicalOperator(); - assertTrue(physicalOperator instanceof PhysicalProject); - } + assertTrue(physicalOperator instanceof PhysicalProject); + } - private SQLQueryExpr toExpr(String sql) { - String dbType = JdbcConstants.MYSQL; - return (SQLQueryExpr) SQLUtils.toSQLExpr(sql, dbType); - } + private SQLQueryExpr toExpr(String sql) { + String dbType = JdbcConstants.MYSQL; + 
return (SQLQueryExpr) SQLUtils.toSQLExpr(sql, dbType); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/physical/SearchAggregationResponseHelperTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/physical/SearchAggregationResponseHelperTest.java index 589dab8905..630ea840cf 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/physical/SearchAggregationResponseHelperTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/planner/physical/SearchAggregationResponseHelperTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.planner.physical; import static org.hamcrest.MatcherAssert.assertThat; @@ -29,305 +28,318 @@ @RunWith(MockitoJUnitRunner.class) public class SearchAggregationResponseHelperTest { - /** - * SELECT MAX(age) as max - * FROM accounts - */ - @Test - public void noBucketOneMetricShouldPass() { - String json = "{\n" - + " \"max#max\": {\n" - + " \"value\": 40\n" - + " }\n" - + "}"; - List> result = SearchAggregationResponseHelper.flatten(AggregationUtils.fromJson(json)); - assertThat(result, contains(allOf(hasEntry("max", 40d)))); - } + /** SELECT MAX(age) as max FROM accounts */ + @Test + public void noBucketOneMetricShouldPass() { + String json = "{\n" + " \"max#max\": {\n" + " \"value\": 40\n" + " }\n" + "}"; + List> result = + SearchAggregationResponseHelper.flatten(AggregationUtils.fromJson(json)); + assertThat(result, contains(allOf(hasEntry("max", 40d)))); + } - /** - * SELECT MAX(age) as max, MIN(age) as min - * FROM accounts - */ - @Test - public void noBucketMultipleMetricShouldPass() { - String json = "{\n" - + " \"max#max\": {\n" - + " \"value\": 40\n" - + " },\n" - + " \"min#min\": {\n" - + " \"value\": 20\n" - + " }\n" - + "}"; - List> result = SearchAggregationResponseHelper.flatten(AggregationUtils.fromJson(json)); - assertThat(result, contains(allOf(hasEntry("max", 40d), hasEntry("min", 
20d)))); - } + /** SELECT MAX(age) as max, MIN(age) as min FROM accounts */ + @Test + public void noBucketMultipleMetricShouldPass() { + String json = + "{\n" + + " \"max#max\": {\n" + + " \"value\": 40\n" + + " },\n" + + " \"min#min\": {\n" + + " \"value\": 20\n" + + " }\n" + + "}"; + List> result = + SearchAggregationResponseHelper.flatten(AggregationUtils.fromJson(json)); + assertThat(result, contains(allOf(hasEntry("max", 40d), hasEntry("min", 20d)))); + } - /** - * SELECT gender, MAX(age) as max, MIN(age) as min - * FROM accounts - * GROUP BY gender - */ - @Test - public void oneBucketMultipleMetricShouldPass() { - String json = "{\n" - + " \"sterms#gender\": {\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": \"m\",\n" - + " \"doc_count\": 507,\n" - + " \"min#min\": {\n" - + " \"value\": 10\n" - + " },\n" - + " \"max#max\": {\n" - + " \"value\": 20\n" - + " }\n" - + " },\n" - + " {\n" - + " \"key\": \"f\",\n" - + " \"doc_count\": 493,\n" - + " \"min#min\": {\n" - + " \"value\": 20\n" - + " },\n" - + " \"max#max\": {\n" - + " \"value\": 40\n" - + " }\n" - + " }\n" - + " ]\n" - + " }\n" - + "}"; - List> result = SearchAggregationResponseHelper.flatten(AggregationUtils.fromJson(json)); - assertThat(result, contains(allOf(hasEntry("gender", (Object) "m"), hasEntry("min", 10d), hasEntry("max", 20d)), - allOf(hasEntry("gender", (Object) "f"), hasEntry("min", 20d), - hasEntry("max", 40d)))); - } + /** SELECT gender, MAX(age) as max, MIN(age) as min FROM accounts GROUP BY gender */ + @Test + public void oneBucketMultipleMetricShouldPass() { + String json = + "{\n" + + " \"sterms#gender\": {\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": \"m\",\n" + + " \"doc_count\": 507,\n" + + " \"min#min\": {\n" + + " \"value\": 10\n" + + " },\n" + + " \"max#max\": {\n" + + " \"value\": 20\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key\": \"f\",\n" + + " \"doc_count\": 493,\n" + + " \"min#min\": {\n" + + " \"value\": 20\n" + + " },\n" + + " \"max#max\": {\n" + + " 
\"value\": 40\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + List> result = + SearchAggregationResponseHelper.flatten(AggregationUtils.fromJson(json)); + assertThat( + result, + contains( + allOf(hasEntry("gender", (Object) "m"), hasEntry("min", 10d), hasEntry("max", 20d)), + allOf(hasEntry("gender", (Object) "f"), hasEntry("min", 20d), hasEntry("max", 40d)))); + } - /** - * SELECT gender, state, MAX(age) as max, MIN(age) as min - * FROM accounts - * GROUP BY gender, state - */ - @Test - public void multipleBucketMultipleMetricShouldPass() { - String json = "{\n" - + " \"sterms#gender\": {\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": \"m\",\n" - + " \"sterms#state\": {\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": \"MD\",\n" - + " \"min#min\": {\n" - + " \"value\": 22\n" - + " },\n" - + " \"max#max\": {\n" - + " \"value\": 39\n" - + " }\n" - + " },\n" - + " {\n" - + " \"key\": \"ID\",\n" - + " \"min#min\": {\n" - + " \"value\": 23\n" - + " },\n" - + " \"max#max\": {\n" - + " \"value\": 40\n" - + " }\n" - + " }\n" - + " ]\n" - + " }\n" - + " },\n" - + " {\n" - + " \"key\": \"f\",\n" - + " \"sterms#state\": {\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": \"TX\",\n" - + " \"min#min\": {\n" - + " \"value\": 20\n" - + " },\n" - + " \"max#max\": {\n" - + " \"value\": 38\n" - + " }\n" - + " },\n" - + " {\n" - + " \"key\": \"MI\",\n" - + " \"min#min\": {\n" - + " \"value\": 22\n" - + " },\n" - + " \"max#max\": {\n" - + " \"value\": 40\n" - + " }\n" - + " }\n" - + " ]\n" - + " }\n" - + " }\n" - + " ]\n" - + " }\n" - + "}"; - List> result = SearchAggregationResponseHelper.flatten(AggregationUtils.fromJson(json)); - assertThat(result, contains( - allOf(hasEntry("gender", (Object) "m"), hasEntry("state", (Object) "MD"), hasEntry("min", 22d), - hasEntry("max", 39d)), - allOf(hasEntry("gender", (Object) "m"), hasEntry("state", (Object) "ID"), hasEntry("min", 23d), - hasEntry("max", 40d)), - allOf(hasEntry("gender", (Object) "f"), 
hasEntry("state", (Object) "TX"), hasEntry("min", 20d), - hasEntry("max", 38d)), - allOf(hasEntry("gender", (Object) "f"), hasEntry("state", (Object) "MI"), hasEntry("min", 22d), - hasEntry("max", 40d)))); - } + /** SELECT gender, state, MAX(age) as max, MIN(age) as min FROM accounts GROUP BY gender, state */ + @Test + public void multipleBucketMultipleMetricShouldPass() { + String json = + "{\n" + + " \"sterms#gender\": {\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": \"m\",\n" + + " \"sterms#state\": {\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": \"MD\",\n" + + " \"min#min\": {\n" + + " \"value\": 22\n" + + " },\n" + + " \"max#max\": {\n" + + " \"value\": 39\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key\": \"ID\",\n" + + " \"min#min\": {\n" + + " \"value\": 23\n" + + " },\n" + + " \"max#max\": {\n" + + " \"value\": 40\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key\": \"f\",\n" + + " \"sterms#state\": {\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": \"TX\",\n" + + " \"min#min\": {\n" + + " \"value\": 20\n" + + " },\n" + + " \"max#max\": {\n" + + " \"value\": 38\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key\": \"MI\",\n" + + " \"min#min\": {\n" + + " \"value\": 22\n" + + " },\n" + + " \"max#max\": {\n" + + " \"value\": 40\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + List> result = + SearchAggregationResponseHelper.flatten(AggregationUtils.fromJson(json)); + assertThat( + result, + contains( + allOf( + hasEntry("gender", (Object) "m"), + hasEntry("state", (Object) "MD"), + hasEntry("min", 22d), + hasEntry("max", 39d)), + allOf( + hasEntry("gender", (Object) "m"), + hasEntry("state", (Object) "ID"), + hasEntry("min", 23d), + hasEntry("max", 40d)), + allOf( + hasEntry("gender", (Object) "f"), + hasEntry("state", (Object) "TX"), + hasEntry("min", 20d), + hasEntry("max", 38d)), + allOf( + hasEntry("gender", (Object) "f"), + hasEntry("state", (Object) "MI"), + 
hasEntry("min", 22d), + hasEntry("max", 40d)))); + } - /** - * SELECT age, gender FROM accounts GROUP BY age, gender - */ - @Test - public void multipleBucketWithoutMetricShouldPass() { - String json = "{\n" - + " \"lterms#age\": {\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": 31,\n" - + " \"doc_count\": 61,\n" - + " \"sterms#gender\": {\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": \"m\",\n" - + " \"doc_count\": 35\n" - + " },\n" - + " {\n" - + " \"key\": \"f\",\n" - + " \"doc_count\": 26\n" - + " }\n" - + " ]\n" - + " }\n" - + " },\n" - + " {\n" - + " \"key\": 39,\n" - + " \"doc_count\": 60,\n" - + " \"sterms#gender\": {\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": \"f\",\n" - + " \"doc_count\": 38\n" - + " },\n" - + " {\n" - + " \"key\": \"m\",\n" - + " \"doc_count\": 22\n" - + " }\n" - + " ]\n" - + " }\n" - + " }\n" - + " ]\n" - + " }\n" - + "}"; - List> result = SearchAggregationResponseHelper.flatten(AggregationUtils.fromJson(json)); - assertThat(result, containsInAnyOrder( - allOf(hasEntry("age", (Object) 31L), hasEntry("gender","m")), - allOf(hasEntry("age", (Object) 31L), hasEntry("gender","f")), - allOf(hasEntry("age", (Object) 39L), hasEntry("gender","m")), - allOf(hasEntry("age", (Object) 39L), hasEntry("gender","f")))); - } + /** SELECT age, gender FROM accounts GROUP BY age, gender */ + @Test + public void multipleBucketWithoutMetricShouldPass() { + String json = + "{\n" + + " \"lterms#age\": {\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": 31,\n" + + " \"doc_count\": 61,\n" + + " \"sterms#gender\": {\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": \"m\",\n" + + " \"doc_count\": 35\n" + + " },\n" + + " {\n" + + " \"key\": \"f\",\n" + + " \"doc_count\": 26\n" + + " }\n" + + " ]\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key\": 39,\n" + + " \"doc_count\": 60,\n" + + " \"sterms#gender\": {\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": \"f\",\n" + + " \"doc_count\": 38\n" + + " },\n" + + " {\n" + + " \"key\": 
\"m\",\n" + + " \"doc_count\": 22\n" + + " }\n" + + " ]\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + List> result = + SearchAggregationResponseHelper.flatten(AggregationUtils.fromJson(json)); + assertThat( + result, + containsInAnyOrder( + allOf(hasEntry("age", (Object) 31L), hasEntry("gender", "m")), + allOf(hasEntry("age", (Object) 31L), hasEntry("gender", "f")), + allOf(hasEntry("age", (Object) 39L), hasEntry("gender", "m")), + allOf(hasEntry("age", (Object) 39L), hasEntry("gender", "f")))); + } - /** - * SELECT PERCENTILES(age) FROM accounts - */ - @Test - public void noBucketPercentilesShouldPass() { - String json = "{\n" - + " \"percentiles_bucket#age\": {\n" - + " \"values\": {\n" - + " \"1.0\": 20,\n" - + " \"5.0\": 21,\n" - + " \"25.0\": 25,\n" - + " \"50.0\": 30.90909090909091,\n" - + " \"75.0\": 35,\n" - + " \"95.0\": 39,\n" - + " \"99.0\": 40\n" - + " }\n" - + " }\n" - + "}"; - List> result = SearchAggregationResponseHelper.flatten(AggregationUtils.fromJson(json)); - assertThat(result, contains(allOf(hasEntry("age_1.0", 20d)))); - } + /** SELECT PERCENTILES(age) FROM accounts */ + @Test + public void noBucketPercentilesShouldPass() { + String json = + "{\n" + + " \"percentiles_bucket#age\": {\n" + + " \"values\": {\n" + + " \"1.0\": 20,\n" + + " \"5.0\": 21,\n" + + " \"25.0\": 25,\n" + + " \"50.0\": 30.90909090909091,\n" + + " \"75.0\": 35,\n" + + " \"95.0\": 39,\n" + + " \"99.0\": 40\n" + + " }\n" + + " }\n" + + "}"; + List> result = + SearchAggregationResponseHelper.flatten(AggregationUtils.fromJson(json)); + assertThat(result, contains(allOf(hasEntry("age_1.0", 20d)))); + } - /** - * SELECT count(*) from online - * GROUP BY date_histogram('field'='insert_time','interval'='4d','alias'='days') - */ - @Test - public void populateShouldPass() { - String json = "{\n" - + " \"date_histogram#days\": {\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key_as_string\": \"2014-08-14 00:00:00\",\n" - + " \"key\": 1407974400000,\n" - + " \"doc_count\": 
477,\n" - + " \"value_count#COUNT_0\": {\n" - + " \"value\": 477\n" - + " }\n" - + " }\n" - + " ]\n" - + " }\n" - + "}"; - List> result = SearchAggregationResponseHelper.flatten(AggregationUtils.fromJson(json)); - assertThat(result, containsInAnyOrder( - allOf(hasEntry("days", (Object) "2014-08-14 00:00:00"), hasEntry("COUNT_0",477d)))); - } + /** + * SELECT count(*) from online GROUP BY + * date_histogram('field'='insert_time','interval'='4d','alias'='days') + */ + @Test + public void populateShouldPass() { + String json = + "{\n" + + " \"date_histogram#days\": {\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key_as_string\": \"2014-08-14 00:00:00\",\n" + + " \"key\": 1407974400000,\n" + + " \"doc_count\": 477,\n" + + " \"value_count#COUNT_0\": {\n" + + " \"value\": 477\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + List> result = + SearchAggregationResponseHelper.flatten(AggregationUtils.fromJson(json)); + assertThat( + result, + containsInAnyOrder( + allOf(hasEntry("days", (Object) "2014-08-14 00:00:00"), hasEntry("COUNT_0", 477d)))); + } - /** - * SELECT s - */ - @Test - public void populateSearchAggregationResponeShouldPass() { - String json = "{\n" - + " \"lterms#age\": {\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": 31,\n" - + " \"doc_count\": 61,\n" - + " \"sterms#gender\": {\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": \"m\",\n" - + " \"doc_count\": 35\n" - + " },\n" - + " {\n" - + " \"key\": \"f\",\n" - + " \"doc_count\": 26\n" - + " }\n" - + " ]\n" - + " }\n" - + " },\n" - + " {\n" - + " \"key\": 39,\n" - + " \"doc_count\": 60,\n" - + " \"sterms#gender\": {\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": \"f\",\n" - + " \"doc_count\": 38\n" - + " },\n" - + " {\n" - + " \"key\": \"m\",\n" - + " \"doc_count\": 22\n" - + " }\n" - + " ]\n" - + " }\n" - + " }\n" - + " ]\n" - + " }\n" - + "}"; - List bindingTupleRows = - SearchAggregationResponseHelper.populateSearchAggregationResponse(AggregationUtils.fromJson(json)); - 
assertEquals(4, bindingTupleRows.size()); - assertThat(bindingTupleRows, containsInAnyOrder( - bindingTupleRow(BindingTuple.from(ImmutableMap.of("age", 31L, "gender", "m"))), - bindingTupleRow(BindingTuple.from(ImmutableMap.of("age", 31L, "gender", "f"))), - bindingTupleRow(BindingTuple.from(ImmutableMap.of("age", 39L, "gender", "m"))), - bindingTupleRow(BindingTuple.from(ImmutableMap.of("age", 39L, "gender", "f"))))); - } + /** SELECT s */ + @Test + public void populateSearchAggregationResponeShouldPass() { + String json = + "{\n" + + " \"lterms#age\": {\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": 31,\n" + + " \"doc_count\": 61,\n" + + " \"sterms#gender\": {\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": \"m\",\n" + + " \"doc_count\": 35\n" + + " },\n" + + " {\n" + + " \"key\": \"f\",\n" + + " \"doc_count\": 26\n" + + " }\n" + + " ]\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key\": 39,\n" + + " \"doc_count\": 60,\n" + + " \"sterms#gender\": {\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": \"f\",\n" + + " \"doc_count\": 38\n" + + " },\n" + + " {\n" + + " \"key\": \"m\",\n" + + " \"doc_count\": 22\n" + + " }\n" + + " ]\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + List bindingTupleRows = + SearchAggregationResponseHelper.populateSearchAggregationResponse( + AggregationUtils.fromJson(json)); + assertEquals(4, bindingTupleRows.size()); + assertThat( + bindingTupleRows, + containsInAnyOrder( + bindingTupleRow(BindingTuple.from(ImmutableMap.of("age", 31L, "gender", "m"))), + bindingTupleRow(BindingTuple.from(ImmutableMap.of("age", 31L, "gender", "f"))), + bindingTupleRow(BindingTuple.from(ImmutableMap.of("age", 39L, "gender", "m"))), + bindingTupleRow(BindingTuple.from(ImmutableMap.of("age", 39L, "gender", "f"))))); + } - private static Matcher bindingTupleRow(BindingTuple bindingTuple) { - return featureValueOf("BindingTuple", equalTo(bindingTuple), BindingTupleRow::data); - } + private static Matcher bindingTupleRow(BindingTuple 
bindingTuple) { + return featureValueOf("BindingTuple", equalTo(bindingTuple), BindingTupleRow::data); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/query/DefaultQueryActionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/query/DefaultQueryActionTest.java index 57530692d4..11e14e9b48 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/query/DefaultQueryActionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/query/DefaultQueryActionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.query; import static org.hamcrest.Matchers.equalTo; @@ -42,228 +41,225 @@ public class DefaultQueryActionTest { - private DefaultQueryAction queryAction; + private DefaultQueryAction queryAction; - private Client mockClient; + private Client mockClient; - private Select mockSelect; + private Select mockSelect; - private SearchRequestBuilder mockRequestBuilder; + private SearchRequestBuilder mockRequestBuilder; - @Before - public void initDefaultQueryAction() { + @Before + public void initDefaultQueryAction() { - mockClient = mock(Client.class); - mockSelect = mock(Select.class); - mockRequestBuilder = mock(SearchRequestBuilder.class); + mockClient = mock(Client.class); + mockSelect = mock(Select.class); + mockRequestBuilder = mock(SearchRequestBuilder.class); - List fields = new LinkedList<>(); - fields.add(new Field("balance", "bbb")); + List fields = new LinkedList<>(); + fields.add(new Field("balance", "bbb")); - doReturn(fields).when(mockSelect).getFields(); - doReturn(null).when(mockRequestBuilder).setFetchSource(any(String[].class), any(String[].class)); - doReturn(null).when(mockRequestBuilder).addScriptField(anyString(), any(Script.class)); + doReturn(fields).when(mockSelect).getFields(); + doReturn(null) + .when(mockRequestBuilder) + .setFetchSource(any(String[].class), any(String[].class)); + 
doReturn(null).when(mockRequestBuilder).addScriptField(anyString(), any(Script.class)); - queryAction = new DefaultQueryAction(mockClient, mockSelect); - queryAction.initialize(mockRequestBuilder); - } + queryAction = new DefaultQueryAction(mockClient, mockSelect); + queryAction.initialize(mockRequestBuilder); + } - @After - public void cleanup() { - LocalClusterState.state(null); - } + @After + public void cleanup() { + LocalClusterState.state(null); + } - @Test - public void scriptFieldWithTwoParams() throws SqlParseException { + @Test + public void scriptFieldWithTwoParams() throws SqlParseException { - List fields = new LinkedList<>(); - fields.add(createScriptField("script1", "doc['balance'] * 2", - false, true, false)); + List fields = new LinkedList<>(); + fields.add(createScriptField("script1", "doc['balance'] * 2", false, true, false)); - queryAction.setFields(fields); + queryAction.setFields(fields); - final Optional> fieldNames = queryAction.getFieldNames(); - Assert.assertTrue("Field names have not been set", fieldNames.isPresent()); - Assert.assertThat(fieldNames.get().size(), equalTo(1)); - Assert.assertThat(fieldNames.get().get(0), equalTo("script1")); + final Optional> fieldNames = queryAction.getFieldNames(); + Assert.assertTrue("Field names have not been set", fieldNames.isPresent()); + Assert.assertThat(fieldNames.get().size(), equalTo(1)); + Assert.assertThat(fieldNames.get().get(0), equalTo("script1")); - Mockito.verify(mockRequestBuilder).addScriptField(eq("script1"), any(Script.class)); - } + Mockito.verify(mockRequestBuilder).addScriptField(eq("script1"), any(Script.class)); + } - @Test - public void scriptFieldWithThreeParams() throws SqlParseException { + @Test + public void scriptFieldWithThreeParams() throws SqlParseException { - List fields = new LinkedList<>(); - fields.add(createScriptField("script1", "doc['balance'] * 2", - true, true, false)); + List fields = new LinkedList<>(); + fields.add(createScriptField("script1", 
"doc['balance'] * 2", true, true, false)); - queryAction.setFields(fields); + queryAction.setFields(fields); - final Optional> fieldNames = queryAction.getFieldNames(); - Assert.assertTrue("Field names have not been set", fieldNames.isPresent()); - Assert.assertThat(fieldNames.get().size(), equalTo(1)); - Assert.assertThat(fieldNames.get().get(0), equalTo("script1")); + final Optional> fieldNames = queryAction.getFieldNames(); + Assert.assertTrue("Field names have not been set", fieldNames.isPresent()); + Assert.assertThat(fieldNames.get().size(), equalTo(1)); + Assert.assertThat(fieldNames.get().get(0), equalTo("script1")); - Mockito.verify(mockRequestBuilder).addScriptField(eq("script1"), any(Script.class)); - } + Mockito.verify(mockRequestBuilder).addScriptField(eq("script1"), any(Script.class)); + } - @Test(expected = SqlParseException.class) - public void scriptFieldWithLessThanTwoParams() throws SqlParseException { + @Test(expected = SqlParseException.class) + public void scriptFieldWithLessThanTwoParams() throws SqlParseException { - List fields = new LinkedList<>(); - fields.add(createScriptField("script1", "doc['balance'] * 2", - false, false, false)); + List fields = new LinkedList<>(); + fields.add(createScriptField("script1", "doc['balance'] * 2", false, false, false)); - queryAction.setFields(fields); - } + queryAction.setFields(fields); + } - @Test - public void scriptFieldWithMoreThanThreeParams() throws SqlParseException { + @Test + public void scriptFieldWithMoreThanThreeParams() throws SqlParseException { - List fields = new LinkedList<>(); - fields.add(createScriptField("script1", "doc['balance'] * 2", - false, true, true)); - - queryAction.setFields(fields); - } - - @Test - public void testIfScrollShouldBeOpenWithDifferentFormats() { - int settingFetchSize = 500; - TimeValue timeValue = new TimeValue(120000); - int limit = 2300; - mockLocalClusterStateAndInitializeMetrics(timeValue); - - doReturn(limit).when(mockSelect).getRowCount(); - 
doReturn(mockRequestBuilder).when(mockRequestBuilder).setSize(settingFetchSize); - SqlRequest mockSqlRequest = mock(SqlRequest.class); - doReturn(settingFetchSize).when(mockSqlRequest).fetchSize(); - queryAction.setSqlRequest(mockSqlRequest); - - Format[] formats = new Format[] {Format.CSV, Format.RAW, Format.JSON, Format.TABLE}; - for (Format format : formats) { - queryAction.setFormat(format); - queryAction.checkAndSetScroll(); - } - - Mockito.verify(mockRequestBuilder, times(4)).setSize(limit); - Mockito.verify(mockRequestBuilder, never()).setScroll(any(TimeValue.class)); - - queryAction.setFormat(Format.JDBC); - queryAction.checkAndSetScroll(); - Mockito.verify(mockRequestBuilder).setSize(settingFetchSize); - Mockito.verify(mockRequestBuilder).setScroll(timeValue); - - } + List fields = new LinkedList<>(); + fields.add(createScriptField("script1", "doc['balance'] * 2", false, true, true)); - @Test - public void testIfScrollShouldBeOpen() { - int settingFetchSize = 500; - TimeValue timeValue = new TimeValue(120000); - int limit = 2300; + queryAction.setFields(fields); + } - doReturn(limit).when(mockSelect).getRowCount(); - doReturn(mockRequestBuilder).when(mockRequestBuilder).setSize(settingFetchSize); - SqlRequest mockSqlRequest = mock(SqlRequest.class); - doReturn(settingFetchSize).when(mockSqlRequest).fetchSize(); - queryAction.setSqlRequest(mockSqlRequest); - queryAction.setFormat(Format.JDBC); + @Test + public void testIfScrollShouldBeOpenWithDifferentFormats() { + int settingFetchSize = 500; + TimeValue timeValue = new TimeValue(120000); + int limit = 2300; + mockLocalClusterStateAndInitializeMetrics(timeValue); - mockLocalClusterStateAndInitializeMetrics(timeValue); - queryAction.checkAndSetScroll(); - Mockito.verify(mockRequestBuilder).setSize(settingFetchSize); - Mockito.verify(mockRequestBuilder).setScroll(timeValue); + doReturn(limit).when(mockSelect).getRowCount(); + doReturn(mockRequestBuilder).when(mockRequestBuilder).setSize(settingFetchSize); + 
SqlRequest mockSqlRequest = mock(SqlRequest.class); + doReturn(settingFetchSize).when(mockSqlRequest).fetchSize(); + queryAction.setSqlRequest(mockSqlRequest); + Format[] formats = new Format[] {Format.CSV, Format.RAW, Format.JSON, Format.TABLE}; + for (Format format : formats) { + queryAction.setFormat(format); + queryAction.checkAndSetScroll(); } - @Test - public void testIfScrollShouldBeOpenWithDifferentFetchSize() { - TimeValue timeValue = new TimeValue(120000); - int limit = 2300; - mockLocalClusterStateAndInitializeMetrics(timeValue); - - doReturn(limit).when(mockSelect).getRowCount(); - SqlRequest mockSqlRequest = mock(SqlRequest.class); - queryAction.setSqlRequest(mockSqlRequest); - queryAction.setFormat(Format.JDBC); - - int[] fetchSizes = new int[] {0, -10}; - for (int fetch : fetchSizes) { - doReturn(fetch).when(mockSqlRequest).fetchSize(); - queryAction.checkAndSetScroll(); - } - Mockito.verify(mockRequestBuilder, times(2)).setSize(limit); - Mockito.verify(mockRequestBuilder, never()).setScroll(timeValue); - - int userFetchSize = 20; - doReturn(userFetchSize).when(mockSqlRequest).fetchSize(); - doReturn(mockRequestBuilder).when(mockRequestBuilder).setSize(userFetchSize); - queryAction.checkAndSetScroll(); - Mockito.verify(mockRequestBuilder).setSize(20); - Mockito.verify(mockRequestBuilder).setScroll(timeValue); + Mockito.verify(mockRequestBuilder, times(4)).setSize(limit); + Mockito.verify(mockRequestBuilder, never()).setScroll(any(TimeValue.class)); + + queryAction.setFormat(Format.JDBC); + queryAction.checkAndSetScroll(); + Mockito.verify(mockRequestBuilder).setSize(settingFetchSize); + Mockito.verify(mockRequestBuilder).setScroll(timeValue); + } + + @Test + public void testIfScrollShouldBeOpen() { + int settingFetchSize = 500; + TimeValue timeValue = new TimeValue(120000); + int limit = 2300; + + doReturn(limit).when(mockSelect).getRowCount(); + doReturn(mockRequestBuilder).when(mockRequestBuilder).setSize(settingFetchSize); + SqlRequest 
mockSqlRequest = mock(SqlRequest.class); + doReturn(settingFetchSize).when(mockSqlRequest).fetchSize(); + queryAction.setSqlRequest(mockSqlRequest); + queryAction.setFormat(Format.JDBC); + + mockLocalClusterStateAndInitializeMetrics(timeValue); + queryAction.checkAndSetScroll(); + Mockito.verify(mockRequestBuilder).setSize(settingFetchSize); + Mockito.verify(mockRequestBuilder).setScroll(timeValue); + } + + @Test + public void testIfScrollShouldBeOpenWithDifferentFetchSize() { + TimeValue timeValue = new TimeValue(120000); + int limit = 2300; + mockLocalClusterStateAndInitializeMetrics(timeValue); + + doReturn(limit).when(mockSelect).getRowCount(); + SqlRequest mockSqlRequest = mock(SqlRequest.class); + queryAction.setSqlRequest(mockSqlRequest); + queryAction.setFormat(Format.JDBC); + + int[] fetchSizes = new int[] {0, -10}; + for (int fetch : fetchSizes) { + doReturn(fetch).when(mockSqlRequest).fetchSize(); + queryAction.checkAndSetScroll(); } - - - @Test - public void testIfScrollShouldBeOpenWithDifferentValidFetchSizeAndLimit() { - TimeValue timeValue = new TimeValue(120000); - mockLocalClusterStateAndInitializeMetrics(timeValue); - - int limit = 2300; - doReturn(limit).when(mockSelect).getRowCount(); - SqlRequest mockSqlRequest = mock(SqlRequest.class); - - /** fetchSize <= LIMIT - open scroll*/ - int userFetchSize = 1500; - doReturn(userFetchSize).when(mockSqlRequest).fetchSize(); - doReturn(mockRequestBuilder).when(mockRequestBuilder).setSize(userFetchSize); - queryAction.setSqlRequest(mockSqlRequest); - queryAction.setFormat(Format.JDBC); - - queryAction.checkAndSetScroll(); - Mockito.verify(mockRequestBuilder).setSize(userFetchSize); - Mockito.verify(mockRequestBuilder).setScroll(timeValue); - - /** fetchSize > LIMIT - no scroll */ - userFetchSize = 5000; - doReturn(userFetchSize).when(mockSqlRequest).fetchSize(); - mockRequestBuilder = mock(SearchRequestBuilder.class); - queryAction.initialize(mockRequestBuilder); - queryAction.checkAndSetScroll(); - 
Mockito.verify(mockRequestBuilder).setSize(limit); - Mockito.verify(mockRequestBuilder, never()).setScroll(timeValue); + Mockito.verify(mockRequestBuilder, times(2)).setSize(limit); + Mockito.verify(mockRequestBuilder, never()).setScroll(timeValue); + + int userFetchSize = 20; + doReturn(userFetchSize).when(mockSqlRequest).fetchSize(); + doReturn(mockRequestBuilder).when(mockRequestBuilder).setSize(userFetchSize); + queryAction.checkAndSetScroll(); + Mockito.verify(mockRequestBuilder).setSize(20); + Mockito.verify(mockRequestBuilder).setScroll(timeValue); + } + + @Test + public void testIfScrollShouldBeOpenWithDifferentValidFetchSizeAndLimit() { + TimeValue timeValue = new TimeValue(120000); + mockLocalClusterStateAndInitializeMetrics(timeValue); + + int limit = 2300; + doReturn(limit).when(mockSelect).getRowCount(); + SqlRequest mockSqlRequest = mock(SqlRequest.class); + + /** fetchSize <= LIMIT - open scroll */ + int userFetchSize = 1500; + doReturn(userFetchSize).when(mockSqlRequest).fetchSize(); + doReturn(mockRequestBuilder).when(mockRequestBuilder).setSize(userFetchSize); + queryAction.setSqlRequest(mockSqlRequest); + queryAction.setFormat(Format.JDBC); + + queryAction.checkAndSetScroll(); + Mockito.verify(mockRequestBuilder).setSize(userFetchSize); + Mockito.verify(mockRequestBuilder).setScroll(timeValue); + + /** fetchSize > LIMIT - no scroll */ + userFetchSize = 5000; + doReturn(userFetchSize).when(mockSqlRequest).fetchSize(); + mockRequestBuilder = mock(SearchRequestBuilder.class); + queryAction.initialize(mockRequestBuilder); + queryAction.checkAndSetScroll(); + Mockito.verify(mockRequestBuilder).setSize(limit); + Mockito.verify(mockRequestBuilder, never()).setScroll(timeValue); + } + + private void mockLocalClusterStateAndInitializeMetrics(TimeValue time) { + LocalClusterState mockLocalClusterState = mock(LocalClusterState.class); + LocalClusterState.state(mockLocalClusterState); + 
doReturn(time).when(mockLocalClusterState).getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE); + doReturn(3600L) + .when(mockLocalClusterState) + .getSettingValue(Settings.Key.METRICS_ROLLING_WINDOW); + doReturn(2L).when(mockLocalClusterState).getSettingValue(Settings.Key.METRICS_ROLLING_INTERVAL); + + Metrics.getInstance().registerDefaultMetrics(); + } + + private Field createScriptField( + final String name, + final String script, + final boolean addScriptLanguage, + final boolean addScriptParam, + final boolean addRedundantParam) { + + final List params = new ArrayList<>(); + + params.add(new KVValue("alias", name)); + if (addScriptLanguage) { + params.add(new KVValue("painless")); } - - private void mockLocalClusterStateAndInitializeMetrics(TimeValue time) { - LocalClusterState mockLocalClusterState = mock(LocalClusterState.class); - LocalClusterState.state(mockLocalClusterState); - doReturn(time).when(mockLocalClusterState).getSettingValue( - Settings.Key.SQL_CURSOR_KEEP_ALIVE); - doReturn(3600L).when(mockLocalClusterState).getSettingValue( - Settings.Key.METRICS_ROLLING_WINDOW); - doReturn(2L).when(mockLocalClusterState).getSettingValue( - Settings.Key.METRICS_ROLLING_INTERVAL); - - Metrics.getInstance().registerDefaultMetrics(); - + if (addScriptParam) { + params.add(new KVValue(script)); } - - private Field createScriptField(final String name, final String script, final boolean addScriptLanguage, - final boolean addScriptParam, final boolean addRedundantParam) { - - final List params = new ArrayList<>(); - - params.add(new KVValue("alias", name)); - if (addScriptLanguage) { - params.add(new KVValue("painless")); - } - if (addScriptParam) { - params.add(new KVValue(script)); - } - if (addRedundantParam) { - params.add(new KVValue("Fail the test")); - } - - return new MethodField("script", params, null, null); + if (addRedundantParam) { + params.add(new KVValue("Fail the test")); } + + return new MethodField("script", params, null, null); + } } diff --git 
a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/RewriteRuleExecutorTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/RewriteRuleExecutorTest.java index 632cd2d7ea..9c13e1fc71 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/RewriteRuleExecutorTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/RewriteRuleExecutorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter; import static org.mockito.Mockito.never; @@ -23,31 +22,29 @@ @RunWith(MockitoJUnitRunner.class) public class RewriteRuleExecutorTest { - @Mock - private RewriteRule rewriter; - @Mock - private SQLQueryExpr expr; + @Mock private RewriteRule rewriter; + @Mock private SQLQueryExpr expr; - private RewriteRuleExecutor ruleExecutor; + private RewriteRuleExecutor ruleExecutor; - @Before - public void setup() { - ruleExecutor = RewriteRuleExecutor.builder().withRule(rewriter).build(); - } + @Before + public void setup() { + ruleExecutor = RewriteRuleExecutor.builder().withRule(rewriter).build(); + } - @Test - public void optimize() throws SQLFeatureNotSupportedException { - when(rewriter.match(expr)).thenReturn(true); + @Test + public void optimize() throws SQLFeatureNotSupportedException { + when(rewriter.match(expr)).thenReturn(true); - ruleExecutor.executeOn(expr); - verify(rewriter, times(1)).rewrite(expr); - } + ruleExecutor.executeOn(expr); + verify(rewriter, times(1)).rewrite(expr); + } - @Test - public void noOptimize() throws SQLFeatureNotSupportedException { - when(rewriter.match(expr)).thenReturn(false); + @Test + public void noOptimize() throws SQLFeatureNotSupportedException { + when(rewriter.match(expr)).thenReturn(false); - ruleExecutor.executeOn(expr); - verify(rewriter, never()).rewrite(expr); - } + ruleExecutor.executeOn(expr); + verify(rewriter, never()).rewrite(expr); + } } diff --git 
a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/identifier/UnquoteIdentifierRuleTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/identifier/UnquoteIdentifierRuleTest.java index 41f7b111b0..30bbac861a 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/identifier/UnquoteIdentifierRuleTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/identifier/UnquoteIdentifierRuleTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.identifier; import com.alibaba.druid.sql.SQLUtils; @@ -13,75 +12,70 @@ import org.opensearch.sql.legacy.rewriter.identifier.UnquoteIdentifierRule; import org.opensearch.sql.legacy.util.SqlParserUtils; - -/** - * Test cases for backticks quoted identifiers - */ +/** Test cases for backticks quoted identifiers */ public class UnquoteIdentifierRuleTest { - @Test - public void queryWithQuotedIndex() { - query("SELECT lastname FROM `bank` WHERE balance > 1000 ORDER BY age" - ).shouldBeAfterRewrite("SELECT lastname FROM bank WHERE balance > 1000 ORDER BY age"); - } - - @Test - public void queryWithQuotedField() { - query("SELECT `lastname` FROM bank ORDER BY age" - ).shouldBeAfterRewrite("SELECT lastname FROM bank ORDER BY age"); - - query("SELECT b.`lastname` FROM bank AS b ORDER BY age" - ).shouldBeAfterRewrite("SELECT b.lastname FROM bank AS b ORDER BY age"); - } - - @Test - public void queryWithQuotedAlias() { - query("SELECT `b`.lastname FROM bank AS `b` ORDER BY age" - ).shouldBeAfterRewrite("SELECT b.lastname FROM bank AS b ORDER BY age"); - - query("SELECT `b`.`lastname` FROM bank AS `b` ORDER BY age" - ).shouldBeAfterRewrite("SELECT b.lastname FROM bank AS b ORDER BY age"); - - query("SELECT `b`.`lastname` AS `name` FROM bank AS `b` ORDER BY age" - ).shouldBeAfterRewrite("SELECT b.lastname AS name FROM bank AS b ORDER BY age"); + @Test + public void queryWithQuotedIndex() { + 
query("SELECT lastname FROM `bank` WHERE balance > 1000 ORDER BY age") + .shouldBeAfterRewrite("SELECT lastname FROM bank WHERE balance > 1000 ORDER BY age"); + } + + @Test + public void queryWithQuotedField() { + query("SELECT `lastname` FROM bank ORDER BY age") + .shouldBeAfterRewrite("SELECT lastname FROM bank ORDER BY age"); + + query("SELECT b.`lastname` FROM bank AS b ORDER BY age") + .shouldBeAfterRewrite("SELECT b.lastname FROM bank AS b ORDER BY age"); + } + + @Test + public void queryWithQuotedAlias() { + query("SELECT `b`.lastname FROM bank AS `b` ORDER BY age") + .shouldBeAfterRewrite("SELECT b.lastname FROM bank AS b ORDER BY age"); + + query("SELECT `b`.`lastname` FROM bank AS `b` ORDER BY age") + .shouldBeAfterRewrite("SELECT b.lastname FROM bank AS b ORDER BY age"); + + query("SELECT `b`.`lastname` AS `name` FROM bank AS `b` ORDER BY age") + .shouldBeAfterRewrite("SELECT b.lastname AS name FROM bank AS b ORDER BY age"); + } + + @Test + public void selectSpecificFieldsUsingQuotedTableNamePrefix() { + query("SELECT `bank`.`lastname` FROM `bank`") + .shouldBeAfterRewrite("SELECT bank.lastname FROM bank"); + } + + @Test + public void queryWithQuotedAggrAndFunc() { + query( + "" + + "SELECT `b`.`lastname` AS `name`, AVG(`b`.`balance`) FROM `bank` AS `b` " + + "WHERE ABS(`b`.`age`) > 20 GROUP BY `b`.`lastname` ORDER BY `b`.`lastname`") + .shouldBeAfterRewrite( + "SELECT b.lastname AS name, AVG(b.balance) FROM bank AS b " + + "WHERE ABS(b.age) > 20 GROUP BY b.lastname ORDER BY b.lastname"); + } + + private QueryAssertion query(String sql) { + return new QueryAssertion(sql); + } + + private static class QueryAssertion { + + private UnquoteIdentifierRule rule = new UnquoteIdentifierRule(); + private SQLQueryExpr expr; + + QueryAssertion(String sql) { + this.expr = SqlParserUtils.parse(sql); } - @Test - public void selectSpecificFieldsUsingQuotedTableNamePrefix() { - query("SELECT `bank`.`lastname` FROM `bank`" - ).shouldBeAfterRewrite("SELECT bank.lastname 
FROM bank"); - } - - @Test - public void queryWithQuotedAggrAndFunc() { - query("" + - "SELECT `b`.`lastname` AS `name`, AVG(`b`.`balance`) FROM `bank` AS `b` " + - "WHERE ABS(`b`.`age`) > 20 GROUP BY `b`.`lastname` ORDER BY `b`.`lastname`" - ).shouldBeAfterRewrite( - "SELECT b.lastname AS name, AVG(b.balance) FROM bank AS b " + - "WHERE ABS(b.age) > 20 GROUP BY b.lastname ORDER BY b.lastname" - ); - } - - private QueryAssertion query(String sql) { - return new QueryAssertion(sql); - } - - private static class QueryAssertion { - - private UnquoteIdentifierRule rule = new UnquoteIdentifierRule(); - private SQLQueryExpr expr; - - QueryAssertion(String sql) { - this.expr = SqlParserUtils.parse(sql); - } - - void shouldBeAfterRewrite(String expected) { - rule.rewrite(expr); - Assert.assertEquals( - SQLUtils.toMySqlString(SqlParserUtils.parse(expected)), - SQLUtils.toMySqlString(expr) - ); - } + void shouldBeAfterRewrite(String expected) { + rule.rewrite(expr); + Assert.assertEquals( + SQLUtils.toMySqlString(SqlParserUtils.parse(expected)), SQLUtils.toMySqlString(expr)); } + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/inline/AliasInliningTests.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/inline/AliasInliningTests.java index 0c16a3264a..168725ed11 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/inline/AliasInliningTests.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/inline/AliasInliningTests.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.inline; import static org.hamcrest.MatcherAssert.assertThat; @@ -29,101 +28,111 @@ public class AliasInliningTests { - private static final String TEST_MAPPING_FILE = "mappings/semantics.json"; - @Before - public void setUp() throws IOException { - URL url = Resources.getResource(TEST_MAPPING_FILE); - String mappings = Resources.toString(url, 
Charsets.UTF_8); - mockLocalClusterState(mappings); - } - - @Test - public void orderByAliasedFieldTest() throws SqlParseException { - String originalQuery = "SELECT utc_time date " + - "FROM opensearch_dashboards_sample_data_logs " + - "ORDER BY date DESC"; - String originalDsl = parseAsSimpleQuery(originalQuery); - - String rewrittenQuery = - "SELECT utc_time date " + - "FROM opensearch_dashboards_sample_data_logs " + - "ORDER BY utc_time DESC"; - - String rewrittenDsl = parseAsSimpleQuery(rewrittenQuery); - - assertThat(originalDsl, equalTo(rewrittenDsl)); - } - - @Test - public void orderByAliasedScriptedField() throws SqlParseException { - String originalDsl = parseAsSimpleQuery("SELECT date_format(birthday, 'dd-MM-YYYY') date " + - "FROM bank " + - "ORDER BY date"); - String rewrittenQuery = "SELECT date_format(birthday, 'dd-MM-YYYY') date " + - "FROM bank " + - "ORDER BY date_format(birthday, 'dd-MM-YYYY')"; - - String rewrittenDsl = parseAsSimpleQuery(rewrittenQuery); - assertThat(originalDsl, equalTo(rewrittenDsl)); - } - - @Test - public void groupByAliasedFieldTest() throws SqlParseException { - String originalQuery = "SELECT utc_time date " + - "FROM opensearch_dashboards_sample_data_logs " + - "GROUP BY date"; - - String originalDsl = parseAsAggregationQuery(originalQuery); - - String rewrittenQuery = "SELECT utc_time date " + - "FROM opensearch_dashboards_sample_data_logs " + - "GROUP BY utc_time DESC"; - - String rewrittenDsl = parseAsAggregationQuery(rewrittenQuery); - - assertThat(originalDsl, equalTo(rewrittenDsl)); - } - - @Test - public void groupAndSortBySameExprAlias() throws SqlParseException { - String query = "SELECT date_format(timestamp, 'yyyy-MM') opensearch-table.timestamp_tg, COUNT(*) count, COUNT(DistanceKilometers) opensearch-table.DistanceKilometers_count\n" + - "FROM opensearch_dashboards_sample_data_flights\n" + - "GROUP BY date_format(timestamp, 'yyyy-MM')\n" + - "ORDER BY date_format(timestamp, 'yyyy-MM') DESC\n" + - "LIMIT 
2500"; - String dsl = parseAsAggregationQuery(query); - - JSONObject parseQuery = new JSONObject(dsl); - - assertThat(parseQuery.query("/aggregations/opensearch-table.timestamp_tg/terms/script"), notNullValue()); - - } - - @Test - public void groupByAndSortAliased() throws SqlParseException { - String dsl = parseAsAggregationQuery( - "SELECT date_format(utc_time, 'dd-MM-YYYY') date " + - "FROM opensearch_dashboards_sample_data_logs " + - "GROUP BY date " + - "ORDER BY date DESC"); - - JSONObject parsedQuery = new JSONObject(dsl); - - JSONObject query = (JSONObject)parsedQuery.query("/aggregations/date/terms/script"); - - assertThat(query, notNullValue()); - } - - private String parseAsSimpleQuery(String originalQuery) throws SqlParseException { - SqlRequest sqlRequest = new SqlRequest(originalQuery, new JSONObject()); - DefaultQueryAction defaultQueryAction = new DefaultQueryAction(mock(Client.class), - new SqlParser().parseSelect(parse(originalQuery))); - defaultQueryAction.setSqlRequest(sqlRequest); - return defaultQueryAction.explain().explain(); - } - - private String parseAsAggregationQuery(String originalQuery) throws SqlParseException { - return new AggregationQueryAction(mock(Client.class), - new SqlParser().parseSelect(parse(originalQuery))).explain().explain(); - } + private static final String TEST_MAPPING_FILE = "mappings/semantics.json"; + + @Before + public void setUp() throws IOException { + URL url = Resources.getResource(TEST_MAPPING_FILE); + String mappings = Resources.toString(url, Charsets.UTF_8); + mockLocalClusterState(mappings); + } + + @Test + public void orderByAliasedFieldTest() throws SqlParseException { + String originalQuery = + "SELECT utc_time date " + + "FROM opensearch_dashboards_sample_data_logs " + + "ORDER BY date DESC"; + String originalDsl = parseAsSimpleQuery(originalQuery); + + String rewrittenQuery = + "SELECT utc_time date " + + "FROM opensearch_dashboards_sample_data_logs " + + "ORDER BY utc_time DESC"; + + String 
rewrittenDsl = parseAsSimpleQuery(rewrittenQuery); + + assertThat(originalDsl, equalTo(rewrittenDsl)); + } + + @Test + public void orderByAliasedScriptedField() throws SqlParseException { + String originalDsl = + parseAsSimpleQuery( + "SELECT date_format(birthday, 'dd-MM-YYYY') date " + "FROM bank " + "ORDER BY date"); + String rewrittenQuery = + "SELECT date_format(birthday, 'dd-MM-YYYY') date " + + "FROM bank " + + "ORDER BY date_format(birthday, 'dd-MM-YYYY')"; + + String rewrittenDsl = parseAsSimpleQuery(rewrittenQuery); + assertThat(originalDsl, equalTo(rewrittenDsl)); + } + + @Test + public void groupByAliasedFieldTest() throws SqlParseException { + String originalQuery = + "SELECT utc_time date " + "FROM opensearch_dashboards_sample_data_logs " + "GROUP BY date"; + + String originalDsl = parseAsAggregationQuery(originalQuery); + + String rewrittenQuery = + "SELECT utc_time date " + + "FROM opensearch_dashboards_sample_data_logs " + + "GROUP BY utc_time DESC"; + + String rewrittenDsl = parseAsAggregationQuery(rewrittenQuery); + + assertThat(originalDsl, equalTo(rewrittenDsl)); + } + + @Test + public void groupAndSortBySameExprAlias() throws SqlParseException { + String query = + "SELECT date_format(timestamp, 'yyyy-MM') opensearch-table.timestamp_tg, COUNT(*) count," + + " COUNT(DistanceKilometers) opensearch-table.DistanceKilometers_count\n" + + "FROM opensearch_dashboards_sample_data_flights\n" + + "GROUP BY date_format(timestamp, 'yyyy-MM')\n" + + "ORDER BY date_format(timestamp, 'yyyy-MM') DESC\n" + + "LIMIT 2500"; + String dsl = parseAsAggregationQuery(query); + + JSONObject parseQuery = new JSONObject(dsl); + + assertThat( + parseQuery.query("/aggregations/opensearch-table.timestamp_tg/terms/script"), + notNullValue()); + } + + @Test + public void groupByAndSortAliased() throws SqlParseException { + String dsl = + parseAsAggregationQuery( + "SELECT date_format(utc_time, 'dd-MM-YYYY') date " + + "FROM opensearch_dashboards_sample_data_logs " + + "GROUP 
BY date " + + "ORDER BY date DESC"); + + JSONObject parsedQuery = new JSONObject(dsl); + + JSONObject query = (JSONObject) parsedQuery.query("/aggregations/date/terms/script"); + + assertThat(query, notNullValue()); + } + + private String parseAsSimpleQuery(String originalQuery) throws SqlParseException { + SqlRequest sqlRequest = new SqlRequest(originalQuery, new JSONObject()); + DefaultQueryAction defaultQueryAction = + new DefaultQueryAction( + mock(Client.class), new SqlParser().parseSelect(parse(originalQuery))); + defaultQueryAction.setSqlRequest(sqlRequest); + return defaultQueryAction.explain().explain(); + } + + private String parseAsAggregationQuery(String originalQuery) throws SqlParseException { + return new AggregationQueryAction( + mock(Client.class), new SqlParser().parseSelect(parse(originalQuery))) + .explain() + .explain(); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/ordinal/OrdinalRewriterRuleTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/ordinal/OrdinalRewriterRuleTest.java index 3f4f799d66..d27967e361 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/ordinal/OrdinalRewriterRuleTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/ordinal/OrdinalRewriterRuleTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.ordinal; import com.alibaba.druid.sql.SQLUtils; @@ -16,141 +15,139 @@ import org.opensearch.sql.legacy.rewriter.ordinal.OrdinalRewriterRule; import org.opensearch.sql.legacy.util.SqlParserUtils; -/** - * Test cases for ordinal aliases in GROUP BY and ORDER BY - */ - +/** Test cases for ordinal aliases in GROUP BY and ORDER BY */ public class OrdinalRewriterRuleTest { - @Rule - public ExpectedException exception = ExpectedException.none(); - - @Test - public void ordinalInGroupByShouldMatch() { - query("SELECT lastname FROM bank GROUP BY 
1").shouldMatchRule(); - } - - @Test - public void ordinalInOrderByShouldMatch() { - query("SELECT lastname FROM bank ORDER BY 1").shouldMatchRule(); - } - - @Test - public void ordinalInGroupAndOrderByShouldMatch() { - query("SELECT lastname, age FROM bank GROUP BY 2, 1 ORDER BY 1").shouldMatchRule(); - } - - @Test - public void noOrdinalInGroupByShouldNotMatch() { - query("SELECT lastname FROM bank GROUP BY lastname").shouldNotMatchRule(); + @Rule public ExpectedException exception = ExpectedException.none(); + + @Test + public void ordinalInGroupByShouldMatch() { + query("SELECT lastname FROM bank GROUP BY 1").shouldMatchRule(); + } + + @Test + public void ordinalInOrderByShouldMatch() { + query("SELECT lastname FROM bank ORDER BY 1").shouldMatchRule(); + } + + @Test + public void ordinalInGroupAndOrderByShouldMatch() { + query("SELECT lastname, age FROM bank GROUP BY 2, 1 ORDER BY 1").shouldMatchRule(); + } + + @Test + public void noOrdinalInGroupByShouldNotMatch() { + query("SELECT lastname FROM bank GROUP BY lastname").shouldNotMatchRule(); + } + + @Test + public void noOrdinalInOrderByShouldNotMatch() { + query("SELECT lastname, age FROM bank ORDER BY age").shouldNotMatchRule(); + } + + @Test + public void noOrdinalInGroupAndOrderByShouldNotMatch() { + query("SELECT lastname, age FROM bank GROUP BY lastname, age ORDER BY age") + .shouldNotMatchRule(); + } + + @Test + public void simpleGroupByOrdinal() { + query("SELECT lastname FROM bank GROUP BY 1") + .shouldBeAfterRewrite("SELECT lastname FROM bank GROUP BY lastname"); + } + + @Test + public void multipleGroupByOrdinal() { + query("SELECT lastname, age FROM bank GROUP BY 1, 2 ") + .shouldBeAfterRewrite("SELECT lastname, age FROM bank GROUP BY lastname, age"); + + query("SELECT lastname, age FROM bank GROUP BY 2, 1") + .shouldBeAfterRewrite("SELECT lastname, age FROM bank GROUP BY age, lastname"); + + query("SELECT lastname, age, firstname FROM bank GROUP BY 2, firstname, 1") + .shouldBeAfterRewrite( + 
"SELECT lastname, age, firstname FROM bank GROUP BY age, firstname, lastname"); + + query("SELECT lastname, age, firstname FROM bank GROUP BY 2, something, 1") + .shouldBeAfterRewrite( + "SELECT lastname, age, firstname FROM bank GROUP BY age, something, lastname"); + } + + @Test + public void simpleOrderByOrdinal() { + query("SELECT lastname FROM bank ORDER BY 1") + .shouldBeAfterRewrite("SELECT lastname FROM bank ORDER BY lastname"); + } + + @Test + public void multipleOrderByOrdinal() { + query("SELECT lastname, age FROM bank ORDER BY 1, 2 ") + .shouldBeAfterRewrite("SELECT lastname, age FROM bank ORDER BY lastname, age"); + + query("SELECT lastname, age FROM bank ORDER BY 2, 1") + .shouldBeAfterRewrite("SELECT lastname, age FROM bank ORDER BY age, lastname"); + + query("SELECT lastname, age, firstname FROM bank ORDER BY 2, firstname, 1") + .shouldBeAfterRewrite( + "SELECT lastname, age, firstname FROM bank ORDER BY age, firstname, lastname"); + + query("SELECT lastname, age, firstname FROM bank ORDER BY 2, department, 1") + .shouldBeAfterRewrite( + "SELECT lastname, age, firstname FROM bank ORDER BY age, department, lastname"); + } + + // Tests invalid Ordinals, non-positive ordinal values are already validated by semantic analyzer + @Test + public void invalidGroupByOrdinalShouldThrowException() { + exception.expect(VerificationException.class); + exception.expectMessage("Invalid ordinal [3] specified in [GROUP BY 3]"); + query("SELECT lastname, MAX(lastname) FROM bank GROUP BY 3 ").rewrite(); + } + + @Test + public void invalidOrderByOrdinalShouldThrowException() { + exception.expect(VerificationException.class); + exception.expectMessage("Invalid ordinal [4] specified in [ORDER BY 4]"); + query("SELECT `lastname`, `age`, `firstname` FROM bank ORDER BY 4 IS NOT NULL").rewrite(); + } + + private QueryAssertion query(String sql) { + return new QueryAssertion(sql); + } + + private static class QueryAssertion { + + private OrdinalRewriterRule rule; + private 
SQLQueryExpr expr; + + QueryAssertion(String sql) { + this.expr = SqlParserUtils.parse(sql); + this.rule = new OrdinalRewriterRule(sql); } - @Test - public void noOrdinalInOrderByShouldNotMatch() { - query("SELECT lastname, age FROM bank ORDER BY age").shouldNotMatchRule(); + void shouldBeAfterRewrite(String expected) { + shouldMatchRule(); + rule.rewrite(expr); + Assert.assertEquals( + SQLUtils.toMySqlString(SqlParserUtils.parse(expected)), SQLUtils.toMySqlString(expr)); } - @Test - public void noOrdinalInGroupAndOrderByShouldNotMatch() { - query("SELECT lastname, age FROM bank GROUP BY lastname, age ORDER BY age").shouldNotMatchRule(); + void shouldMatchRule() { + Assert.assertTrue(match()); } - @Test - public void simpleGroupByOrdinal() { - query("SELECT lastname FROM bank GROUP BY 1" - ).shouldBeAfterRewrite("SELECT lastname FROM bank GROUP BY lastname"); + void shouldNotMatchRule() { + Assert.assertFalse(match()); } - @Test - public void multipleGroupByOrdinal() { - query("SELECT lastname, age FROM bank GROUP BY 1, 2 " - ).shouldBeAfterRewrite("SELECT lastname, age FROM bank GROUP BY lastname, age"); - - query("SELECT lastname, age FROM bank GROUP BY 2, 1" - ).shouldBeAfterRewrite("SELECT lastname, age FROM bank GROUP BY age, lastname"); - - query("SELECT lastname, age, firstname FROM bank GROUP BY 2, firstname, 1" - ).shouldBeAfterRewrite("SELECT lastname, age, firstname FROM bank GROUP BY age, firstname, lastname"); - - query("SELECT lastname, age, firstname FROM bank GROUP BY 2, something, 1" - ).shouldBeAfterRewrite("SELECT lastname, age, firstname FROM bank GROUP BY age, something, lastname"); - + void rewrite() { + shouldMatchRule(); + rule.rewrite(expr); } - @Test - public void simpleOrderByOrdinal() { - query("SELECT lastname FROM bank ORDER BY 1" - ).shouldBeAfterRewrite("SELECT lastname FROM bank ORDER BY lastname"); - } - - @Test - public void multipleOrderByOrdinal() { - query("SELECT lastname, age FROM bank ORDER BY 1, 2 " - 
).shouldBeAfterRewrite("SELECT lastname, age FROM bank ORDER BY lastname, age"); - - query("SELECT lastname, age FROM bank ORDER BY 2, 1" - ).shouldBeAfterRewrite("SELECT lastname, age FROM bank ORDER BY age, lastname"); - - query("SELECT lastname, age, firstname FROM bank ORDER BY 2, firstname, 1" - ).shouldBeAfterRewrite("SELECT lastname, age, firstname FROM bank ORDER BY age, firstname, lastname"); - - query("SELECT lastname, age, firstname FROM bank ORDER BY 2, department, 1" - ).shouldBeAfterRewrite("SELECT lastname, age, firstname FROM bank ORDER BY age, department, lastname"); - } - - // Tests invalid Ordinals, non-positive ordinal values are already validated by semantic analyzer - @Test - public void invalidGroupByOrdinalShouldThrowException() { - exception.expect(VerificationException.class); - exception.expectMessage("Invalid ordinal [3] specified in [GROUP BY 3]"); - query("SELECT lastname, MAX(lastname) FROM bank GROUP BY 3 ").rewrite(); - } - - @Test - public void invalidOrderByOrdinalShouldThrowException() { - exception.expect(VerificationException.class); - exception.expectMessage("Invalid ordinal [4] specified in [ORDER BY 4]"); - query("SELECT `lastname`, `age`, `firstname` FROM bank ORDER BY 4 IS NOT NULL").rewrite(); - } - - - private QueryAssertion query(String sql) { - return new QueryAssertion(sql); - } - private static class QueryAssertion { - - private OrdinalRewriterRule rule; - private SQLQueryExpr expr; - - QueryAssertion(String sql) { - this.expr = SqlParserUtils.parse(sql); - this.rule = new OrdinalRewriterRule(sql); - } - - void shouldBeAfterRewrite(String expected) { - shouldMatchRule(); - rule.rewrite(expr); - Assert.assertEquals( - SQLUtils.toMySqlString(SqlParserUtils.parse(expected)), - SQLUtils.toMySqlString(expr) - ); - } - - void shouldMatchRule() { - Assert.assertTrue(match()); - } - - void shouldNotMatchRule() { - Assert.assertFalse(match()); - } - - void rewrite() { - shouldMatchRule(); - rule.rewrite(expr); - } - - private 
boolean match() { - return rule.match(expr); - } + private boolean match() { + return rule.match(expr); } + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/parent/SQLExprParentSetterRuleTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/parent/SQLExprParentSetterRuleTest.java index 15d97d362d..0fdf16e40e 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/parent/SQLExprParentSetterRuleTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/parent/SQLExprParentSetterRuleTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.parent; import static org.junit.Assert.assertTrue; @@ -18,13 +17,12 @@ @RunWith(MockitoJUnitRunner.class) public class SQLExprParentSetterRuleTest { - @Mock - private SQLQueryExpr queryExpr; + @Mock private SQLQueryExpr queryExpr; - private SQLExprParentSetterRule rule = new SQLExprParentSetterRule(); + private SQLExprParentSetterRule rule = new SQLExprParentSetterRule(); - @Test - public void match() { - assertTrue(rule.match(queryExpr)); - } + @Test + public void match() { + assertTrue(rule.match(queryExpr)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/parent/SQLExprParentSetterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/parent/SQLExprParentSetterTest.java index 49023f522a..40bdc262b4 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/parent/SQLExprParentSetterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/parent/SQLExprParentSetterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.parent; import static org.junit.Assert.assertNotNull; @@ -17,36 +16,31 @@ public class SQLExprParentSetterTest { - @Test - public void testSQLInSubQueryExprHasParent() { - String sql = - 
"SELECT * FROM TbA " + - "WHERE a IN (SELECT b FROM TbB)"; - SQLQueryExpr expr = SqlParserUtils.parse(sql); - expr.accept(new SQLExprParentExistsValidator()); - } - - @Test - public void testSQLInListExprHasParent() { - String sql = - "SELECT * FROM TbA " + - "WHERE a IN (10, 20)"; - SQLQueryExpr expr = SqlParserUtils.parse(sql); - expr.accept(new SQLExprParentExistsValidator()); + @Test + public void testSQLInSubQueryExprHasParent() { + String sql = "SELECT * FROM TbA " + "WHERE a IN (SELECT b FROM TbB)"; + SQLQueryExpr expr = SqlParserUtils.parse(sql); + expr.accept(new SQLExprParentExistsValidator()); + } + + @Test + public void testSQLInListExprHasParent() { + String sql = "SELECT * FROM TbA " + "WHERE a IN (10, 20)"; + SQLQueryExpr expr = SqlParserUtils.parse(sql); + expr.accept(new SQLExprParentExistsValidator()); + } + + static class SQLExprParentExistsValidator extends MySqlASTVisitorAdapter { + @Override + public boolean visit(SQLInSubQueryExpr expr) { + assertNotNull(expr.getExpr().getParent()); + return true; } - static class SQLExprParentExistsValidator extends MySqlASTVisitorAdapter { - @Override - public boolean visit(SQLInSubQueryExpr expr) { - assertNotNull(expr.getExpr().getParent()); - return true; - } - - @Override - public boolean visit(SQLInListExpr expr) { - assertNotNull(expr.getExpr().getParent()); - return true; - } + @Override + public boolean visit(SQLInListExpr expr) { + assertNotNull(expr.getExpr().getParent()); + return true; } - + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/ExistsSubQueryRewriterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/ExistsSubQueryRewriterTest.java index 9b88336a85..5f42e4a9c4 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/ExistsSubQueryRewriterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/ExistsSubQueryRewriterTest.java @@ -3,7 +3,6 @@ * 
SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.subquery; import static org.junit.Assert.assertEquals; @@ -14,101 +13,104 @@ public class ExistsSubQueryRewriterTest extends SubQueryRewriterTestBase { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); - - @Test - public void nonCorrelatedExists() { - assertEquals( - sqlString(expr( - "SELECT e.name " + - "FROM employee e, e.projects p " + - "WHERE p IS NOT MISSING")), - sqlString(rewrite(expr( - "SELECT e.name " + - "FROM employee as e " + - "WHERE EXISTS (SELECT * FROM e.projects as p)"))) - ); - } + @Rule public ExpectedException exceptionRule = ExpectedException.none(); - @Test - public void nonCorrelatedExistsWhere() { - assertEquals( - sqlString(expr( - "SELECT e.name " + - "FROM employee e, e.projects p " + - "WHERE p IS NOT MISSING AND p.name LIKE 'security'")), - sqlString(rewrite(expr( - "SELECT e.name " + - "FROM employee as e " + - "WHERE EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 'security')"))) - ); - } + @Test + public void nonCorrelatedExists() { + assertEquals( + sqlString( + expr("SELECT e.name " + "FROM employee e, e.projects p " + "WHERE p IS NOT MISSING")), + sqlString( + rewrite( + expr( + "SELECT e.name " + + "FROM employee as e " + + "WHERE EXISTS (SELECT * FROM e.projects as p)")))); + } - @Test - public void nonCorrelatedExistsParentWhere() { - assertEquals( - sqlString(expr( - "SELECT e.name " + - "FROM employee e, e.projects p " + - "WHERE p IS NOT MISSING AND e.name LIKE 'security'")), - sqlString(rewrite(expr( - "SELECT e.name " + - "FROM employee as e " + - "WHERE EXISTS (SELECT * FROM e.projects as p) AND e.name LIKE 'security'"))) - ); - } + @Test + public void nonCorrelatedExistsWhere() { + assertEquals( + sqlString( + expr( + "SELECT e.name " + + "FROM employee e, e.projects p " + + "WHERE p IS NOT MISSING AND p.name LIKE 'security'")), + sqlString( + rewrite( + expr( + "SELECT e.name FROM employee 
as e WHERE EXISTS (SELECT * FROM e.projects as p" + + " WHERE p.name LIKE 'security')")))); + } - @Test - public void nonCorrelatedNotExists() { - assertEquals( - sqlString(expr( - "SELECT e.name " + - "FROM employee e, e.projects p " + - "WHERE NOT (p IS NOT MISSING)")), - sqlString(rewrite(expr( - "SELECT e.name " + - "FROM employee as e " + - "WHERE NOT EXISTS (SELECT * FROM e.projects as p)"))) - ); - } + @Test + public void nonCorrelatedExistsParentWhere() { + assertEquals( + sqlString( + expr( + "SELECT e.name " + + "FROM employee e, e.projects p " + + "WHERE p IS NOT MISSING AND e.name LIKE 'security'")), + sqlString( + rewrite( + expr( + "SELECT e.name FROM employee as e WHERE EXISTS (SELECT * FROM e.projects as p)" + + " AND e.name LIKE 'security'")))); + } - @Test - public void nonCorrelatedNotExistsWhere() { - assertEquals( - sqlString(expr( - "SELECT e.name " + - "FROM employee e, e.projects p " + - "WHERE NOT (p IS NOT MISSING AND p.name LIKE 'security')")), - sqlString(rewrite(expr( - "SELECT e.name " + - "FROM employee as e " + - "WHERE NOT EXISTS (SELECT * FROM e.projects as p WHERE p.name LIKE 'security')"))) - ); - } + @Test + public void nonCorrelatedNotExists() { + assertEquals( + sqlString( + expr( + "SELECT e.name " + + "FROM employee e, e.projects p " + + "WHERE NOT (p IS NOT MISSING)")), + sqlString( + rewrite( + expr( + "SELECT e.name " + + "FROM employee as e " + + "WHERE NOT EXISTS (SELECT * FROM e.projects as p)")))); + } - @Test - public void nonCorrelatedNotExistsParentWhere() { - assertEquals( - sqlString(expr( - "SELECT e.name " + - "FROM employee e, e.projects p " + - "WHERE NOT (p IS NOT MISSING) AND e.name LIKE 'security'")), - sqlString(rewrite(expr( - "SELECT e.name " + - "FROM employee as e " + - "WHERE NOT EXISTS (SELECT * FROM e.projects as p) AND e.name LIKE 'security'"))) - ); - } + @Test + public void nonCorrelatedNotExistsWhere() { + assertEquals( + sqlString( + expr( + "SELECT e.name " + + "FROM employee e, e.projects p 
" + + "WHERE NOT (p IS NOT MISSING AND p.name LIKE 'security')")), + sqlString( + rewrite( + expr( + "SELECT e.name FROM employee as e WHERE NOT EXISTS (SELECT * FROM e.projects as" + + " p WHERE p.name LIKE 'security')")))); + } - @Test - public void nonCorrelatedExistsAnd() { - exceptionRule.expect(IllegalStateException.class); - exceptionRule.expectMessage("Unsupported subquery"); - rewrite(expr( - "SELECT e.name " + - "FROM employee as e " + - "WHERE EXISTS (SELECT * FROM e.projects as p) AND EXISTS (SELECT * FROM e.comments as c)")); - } + @Test + public void nonCorrelatedNotExistsParentWhere() { + assertEquals( + sqlString( + expr( + "SELECT e.name " + + "FROM employee e, e.projects p " + + "WHERE NOT (p IS NOT MISSING) AND e.name LIKE 'security'")), + sqlString( + rewrite( + expr( + "SELECT e.name FROM employee as e WHERE NOT EXISTS (SELECT * FROM e.projects as" + + " p) AND e.name LIKE 'security'")))); + } + @Test + public void nonCorrelatedExistsAnd() { + exceptionRule.expect(IllegalStateException.class); + exceptionRule.expectMessage("Unsupported subquery"); + rewrite( + expr( + "SELECT e.name FROM employee as e WHERE EXISTS (SELECT * FROM e.projects as p) AND" + + " EXISTS (SELECT * FROM e.comments as c)")); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/InSubqueryRewriterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/InSubqueryRewriterTest.java index e6bd42a273..596252c4cd 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/InSubqueryRewriterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/InSubqueryRewriterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.subquery; import static org.junit.Assert.assertEquals; @@ -14,73 +13,62 @@ public class InSubqueryRewriterTest extends SubQueryRewriterTestBase { - @Rule - public 
ExpectedException exceptionRule = ExpectedException.none(); - - @Test - public void nonCorrleatedIn() throws Exception { - assertEquals( - sqlString(expr( - "SELECT TbA_0.* " + - "FROM TbA as TbA_0 " + - "JOIN TbB as TbB_1 " + - "ON TbA_0.a = TbB_1.b " + - "WHERE TbB_1.b IS NOT NULL")), - sqlString(rewrite(expr( - "SELECT * FROM TbA " + - "WHERE a in (SELECT b FROM TbB)"))) - ); - } + @Rule public ExpectedException exceptionRule = ExpectedException.none(); - @Test - public void nonCorrleatedInWithWhere() throws Exception { - assertEquals( - sqlString(expr( - "SELECT TbA_0.* " + - "FROM TbA as TbA_0 " + - "JOIN TbB as TbB_1 " + - "ON TbA_0.a = TbB_1.b " + - "WHERE TbB_1.b IS NOT NULL AND TbB_1.b > 0")), - sqlString(rewrite(expr( - "SELECT * " + - "FROM TbA " + - "WHERE a in (SELECT b FROM TbB WHERE b > 0)"))) - ); - } + @Test + public void nonCorrleatedIn() throws Exception { + assertEquals( + sqlString( + expr( + "SELECT TbA_0.* " + + "FROM TbA as TbA_0 " + + "JOIN TbB as TbB_1 " + + "ON TbA_0.a = TbB_1.b " + + "WHERE TbB_1.b IS NOT NULL")), + sqlString(rewrite(expr("SELECT * FROM TbA " + "WHERE a in (SELECT b FROM TbB)")))); + } - @Test - public void nonCorrleatedInWithOuterWhere() throws Exception { - assertEquals( - sqlString(expr( - "SELECT TbA_0.* " + - "FROM TbA as TbA_0 " + - "JOIN TbB as TbB_1 " + - "ON TbA_0.a = TbB_1.b " + - "WHERE TbB_1.b IS NOT NULL AND TbA_0.a > 10")), - sqlString(rewrite(expr( - "SELECT * " + - "FROM TbA " + - "WHERE a in (SELECT b FROM TbB) AND a > 10"))) - ); - } + @Test + public void nonCorrleatedInWithWhere() throws Exception { + assertEquals( + sqlString( + expr( + "SELECT TbA_0.* " + + "FROM TbA as TbA_0 " + + "JOIN TbB as TbB_1 " + + "ON TbA_0.a = TbB_1.b " + + "WHERE TbB_1.b IS NOT NULL AND TbB_1.b > 0")), + sqlString( + rewrite( + expr("SELECT * " + "FROM TbA " + "WHERE a in (SELECT b FROM TbB WHERE b > 0)")))); + } + @Test + public void nonCorrleatedInWithOuterWhere() throws Exception { + assertEquals( + sqlString( + expr( + 
"SELECT TbA_0.* " + + "FROM TbA as TbA_0 " + + "JOIN TbB as TbB_1 " + + "ON TbA_0.a = TbB_1.b " + + "WHERE TbB_1.b IS NOT NULL AND TbA_0.a > 10")), + sqlString( + rewrite( + expr("SELECT * " + "FROM TbA " + "WHERE a in (SELECT b FROM TbB) AND a > 10")))); + } - @Test - public void notInUnsupported() throws Exception { - exceptionRule.expect(IllegalStateException.class); - exceptionRule.expectMessage("Unsupported subquery"); - rewrite(expr( - "SELECT * FROM TbA " + - "WHERE a not in (SELECT b FROM TbB)")); - } + @Test + public void notInUnsupported() throws Exception { + exceptionRule.expect(IllegalStateException.class); + exceptionRule.expectMessage("Unsupported subquery"); + rewrite(expr("SELECT * FROM TbA " + "WHERE a not in (SELECT b FROM TbB)")); + } - @Test - public void testMultipleSelectException() throws Exception { - exceptionRule.expect(IllegalStateException.class); - exceptionRule.expectMessage("Unsupported subquery with multiple select [TbB_1.b1, TbB_1.b2]"); - rewrite(expr( - "SELECT * " + - "FROM TbA " + - "WHERE a in (SELECT b1, b2 FROM TbB) AND a > 10")); - } + @Test + public void testMultipleSelectException() throws Exception { + exceptionRule.expect(IllegalStateException.class); + exceptionRule.expectMessage("Unsupported subquery with multiple select [TbB_1.b1, TbB_1.b2]"); + rewrite(expr("SELECT * " + "FROM TbA " + "WHERE a in (SELECT b1, b2 FROM TbB) AND a > 10")); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/NestedQueryContextTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/NestedQueryContextTest.java index a94b3e6112..3e20e8edf6 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/NestedQueryContextTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/NestedQueryContextTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package 
org.opensearch.sql.legacy.unittest.rewriter.subquery; import static org.junit.Assert.assertFalse; @@ -16,42 +15,48 @@ import org.junit.Test; import org.opensearch.sql.legacy.rewriter.subquery.NestedQueryContext; - public class NestedQueryContextTest { - @Test - public void isNested() { - NestedQueryContext nestedQueryDetector = new NestedQueryContext(); - nestedQueryDetector.add(new SQLExprTableSource(new SQLIdentifierExpr("employee"), "e")); - - assertFalse(nestedQueryDetector.isNested(new SQLExprTableSource(new SQLIdentifierExpr("e"), "e1"))); - assertTrue(nestedQueryDetector.isNested(new SQLExprTableSource(new SQLIdentifierExpr("e.projects"), "p"))); - - nestedQueryDetector.add(new SQLExprTableSource(new SQLIdentifierExpr("e.projects"), "p")); - assertTrue(nestedQueryDetector.isNested(new SQLExprTableSource(new SQLIdentifierExpr("p")))); - } - - @Test - public void isNestedJoin() { - NestedQueryContext nestedQueryDetector = new NestedQueryContext(); - SQLJoinTableSource joinTableSource = new SQLJoinTableSource(); - joinTableSource.setLeft(new SQLExprTableSource(new SQLIdentifierExpr("employee"), "e")); - joinTableSource.setRight(new SQLExprTableSource(new SQLIdentifierExpr("e.projects"), "p")); - joinTableSource.setJoinType(JoinType.COMMA); - nestedQueryDetector.add(joinTableSource); - - assertFalse(nestedQueryDetector.isNested(new SQLExprTableSource(new SQLIdentifierExpr("e"), "e1"))); - assertTrue(nestedQueryDetector.isNested(new SQLExprTableSource(new SQLIdentifierExpr("e.projects"), "p"))); - assertTrue(nestedQueryDetector.isNested(new SQLExprTableSource(new SQLIdentifierExpr("p")))); - } - - @Test - public void notNested() { - NestedQueryContext nestedQueryDetector = new NestedQueryContext(); - nestedQueryDetector.add(new SQLExprTableSource(new SQLIdentifierExpr("employee"), "e")); - nestedQueryDetector.add(new SQLExprTableSource(new SQLIdentifierExpr("projects"), "p")); - - assertFalse(nestedQueryDetector.isNested(new SQLExprTableSource(new 
SQLIdentifierExpr("e"), "e1"))); - assertFalse(nestedQueryDetector.isNested(new SQLExprTableSource(new SQLIdentifierExpr("p")))); - } + @Test + public void isNested() { + NestedQueryContext nestedQueryDetector = new NestedQueryContext(); + nestedQueryDetector.add(new SQLExprTableSource(new SQLIdentifierExpr("employee"), "e")); + + assertFalse( + nestedQueryDetector.isNested(new SQLExprTableSource(new SQLIdentifierExpr("e"), "e1"))); + assertTrue( + nestedQueryDetector.isNested( + new SQLExprTableSource(new SQLIdentifierExpr("e.projects"), "p"))); + + nestedQueryDetector.add(new SQLExprTableSource(new SQLIdentifierExpr("e.projects"), "p")); + assertTrue(nestedQueryDetector.isNested(new SQLExprTableSource(new SQLIdentifierExpr("p")))); + } + + @Test + public void isNestedJoin() { + NestedQueryContext nestedQueryDetector = new NestedQueryContext(); + SQLJoinTableSource joinTableSource = new SQLJoinTableSource(); + joinTableSource.setLeft(new SQLExprTableSource(new SQLIdentifierExpr("employee"), "e")); + joinTableSource.setRight(new SQLExprTableSource(new SQLIdentifierExpr("e.projects"), "p")); + joinTableSource.setJoinType(JoinType.COMMA); + nestedQueryDetector.add(joinTableSource); + + assertFalse( + nestedQueryDetector.isNested(new SQLExprTableSource(new SQLIdentifierExpr("e"), "e1"))); + assertTrue( + nestedQueryDetector.isNested( + new SQLExprTableSource(new SQLIdentifierExpr("e.projects"), "p"))); + assertTrue(nestedQueryDetector.isNested(new SQLExprTableSource(new SQLIdentifierExpr("p")))); + } + + @Test + public void notNested() { + NestedQueryContext nestedQueryDetector = new NestedQueryContext(); + nestedQueryDetector.add(new SQLExprTableSource(new SQLIdentifierExpr("employee"), "e")); + nestedQueryDetector.add(new SQLExprTableSource(new SQLIdentifierExpr("projects"), "p")); + + assertFalse( + nestedQueryDetector.isNested(new SQLExprTableSource(new SQLIdentifierExpr("e"), "e1"))); + assertFalse(nestedQueryDetector.isNested(new SQLExprTableSource(new 
SQLIdentifierExpr("p")))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/SubQueryRewriteRuleTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/SubQueryRewriteRuleTest.java index a01988d965..20f4170a76 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/SubQueryRewriteRuleTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/SubQueryRewriteRuleTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.subquery; import static org.junit.Assert.assertFalse; @@ -18,67 +17,57 @@ public class SubQueryRewriteRuleTest { - final SubQueryRewriteRule rewriteRule = new SubQueryRewriteRule(); + final SubQueryRewriteRule rewriteRule = new SubQueryRewriteRule(); - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = ExpectedException.none(); - @Test - public void isInMatch() throws SQLFeatureNotSupportedException { - String sql = "SELECT * " + - "FROM A " + - "WHERE a IN (SELECT b FROM B)"; - assertTrue(rewriteRule.match(SqlParserUtils.parse(sql))); - } + @Test + public void isInMatch() throws SQLFeatureNotSupportedException { + String sql = "SELECT * " + "FROM A " + "WHERE a IN (SELECT b FROM B)"; + assertTrue(rewriteRule.match(SqlParserUtils.parse(sql))); + } - @Test - public void isNotInMatch() throws SQLFeatureNotSupportedException { - String sql = "SELECT * " + - "FROM A " + - "WHERE a NOT IN (SELECT b FROM B)"; - assertTrue(rewriteRule.match(SqlParserUtils.parse(sql))); - } + @Test + public void isNotInMatch() throws SQLFeatureNotSupportedException { + String sql = "SELECT * " + "FROM A " + "WHERE a NOT IN (SELECT b FROM B)"; + assertTrue(rewriteRule.match(SqlParserUtils.parse(sql))); + } - @Test - public void isExistsMatch() throws SQLFeatureNotSupportedException { - String sql = "SELECT * " + 
- "FROM A WHERE " + - "EXISTS (SELECT 1 FROM B WHERE A.a_v = B.b_v)"; - assertTrue(rewriteRule.match(SqlParserUtils.parse(sql))); - } + @Test + public void isExistsMatch() throws SQLFeatureNotSupportedException { + String sql = "SELECT * " + "FROM A WHERE " + "EXISTS (SELECT 1 FROM B WHERE A.a_v = B.b_v)"; + assertTrue(rewriteRule.match(SqlParserUtils.parse(sql))); + } - @Test - public void isNotExistsMatch() throws SQLFeatureNotSupportedException { - String sql = "SELECT * " + - "FROM A " + - "WHERE NOT EXISTS (SELECT 1 FROM B WHERE A.a_v = B.b_v)"; - assertTrue(rewriteRule.match(SqlParserUtils.parse(sql))); - } + @Test + public void isNotExistsMatch() throws SQLFeatureNotSupportedException { + String sql = "SELECT * " + "FROM A " + "WHERE NOT EXISTS (SELECT 1 FROM B WHERE A.a_v = B.b_v)"; + assertTrue(rewriteRule.match(SqlParserUtils.parse(sql))); + } - @Test - public void subQueryInSelectNotMatch() throws SQLFeatureNotSupportedException { - String sql = "SELECT A.v as v, (SELECT MAX(b) FROM B WHERE A.id = B.id) as max_age " + - "FROM A"; - assertFalse(rewriteRule.match(SqlParserUtils.parse(sql))); - } + @Test + public void subQueryInSelectNotMatch() throws SQLFeatureNotSupportedException { + String sql = "SELECT A.v as v, (SELECT MAX(b) FROM B WHERE A.id = B.id) as max_age " + "FROM A"; + assertFalse(rewriteRule.match(SqlParserUtils.parse(sql))); + } - @Test - public void moreThanOneInIsNotSupporeted() throws SQLFeatureNotSupportedException { - String sql = "SELECT * " + - "FROM A " + - "WHERE a IN (SELECT b FROM B) and d IN (SELECT e FROM F)"; - exceptionRule.expect(SQLFeatureNotSupportedException.class); - exceptionRule.expectMessage("Unsupported subquery. 
Only one EXISTS or IN is supported"); - rewriteRule.match(SqlParserUtils.parse(sql)); - } + @Test + public void moreThanOneInIsNotSupporeted() throws SQLFeatureNotSupportedException { + String sql = + "SELECT * " + "FROM A " + "WHERE a IN (SELECT b FROM B) and d IN (SELECT e FROM F)"; + exceptionRule.expect(SQLFeatureNotSupportedException.class); + exceptionRule.expectMessage("Unsupported subquery. Only one EXISTS or IN is supported"); + rewriteRule.match(SqlParserUtils.parse(sql)); + } - @Test - public void moreThanOneExistsIsNotSupporeted() throws SQLFeatureNotSupportedException { - String sql = "SELECT * " + - "FROM A WHERE " + - "EXISTS (SELECT 1 FROM B WHERE A.a_v = B.b_v) AND EXISTS (SELECT 1 FROM C)"; - exceptionRule.expect(SQLFeatureNotSupportedException.class); - exceptionRule.expectMessage("Unsupported subquery. Only one EXISTS or IN is supported"); - rewriteRule.match(SqlParserUtils.parse(sql)); - } + @Test + public void moreThanOneExistsIsNotSupporeted() throws SQLFeatureNotSupportedException { + String sql = + "SELECT * " + + "FROM A WHERE " + + "EXISTS (SELECT 1 FROM B WHERE A.a_v = B.b_v) AND EXISTS (SELECT 1 FROM C)"; + exceptionRule.expect(SQLFeatureNotSupportedException.class); + exceptionRule.expectMessage("Unsupported subquery. 
Only one EXISTS or IN is supported"); + rewriteRule.match(SqlParserUtils.parse(sql)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/SubQueryRewriterTestBase.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/SubQueryRewriterTestBase.java index 036d0fc86a..ef7098004f 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/SubQueryRewriterTestBase.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/SubQueryRewriterTestBase.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.subquery; import com.alibaba.druid.sql.SQLUtils; @@ -14,19 +13,19 @@ public abstract class SubQueryRewriterTestBase { - SQLQueryExpr expr(String query) { - return SqlParserUtils.parse(query); - } + SQLQueryExpr expr(String query) { + return SqlParserUtils.parse(query); + } - SQLQueryExpr rewrite(SQLQueryExpr expr) { - new SubQueryRewriteRule().rewrite(expr); - return expr; - } + SQLQueryExpr rewrite(SQLQueryExpr expr) { + new SubQueryRewriteRule().rewrite(expr); + return expr; + } - String sqlString(SQLObject expr) { - return SQLUtils.toMySqlString(expr) - .replaceAll("\n", " ") - .replaceAll("\t", " ") - .replaceAll(" +", " "); - } + String sqlString(SQLObject expr) { + return SQLUtils.toMySqlString(expr) + .replaceAll("\n", " ") + .replaceAll("\t", " ") + .replaceAll(" +", " "); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/rewriter/SubqueryAliasRewriterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/rewriter/SubqueryAliasRewriterTest.java index b729b7ad59..5c5bc40bda 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/rewriter/SubqueryAliasRewriterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/rewriter/SubqueryAliasRewriterTest.java 
@@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.subquery.rewriter; import static org.junit.Assert.assertEquals; @@ -17,121 +16,137 @@ public class SubqueryAliasRewriterTest { - @Test - public void testWithoutAlias() { - assertEquals( - sqlString(parse( - "SELECT TbA_0.* " + - "FROM TbA as TbA_0 " + - "WHERE TbA_0.a IN (SELECT TbB_1.b FROM TbB as TbB_1) and TbA_0.c > 10")), - sqlString(rewrite(parse( - "SELECT * " + - "FROM TbA " + - "WHERE a IN (SELECT b FROM TbB) and c > 10")))); - } - - @Test - public void testWithAlias() { - assertEquals( - sqlString(parse( - "SELECT A.* " + - "FROM TbA as A " + - "WHERE A.a IN (SELECT B.b FROM TbB as B) " + - "AND A.c > 10")), - sqlString(rewrite(parse( - "SELECT A.* " + - "FROM TbA as A " + - "WHERE A.a IN (SELECT B.b FROM TbB as B) " + - "AND A.c > 10")))); - } - - @Test - public void testOuterWithoutAliasInnerWithAlias() { - assertEquals( - sqlString(parse( - "SELECT TbA_0.* " + - "FROM TbA as TbA_0 " + - "WHERE TbA_0.a IN (SELECT TbB.b FROM TbB as TbB) " + - "AND TbA_0.c > 10")), - sqlString(rewrite(parse( - "SELECT * " + - "FROM TbA " + - "WHERE a IN (SELECT TbB.b FROM TbB as TbB) " + - "AND c > 10")))); - } - - @Test - public void testOuterWithoutAliasInnerMixAlias() { - String expect = - "SELECT TbA_0.* " + - "FROM TbA as TbA_0 " + - "WHERE TbA_0.a IN (SELECT B.b FROM TbB as B) " + - "AND TbA_0.c > 10"; - - assertEquals( - sqlString(parse(expect)), - sqlString(rewrite(parse( - "SELECT * " + - "FROM TbA " + - "WHERE a IN (SELECT b FROM TbB as B) " + - "AND c > 10")))); - - assertEquals( - sqlString(parse(expect)), - sqlString(rewrite(parse( - "SELECT * " + - "FROM TbA " + - "WHERE a IN (SELECT TbB.b FROM TbB as B) " + - "AND c > 10")))); - } - - @Test - public void testOuterWithAliasInnerWithoutAlias() { - assertEquals( - sqlString(parse( - "SELECT TbA.* " + - "FROM TbA as TbA " + - "WHERE TbA.a IN (SELECT TbB_0.b FROM TbB as TbB_0) " + - "AND TbA.c > 10")), - 
sqlString(rewrite(parse( - "SELECT TbA.* " + - "FROM TbA as TbA " + - "WHERE TbA.a IN (SELECT b FROM TbB ) " + - "AND TbA.c > 10")))); - } - - @Test - public void testOuterMixAliasInnerWithoutAlias() { - String expect = - "SELECT A.* " + - "FROM TbA as A " + - "WHERE A.a IN (SELECT TbB_0.b FROM TbB as TbB_0) " + - "AND A.c > 10"; - - assertEquals( - sqlString(parse(expect)), - sqlString(rewrite(parse( - "SELECT TbA.* " + - "FROM TbA as A " + - "WHERE a IN (SELECT b FROM TbB ) " + - "AND TbA.c > 10")))); - - assertEquals( - sqlString(parse(expect)), - sqlString(rewrite(parse( - "SELECT * " + - "FROM TbA as A " + - "WHERE TbA.a IN (SELECT b FROM TbB ) " + - "AND TbA.c > 10")))); - } - - - private String sqlString(SQLExpr expr) { - return SQLUtils.toMySqlString(expr); - } - - private SQLQueryExpr rewrite(SQLQueryExpr expr) { - expr.accept(new SubqueryAliasRewriter()); - return expr; - } + @Test + public void testWithoutAlias() { + assertEquals( + sqlString( + parse( + "SELECT TbA_0.* " + + "FROM TbA as TbA_0 " + + "WHERE TbA_0.a IN (SELECT TbB_1.b FROM TbB as TbB_1) and TbA_0.c > 10")), + sqlString( + rewrite( + parse("SELECT * " + "FROM TbA " + "WHERE a IN (SELECT b FROM TbB) and c > 10")))); + } + + @Test + public void testWithAlias() { + assertEquals( + sqlString( + parse( + "SELECT A.* " + + "FROM TbA as A " + + "WHERE A.a IN (SELECT B.b FROM TbB as B) " + + "AND A.c > 10")), + sqlString( + rewrite( + parse( + "SELECT A.* " + + "FROM TbA as A " + + "WHERE A.a IN (SELECT B.b FROM TbB as B) " + + "AND A.c > 10")))); + } + + @Test + public void testOuterWithoutAliasInnerWithAlias() { + assertEquals( + sqlString( + parse( + "SELECT TbA_0.* " + + "FROM TbA as TbA_0 " + + "WHERE TbA_0.a IN (SELECT TbB.b FROM TbB as TbB) " + + "AND TbA_0.c > 10")), + sqlString( + rewrite( + parse( + "SELECT * " + + "FROM TbA " + + "WHERE a IN (SELECT TbB.b FROM TbB as TbB) " + + "AND c > 10")))); + } + + @Test + public void testOuterWithoutAliasInnerMixAlias() { + String expect = + 
"SELECT TbA_0.* " + + "FROM TbA as TbA_0 " + + "WHERE TbA_0.a IN (SELECT B.b FROM TbB as B) " + + "AND TbA_0.c > 10"; + + assertEquals( + sqlString(parse(expect)), + sqlString( + rewrite( + parse( + "SELECT * " + + "FROM TbA " + + "WHERE a IN (SELECT b FROM TbB as B) " + + "AND c > 10")))); + + assertEquals( + sqlString(parse(expect)), + sqlString( + rewrite( + parse( + "SELECT * " + + "FROM TbA " + + "WHERE a IN (SELECT TbB.b FROM TbB as B) " + + "AND c > 10")))); + } + + @Test + public void testOuterWithAliasInnerWithoutAlias() { + assertEquals( + sqlString( + parse( + "SELECT TbA.* " + + "FROM TbA as TbA " + + "WHERE TbA.a IN (SELECT TbB_0.b FROM TbB as TbB_0) " + + "AND TbA.c > 10")), + sqlString( + rewrite( + parse( + "SELECT TbA.* " + + "FROM TbA as TbA " + + "WHERE TbA.a IN (SELECT b FROM TbB ) " + + "AND TbA.c > 10")))); + } + + @Test + public void testOuterMixAliasInnerWithoutAlias() { + String expect = + "SELECT A.* " + + "FROM TbA as A " + + "WHERE A.a IN (SELECT TbB_0.b FROM TbB as TbB_0) " + + "AND A.c > 10"; + + assertEquals( + sqlString(parse(expect)), + sqlString( + rewrite( + parse( + "SELECT TbA.* " + + "FROM TbA as A " + + "WHERE a IN (SELECT b FROM TbB ) " + + "AND TbA.c > 10")))); + + assertEquals( + sqlString(parse(expect)), + sqlString( + rewrite( + parse( + "SELECT * " + + "FROM TbA as A " + + "WHERE TbA.a IN (SELECT b FROM TbB ) " + + "AND TbA.c > 10")))); + } + + private String sqlString(SQLExpr expr) { + return SQLUtils.toMySqlString(expr); + } + + private SQLQueryExpr rewrite(SQLQueryExpr expr) { + expr.accept(new SubqueryAliasRewriter()); + return expr; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/utils/FindSubQueryTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/utils/FindSubQueryTest.java index 34a915ac2b..7f97e601d6 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/utils/FindSubQueryTest.java +++ 
b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/subquery/utils/FindSubQueryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.subquery.utils; import static org.junit.Assert.assertEquals; @@ -16,36 +15,34 @@ public class FindSubQueryTest { - @Test - public void hasInSubQuery() { - FindSubQuery findSubQuery = new FindSubQuery(); - - parse("SELECT * FROM TbA " + - "WHERE a in (SELECT b FROM TbB)").accept(findSubQuery); - assertTrue(findSubQuery.hasSubQuery()); - assertFalse(findSubQuery.getSqlInSubQueryExprs().isEmpty()); - assertEquals(1, findSubQuery.getSqlInSubQueryExprs().size()); - } - - @Test - public void hasExistSubQuery() { - FindSubQuery findSubQuery = new FindSubQuery(); - - parse("SELECT * FROM TbA " + - "WHERE EXISTS (SELECT * FROM TbB)").accept(findSubQuery); - assertTrue(findSubQuery.hasSubQuery()); - assertFalse(findSubQuery.getSqlExistsExprs().isEmpty()); - assertEquals(1, findSubQuery.getSqlExistsExprs().size()); - } - - @Test - public void stopVisitWhenFound() { - FindSubQuery findSubQuery = new FindSubQuery().continueVisitWhenFound(false); - - parse("SELECT * FROM TbA " + - "WHERE a in (SELECT b FROM TbB WHERE b2 in (SELECT c FROM Tbc))").accept(findSubQuery); - assertTrue(findSubQuery.hasSubQuery()); - assertFalse(findSubQuery.getSqlInSubQueryExprs().isEmpty()); - assertEquals(1, findSubQuery.getSqlInSubQueryExprs().size()); - } + @Test + public void hasInSubQuery() { + FindSubQuery findSubQuery = new FindSubQuery(); + + parse("SELECT * FROM TbA " + "WHERE a in (SELECT b FROM TbB)").accept(findSubQuery); + assertTrue(findSubQuery.hasSubQuery()); + assertFalse(findSubQuery.getSqlInSubQueryExprs().isEmpty()); + assertEquals(1, findSubQuery.getSqlInSubQueryExprs().size()); + } + + @Test + public void hasExistSubQuery() { + FindSubQuery findSubQuery = new FindSubQuery(); + + parse("SELECT * FROM TbA " + "WHERE EXISTS (SELECT * FROM TbB)").accept(findSubQuery); + 
assertTrue(findSubQuery.hasSubQuery()); + assertFalse(findSubQuery.getSqlExistsExprs().isEmpty()); + assertEquals(1, findSubQuery.getSqlExistsExprs().size()); + } + + @Test + public void stopVisitWhenFound() { + FindSubQuery findSubQuery = new FindSubQuery().continueVisitWhenFound(false); + + parse("SELECT * FROM TbA " + "WHERE a in (SELECT b FROM TbB WHERE b2 in (SELECT c FROM Tbc))") + .accept(findSubQuery); + assertTrue(findSubQuery.hasSubQuery()); + assertFalse(findSubQuery.getSqlInSubQueryExprs().isEmpty()); + assertEquals(1, findSubQuery.getSqlInSubQueryExprs().size()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/term/TermFieldRewriterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/term/TermFieldRewriterTest.java index d001e0e1d0..44d3e2cbc0 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/term/TermFieldRewriterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/rewriter/term/TermFieldRewriterTest.java @@ -3,10 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.rewriter.term; - import static org.hamcrest.MatcherAssert.assertThat; import static org.opensearch.sql.legacy.util.MultipleIndexClusterUtils.mockMultipleIndexEnv; @@ -22,88 +20,93 @@ import org.opensearch.sql.legacy.util.SqlParserUtils; public class TermFieldRewriterTest { - @Rule - public ExpectedException exception = ExpectedException.none(); - - @Before - public void setup() { - mockMultipleIndexEnv(); - } - - @Test - public void testFromSubqueryShouldPass() { - String sql = "SELECT t.age as a FROM (SELECT age FROM account1 WHERE address = 'sea') t"; - String expected = "SELECT t.age as a FROM (SELECT age FROM account1 WHERE address.keyword = 'sea') t"; - - assertThat(rewriteTerm(sql), - MatcherUtils.IsEqualIgnoreCaseAndWhiteSpace.equalToIgnoreCaseAndWhiteSpace(expected)); - } - - @Test - public void testFromSubqueryWithoutTermShouldPass() { 
- String sql = "SELECT t.age as a FROM (SELECT age FROM account1 WHERE age = 10) t"; - String expected = sql; - - assertThat(rewriteTerm(sql), - MatcherUtils.IsEqualIgnoreCaseAndWhiteSpace.equalToIgnoreCaseAndWhiteSpace(expected)); - } - - @Test - public void testFieldShouldBeRewritten() { - String sql = "SELECT age FROM account1 WHERE address = 'sea'"; - String expected = "SELECT age FROM account1 WHERE address.keyword = 'sea'"; - - assertThat(rewriteTerm(sql), - MatcherUtils.IsEqualIgnoreCaseAndWhiteSpace.equalToIgnoreCaseAndWhiteSpace(expected)); - } - - @Test - public void testSelectTheFieldWithCompatibleMappingShouldPass() { - String sql = "SELECT id FROM account* WHERE id = 10"; - String expected = sql; - - assertThat(rewriteTerm(sql), - MatcherUtils.IsEqualIgnoreCaseAndWhiteSpace.equalToIgnoreCaseAndWhiteSpace(expected)); - } - - @Test - public void testSelectTheFieldOnlyInOneIndexShouldPass() { - String sql = "SELECT address FROM account*"; - String expected = sql; - - assertThat(rewriteTerm(sql), - MatcherUtils.IsEqualIgnoreCaseAndWhiteSpace.equalToIgnoreCaseAndWhiteSpace(expected)); - } - - /** - * Ideally, it should fail. There are two reasons we didn't cover it now. - * 1. The semantic check already done that. - * 2. The {@link TermFieldRewriter} didn't touch allcolumn case. 
- */ - @Test - public void testSelectAllFieldWithConflictMappingShouldPass() { - String sql = "SELECT * FROM account*"; - String expected = sql; - - assertThat(rewriteTerm(sql), - MatcherUtils.IsEqualIgnoreCaseAndWhiteSpace.equalToIgnoreCaseAndWhiteSpace(expected)); - } - - @Test - public void testSelectTheFieldWithConflictMappingShouldThrowException() { - String sql = "SELECT age FROM account* WHERE age = 10"; - exception.expect(VerificationException.class); - exception.expectMessage("Different mappings are not allowed for the same field[age]"); - rewriteTerm(sql); - } - - private String rewriteTerm(String sql) { - SQLQueryExpr sqlQueryExpr = SqlParserUtils.parse(sql); - sqlQueryExpr.accept(new TermFieldRewriter()); - return SQLUtils.toMySqlString(sqlQueryExpr) - .replaceAll("[\\n\\t]+", " ") - .replaceAll("^\\(", " ") - .replaceAll("\\)$", " ") - .trim(); - } + @Rule public ExpectedException exception = ExpectedException.none(); + + @Before + public void setup() { + mockMultipleIndexEnv(); + } + + @Test + public void testFromSubqueryShouldPass() { + String sql = "SELECT t.age as a FROM (SELECT age FROM account1 WHERE address = 'sea') t"; + String expected = + "SELECT t.age as a FROM (SELECT age FROM account1 WHERE address.keyword = 'sea') t"; + + assertThat( + rewriteTerm(sql), + MatcherUtils.IsEqualIgnoreCaseAndWhiteSpace.equalToIgnoreCaseAndWhiteSpace(expected)); + } + + @Test + public void testFromSubqueryWithoutTermShouldPass() { + String sql = "SELECT t.age as a FROM (SELECT age FROM account1 WHERE age = 10) t"; + String expected = sql; + + assertThat( + rewriteTerm(sql), + MatcherUtils.IsEqualIgnoreCaseAndWhiteSpace.equalToIgnoreCaseAndWhiteSpace(expected)); + } + + @Test + public void testFieldShouldBeRewritten() { + String sql = "SELECT age FROM account1 WHERE address = 'sea'"; + String expected = "SELECT age FROM account1 WHERE address.keyword = 'sea'"; + + assertThat( + rewriteTerm(sql), + 
MatcherUtils.IsEqualIgnoreCaseAndWhiteSpace.equalToIgnoreCaseAndWhiteSpace(expected)); + } + + @Test + public void testSelectTheFieldWithCompatibleMappingShouldPass() { + String sql = "SELECT id FROM account* WHERE id = 10"; + String expected = sql; + + assertThat( + rewriteTerm(sql), + MatcherUtils.IsEqualIgnoreCaseAndWhiteSpace.equalToIgnoreCaseAndWhiteSpace(expected)); + } + + @Test + public void testSelectTheFieldOnlyInOneIndexShouldPass() { + String sql = "SELECT address FROM account*"; + String expected = sql; + + assertThat( + rewriteTerm(sql), + MatcherUtils.IsEqualIgnoreCaseAndWhiteSpace.equalToIgnoreCaseAndWhiteSpace(expected)); + } + + /** + * Ideally, it should fail. There are two reasons we didn't cover it now. 1. The semantic check + * already done that. 2. The {@link TermFieldRewriter} didn't touch allcolumn case. + */ + @Test + public void testSelectAllFieldWithConflictMappingShouldPass() { + String sql = "SELECT * FROM account*"; + String expected = sql; + + assertThat( + rewriteTerm(sql), + MatcherUtils.IsEqualIgnoreCaseAndWhiteSpace.equalToIgnoreCaseAndWhiteSpace(expected)); + } + + @Test + public void testSelectTheFieldWithConflictMappingShouldThrowException() { + String sql = "SELECT age FROM account* WHERE age = 10"; + exception.expect(VerificationException.class); + exception.expectMessage("Different mappings are not allowed for the same field[age]"); + rewriteTerm(sql); + } + + private String rewriteTerm(String sql) { + SQLQueryExpr sqlQueryExpr = SqlParserUtils.parse(sql); + sqlQueryExpr.accept(new TermFieldRewriter()); + return SQLUtils.toMySqlString(sqlQueryExpr) + .replaceAll("[\\n\\t]+", " ") + .replaceAll("^\\(", " ") + .replaceAll("\\)$", " ") + .trim(); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/spatial/WktToGeoJsonConverterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/spatial/WktToGeoJsonConverterTest.java index 24889ff3ca..e63c60467f 100644 --- 
a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/spatial/WktToGeoJsonConverterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/spatial/WktToGeoJsonConverterTest.java @@ -3,181 +3,206 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.spatial; - import org.junit.Assert; import org.junit.Test; import org.opensearch.sql.legacy.spatial.WktToGeoJsonConverter; -/** - * Created by Eliran on 4/8/2015. - */ +/** Created by Eliran on 4/8/2015. */ public class WktToGeoJsonConverterTest { - @Test - public void convertPoint_NoRedundantSpaces_ShouldConvert(){ - String wkt = "POINT(12.3 13.3)"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"Point\", \"coordinates\": [12.3,13.3]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertPoint_WithRedundantSpaces_ShouldConvert(){ - String wkt = " POINT ( 12.3 13.3 ) "; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"Point\", \"coordinates\": [12.3,13.3]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertPoint_RoundNumbers_ShouldConvert(){ - String wkt = "POINT(12 13)"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"Point\", \"coordinates\": [12,13]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertPoint_FirstIsRoundNumber_ShouldConvert(){ - String wkt = "POINT(12 13.3)"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"Point\", \"coordinates\": [12,13.3]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertPoint_SecondIsRoundNumber_ShouldConvert(){ - String wkt = "POINT(12.2 13)"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"Point\", \"coordinates\": [12.2,13]}"; - 
Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertPoint_NegativeCoordinates_ShouldConvert(){ - String wkt = "POINT(-12.2 13)"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"Point\", \"coordinates\": [-12.2,13]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertPolygon_NoRedundantSpaces_ShouldConvert(){ - String wkt = "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"Polygon\", \"coordinates\": [[[30,10],[40,40],[20,40],[10,20],[30,10]]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertPolygon_NegativeCoordinates_ShouldConvert(){ - String wkt = "POLYGON ((-30 10, 40 40, 20 40, 10 20, -30 10))"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"Polygon\", \"coordinates\": [[[-30,10],[40,40],[20,40],[10,20],[-30,10]]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertPolygon_WithRedundantSpaces_ShouldConvert(){ - String wkt = " POLYGON ( (30 10, 40 40 , 20 40, 10 20, 30 10 ) ) "; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"Polygon\", \"coordinates\": [[[30,10],[40,40],[20,40],[10,20],[30,10]]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertPolygonWithHole_NoRedundantSpaces_ShouldConvert(){ - String wkt = "POLYGON ((35 10, 45 45, 15 40, 10 20, 35 10),(20 30, 35 35, 30 20, 20 30))"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"Polygon\", \"coordinates\": [[[35,10],[45,45],[15,40],[10,20],[35,10]],[[20,30],[35,35],[30,20],[20,30]]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertPolygonWithHole_WithRedundantSpaces_ShouldConvert(){ - String wkt = "POLYGON ( (35 10, 
45 45, 15 40, 10 20, 35 10 ), (20 30 , 35 35, 30 20, 20 30 ) ) "; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"Polygon\", \"coordinates\": [[[35,10],[45,45],[15,40],[10,20],[35,10]],[[20,30],[35,35],[30,20],[20,30]]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertLineString_NoRedundantSpaces_ShouldConvert(){ - String wkt = "LINESTRING (30 10, 10 30, 40 40)"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"LineString\", \"coordinates\": [[30,10],[10,30],[40,40]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertLineString_NegativeCoordinates_ShouldConvert(){ - String wkt = "LINESTRING (-30 10, 10 30, 40 40)"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"LineString\", \"coordinates\": [[-30,10],[10,30],[40,40]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertLineString_WithRedundantSpaces_ShouldConvert(){ - String wkt = "LINESTRING ( 30 10, 10 30 , 40 40 )"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"LineString\", \"coordinates\": [[30,10],[10,30],[40,40]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertMultiPolygon_NoRedundantSpaces_ShouldConvert(){ - String wkt = "MULTIPOLYGON (((30 20, 45 40, 10 40, 30 20)),((15 5, 40 10, 10 20, 5 10, 15 5)))"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"MultiPolygon\", \"coordinates\": [[[[30,20],[45,40],[10,40],[30,20]]],[[[15,5],[40,10],[10,20],[5,10],[15,5]]]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - @Test - public void convertMultiPolygon_WithRedundantSpaces_ShouldConvert(){ - String wkt = "MULTIPOLYGON ( ((30 20, 45 40, 10 40, 30 20) ) , ((15 5, 40 10, 10 20, 5 10, 15 5)))"; - String geoJson = 
WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"MultiPolygon\", \"coordinates\": [[[[30,20],[45,40],[10,40],[30,20]]],[[[15,5],[40,10],[10,20],[5,10],[15,5]]]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - @Test - public void convertMultiPolygon_OnePolygonHaveHoles_ShouldConvert(){ - String wkt = "MULTIPOLYGON (((30 20, 45 40, 10 40, 30 20),(20 30, 35 35, 30 20, 20 30)),((15 5, 40 10, 10 20, 5 10, 15 5)))"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"MultiPolygon\", \"coordinates\": [[[[30,20],[45,40],[10,40],[30,20]],[[20,30],[35,35],[30,20],[20,30]]],[[[15,5],[40,10],[10,20],[5,10],[15,5]]]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertMultiPoint_V1_ShouldConvert(){ - String wkt = "MULTIPOINT (10 40, 40 30, 20 20, 30 10)"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"MultiPoint\", \"coordinates\": [[10,40],[40,30],[20,20],[30,10]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertMultiPoint_V2_ShouldConvert(){ - String wkt = "MULTIPOINT ((10 40), (40 30), (20 20), (30 10))"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"MultiPoint\", \"coordinates\": [[10,40],[40,30],[20,20],[30,10]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - - @Test - public void convertMultiLineString_NoRedundantSpaces_ShouldConvert(){ - String wkt = "MULTILINESTRING ((10 10, 20 20, 10 40),(40 40, 30 30, 40 20, 30 10))"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"MultiLineString\", \"coordinates\": [[[10,10],[20,20],[10,40]],[[40,40],[30,30],[40,20],[30,10]]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } - @Test - public void convertMultiLineString_WithRedundantSpaces_ShouldConvert(){ - String wkt = "MULTILINESTRING ( (10 10, 20 20, 10 40 ) , (40 40, 
30 30, 40 20, 30 10))"; - String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); - String expectedGeoJson = "{\"type\":\"MultiLineString\", \"coordinates\": [[[10,10],[20,20],[10,40]],[[40,40],[30,30],[40,20],[30,10]]]}"; - Assert.assertEquals(expectedGeoJson,geoJson); - } + @Test + public void convertPoint_NoRedundantSpaces_ShouldConvert() { + String wkt = "POINT(12.3 13.3)"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = "{\"type\":\"Point\", \"coordinates\": [12.3,13.3]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertPoint_WithRedundantSpaces_ShouldConvert() { + String wkt = " POINT ( 12.3 13.3 ) "; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = "{\"type\":\"Point\", \"coordinates\": [12.3,13.3]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertPoint_RoundNumbers_ShouldConvert() { + String wkt = "POINT(12 13)"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = "{\"type\":\"Point\", \"coordinates\": [12,13]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertPoint_FirstIsRoundNumber_ShouldConvert() { + String wkt = "POINT(12 13.3)"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = "{\"type\":\"Point\", \"coordinates\": [12,13.3]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertPoint_SecondIsRoundNumber_ShouldConvert() { + String wkt = "POINT(12.2 13)"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = "{\"type\":\"Point\", \"coordinates\": [12.2,13]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertPoint_NegativeCoordinates_ShouldConvert() { + String wkt = "POINT(-12.2 13)"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = "{\"type\":\"Point\", \"coordinates\": 
[-12.2,13]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertPolygon_NoRedundantSpaces_ShouldConvert() { + String wkt = "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"Polygon\", \"coordinates\": [[[30,10],[40,40],[20,40],[10,20],[30,10]]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertPolygon_NegativeCoordinates_ShouldConvert() { + String wkt = "POLYGON ((-30 10, 40 40, 20 40, 10 20, -30 10))"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"Polygon\", \"coordinates\": [[[-30,10],[40,40],[20,40],[10,20],[-30,10]]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertPolygon_WithRedundantSpaces_ShouldConvert() { + String wkt = " POLYGON ( (30 10, 40 40 , 20 40, 10 20, 30 10 ) ) "; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"Polygon\", \"coordinates\": [[[30,10],[40,40],[20,40],[10,20],[30,10]]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertPolygonWithHole_NoRedundantSpaces_ShouldConvert() { + String wkt = "POLYGON ((35 10, 45 45, 15 40, 10 20, 35 10),(20 30, 35 35, 30 20, 20 30))"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"Polygon\", \"coordinates\":" + + " [[[35,10],[45,45],[15,40],[10,20],[35,10]],[[20,30],[35,35],[30,20],[20,30]]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertPolygonWithHole_WithRedundantSpaces_ShouldConvert() { + String wkt = + "POLYGON ( (35 10, 45 45, 15 40, 10 20, 35 10 ), (20 30 , 35 35, 30 20, 20 30 ) ) "; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"Polygon\", \"coordinates\":" + + " 
[[[35,10],[45,45],[15,40],[10,20],[35,10]],[[20,30],[35,35],[30,20],[20,30]]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertLineString_NoRedundantSpaces_ShouldConvert() { + String wkt = "LINESTRING (30 10, 10 30, 40 40)"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"LineString\", \"coordinates\": [[30,10],[10,30],[40,40]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertLineString_NegativeCoordinates_ShouldConvert() { + String wkt = "LINESTRING (-30 10, 10 30, 40 40)"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"LineString\", \"coordinates\": [[-30,10],[10,30],[40,40]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertLineString_WithRedundantSpaces_ShouldConvert() { + String wkt = "LINESTRING ( 30 10, 10 30 , 40 40 )"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"LineString\", \"coordinates\": [[30,10],[10,30],[40,40]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertMultiPolygon_NoRedundantSpaces_ShouldConvert() { + String wkt = "MULTIPOLYGON (((30 20, 45 40, 10 40, 30 20)),((15 5, 40 10, 10 20, 5 10, 15 5)))"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"MultiPolygon\", \"coordinates\":" + + " [[[[30,20],[45,40],[10,40],[30,20]]],[[[15,5],[40,10],[10,20],[5,10],[15,5]]]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertMultiPolygon_WithRedundantSpaces_ShouldConvert() { + String wkt = + "MULTIPOLYGON ( ((30 20, 45 40, 10 40, 30 20) ) , ((15 5, 40 10, 10 20, 5 10, 15 5)))"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"MultiPolygon\", \"coordinates\":" + + " 
[[[[30,20],[45,40],[10,40],[30,20]]],[[[15,5],[40,10],[10,20],[5,10],[15,5]]]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertMultiPolygon_OnePolygonHaveHoles_ShouldConvert() { + String wkt = + "MULTIPOLYGON (((30 20, 45 40, 10 40, 30 20),(20 30, 35 35, 30 20, 20 30)),((15 5, 40 10," + + " 10 20, 5 10, 15 5)))"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"MultiPolygon\", \"coordinates\":" + + " [[[[30,20],[45,40],[10,40],[30,20]],[[20,30],[35,35],[30,20],[20,30]]],[[[15,5],[40,10],[10,20],[5,10],[15,5]]]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertMultiPoint_V1_ShouldConvert() { + String wkt = "MULTIPOINT (10 40, 40 30, 20 20, 30 10)"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"MultiPoint\", \"coordinates\": [[10,40],[40,30],[20,20],[30,10]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertMultiPoint_V2_ShouldConvert() { + String wkt = "MULTIPOINT ((10 40), (40 30), (20 20), (30 10))"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"MultiPoint\", \"coordinates\": [[10,40],[40,30],[20,20],[30,10]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertMultiLineString_NoRedundantSpaces_ShouldConvert() { + String wkt = "MULTILINESTRING ((10 10, 20 20, 10 40),(40 40, 30 30, 40 20, 30 10))"; + String geoJson = WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"MultiLineString\", \"coordinates\":" + + " [[[10,10],[20,20],[10,40]],[[40,40],[30,30],[40,20],[30,10]]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } + + @Test + public void convertMultiLineString_WithRedundantSpaces_ShouldConvert() { + String wkt = "MULTILINESTRING ( (10 10, 20 20, 10 40 ) , (40 40, 30 30, 40 20, 30 10))"; + String geoJson = 
WktToGeoJsonConverter.toGeoJson(wkt); + String expectedGeoJson = + "{\"type\":\"MultiLineString\", \"coordinates\":" + + " [[[10,10],[20,20],[10,40]],[[40,40],[30,30],[40,20],[30,10]]]}"; + Assert.assertEquals(expectedGeoJson, geoJson); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/BackticksUnquoterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/BackticksUnquoterTest.java index b0c6b8a2d8..c7e7f22d5c 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/BackticksUnquoterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/BackticksUnquoterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.utils; import static org.hamcrest.MatcherAssert.assertThat; @@ -15,28 +14,29 @@ import org.opensearch.sql.legacy.utils.StringUtils; /** - * To test the functionality of {@link StringUtils#unquoteSingleField} - * and {@link StringUtils#unquoteFullColumn(String, String)} + * To test the functionality of {@link StringUtils#unquoteSingleField} and {@link + * StringUtils#unquoteFullColumn(String, String)} */ public class BackticksUnquoterTest { - @Test - public void assertNotQuotedStringShouldKeepTheSame() { - assertThat(unquoteSingleField("identifier"), equalTo("identifier")); - assertThat(unquoteFullColumn("identifier"), equalTo("identifier")); - } - - @Test - public void assertStringWithOneBackTickShouldKeepTheSame() { - assertThat(unquoteSingleField("`identifier"), equalTo("`identifier")); - assertThat(unquoteFullColumn("`identifier"), equalTo("`identifier")); - } - - @Test - public void assertBackticksQuotedStringShouldBeUnquoted() { - assertThat("identifier", equalTo(unquoteSingleField("`identifier`"))); - - assertThat("identifier1.identifier2", equalTo(unquoteFullColumn("`identifier1`.`identifier2`"))); - assertThat("identifier1.identifier2", equalTo(unquoteFullColumn("`identifier1`.identifier2"))); - } + @Test + 
public void assertNotQuotedStringShouldKeepTheSame() { + assertThat(unquoteSingleField("identifier"), equalTo("identifier")); + assertThat(unquoteFullColumn("identifier"), equalTo("identifier")); + } + + @Test + public void assertStringWithOneBackTickShouldKeepTheSame() { + assertThat(unquoteSingleField("`identifier"), equalTo("`identifier")); + assertThat(unquoteFullColumn("`identifier"), equalTo("`identifier")); + } + + @Test + public void assertBackticksQuotedStringShouldBeUnquoted() { + assertThat("identifier", equalTo(unquoteSingleField("`identifier`"))); + + assertThat( + "identifier1.identifier2", equalTo(unquoteFullColumn("`identifier1`.`identifier2`"))); + assertThat("identifier1.identifier2", equalTo(unquoteFullColumn("`identifier1`.identifier2"))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/PrettyFormatterTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/PrettyFormatterTest.java index f876b14110..68ad891020 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/PrettyFormatterTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/PrettyFormatterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.utils; import static org.hamcrest.MatcherAssert.assertThat; @@ -19,42 +18,45 @@ public class PrettyFormatterTest { - @Test - public void assertFormatterWithoutContentInside() throws IOException { - String noContentInput = "{ }"; - String expectedOutput = "{ }"; - String result = JsonPrettyFormatter.format(noContentInput); - assertThat(result, equalTo(expectedOutput)); - } - - @Test - public void assertFormatterOutputsPrettyJson() throws IOException { - String explainFormattedPrettyFilePath = TestUtils.getResourceFilePath( - "/src/test/resources/expectedOutput/explain_format_pretty.json"); - String explainFormattedPretty = Files.toString(new File(explainFormattedPrettyFilePath), StandardCharsets.UTF_8) 
- .replaceAll("\r", ""); - - String explainFormattedOnelineFilePath = TestUtils.getResourceFilePath( - "/src/test/resources/explain_format_oneline.json"); - String explainFormattedOneline = Files.toString(new File(explainFormattedOnelineFilePath), StandardCharsets.UTF_8) - .replaceAll("\r", ""); - String result = JsonPrettyFormatter.format(explainFormattedOneline); - - assertThat(result, equalTo(explainFormattedPretty)); - } - - @Test(expected = IOException.class) - public void illegalInputOfNull() throws IOException { - JsonPrettyFormatter.format(""); - } - - @Test(expected = IOException.class) - public void illegalInputOfUnpairedBrace() throws IOException { - JsonPrettyFormatter.format("{\"key\" : \"value\""); - } - - @Test(expected = IOException.class) - public void illegalInputOfWrongBraces() throws IOException { - JsonPrettyFormatter.format("<\"key\" : \"value\">"); - } + @Test + public void assertFormatterWithoutContentInside() throws IOException { + String noContentInput = "{ }"; + String expectedOutput = "{ }"; + String result = JsonPrettyFormatter.format(noContentInput); + assertThat(result, equalTo(expectedOutput)); + } + + @Test + public void assertFormatterOutputsPrettyJson() throws IOException { + String explainFormattedPrettyFilePath = + TestUtils.getResourceFilePath( + "/src/test/resources/expectedOutput/explain_format_pretty.json"); + String explainFormattedPretty = + Files.toString(new File(explainFormattedPrettyFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); + + String explainFormattedOnelineFilePath = + TestUtils.getResourceFilePath("/src/test/resources/explain_format_oneline.json"); + String explainFormattedOneline = + Files.toString(new File(explainFormattedOnelineFilePath), StandardCharsets.UTF_8) + .replaceAll("\r", ""); + String result = JsonPrettyFormatter.format(explainFormattedOneline); + + assertThat(result, equalTo(explainFormattedPretty)); + } + + @Test(expected = IOException.class) + public void illegalInputOfNull() throws 
IOException { + JsonPrettyFormatter.format(""); + } + + @Test(expected = IOException.class) + public void illegalInputOfUnpairedBrace() throws IOException { + JsonPrettyFormatter.format("{\"key\" : \"value\""); + } + + @Test(expected = IOException.class) + public void illegalInputOfWrongBraces() throws IOException { + JsonPrettyFormatter.format("<\"key\" : \"value\">"); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/QueryContextTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/QueryContextTest.java index 55b78af0d7..5dbda8cb92 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/QueryContextTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/QueryContextTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.utils; import static org.hamcrest.Matchers.equalTo; @@ -18,56 +17,57 @@ public class QueryContextTest { - private static final String REQUEST_ID_KEY = "request_id"; + private static final String REQUEST_ID_KEY = "request_id"; - @After - public void cleanUpContext() { + @After + public void cleanUpContext() { - ThreadContext.clearMap(); - } + ThreadContext.clearMap(); + } - @Test - public void addRequestId() { + @Test + public void addRequestId() { - Assert.assertNull(ThreadContext.get(REQUEST_ID_KEY)); - QueryContext.addRequestId(); - final String requestId = ThreadContext.get(REQUEST_ID_KEY); - Assert.assertNotNull(requestId); - } + Assert.assertNull(ThreadContext.get(REQUEST_ID_KEY)); + QueryContext.addRequestId(); + final String requestId = ThreadContext.get(REQUEST_ID_KEY); + Assert.assertNotNull(requestId); + } - @Test - public void addRequestId_alreadyExists() { + @Test + public void addRequestId_alreadyExists() { - QueryContext.addRequestId(); - final String requestId = ThreadContext.get(REQUEST_ID_KEY); - QueryContext.addRequestId(); - final String requestId2 = 
ThreadContext.get(REQUEST_ID_KEY); - Assert.assertThat(requestId2, not(equalTo(requestId))); - } + QueryContext.addRequestId(); + final String requestId = ThreadContext.get(REQUEST_ID_KEY); + QueryContext.addRequestId(); + final String requestId2 = ThreadContext.get(REQUEST_ID_KEY); + Assert.assertThat(requestId2, not(equalTo(requestId))); + } - @Test - public void getRequestId_doesNotExist() { - assertNotNull(QueryContext.getRequestId()); - } + @Test + public void getRequestId_doesNotExist() { + assertNotNull(QueryContext.getRequestId()); + } - @Test - public void getRequestId() { + @Test + public void getRequestId() { - final String test_request_id = "test_id_111"; - ThreadContext.put(REQUEST_ID_KEY, test_request_id); - final String requestId = QueryContext.getRequestId(); - Assert.assertThat(requestId, equalTo(test_request_id)); - } + final String test_request_id = "test_id_111"; + ThreadContext.put(REQUEST_ID_KEY, test_request_id); + final String requestId = QueryContext.getRequestId(); + Assert.assertThat(requestId, equalTo(test_request_id)); + } - @Test - public void withCurrentContext() throws InterruptedException { + @Test + public void withCurrentContext() throws InterruptedException { - Runnable task = () -> { - Assert.assertTrue(ThreadContext.containsKey("test11")); - Assert.assertTrue(ThreadContext.containsKey("test22")); + Runnable task = + () -> { + Assert.assertTrue(ThreadContext.containsKey("test11")); + Assert.assertTrue(ThreadContext.containsKey("test22")); }; - ThreadContext.put("test11", "value11"); - ThreadContext.put("test22", "value11"); - new Thread(QueryContext.withCurrentContext(task)).join(); - } + ThreadContext.put("test11", "value11"); + ThreadContext.put("test22", "value11"); + new Thread(QueryContext.withCurrentContext(task)).join(); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/QueryDataAnonymizerTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/QueryDataAnonymizerTest.java 
index ca95b547a9..073fec61e7 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/QueryDataAnonymizerTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/QueryDataAnonymizerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.utils; import org.junit.Assert; @@ -12,78 +11,84 @@ public class QueryDataAnonymizerTest { - @Test - public void queriesShouldHaveAnonymousFieldAndIndex() { - String query = "SELECT ABS(balance) FROM accounts WHERE age > 30 GROUP BY ABS(balance)"; - String expectedQuery = "( SELECT ABS(identifier) FROM table WHERE identifier > number GROUP BY ABS(identifier) )"; - Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); - } + @Test + public void queriesShouldHaveAnonymousFieldAndIndex() { + String query = "SELECT ABS(balance) FROM accounts WHERE age > 30 GROUP BY ABS(balance)"; + String expectedQuery = + "( SELECT ABS(identifier) FROM table WHERE identifier > number GROUP BY ABS(identifier) )"; + Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); + } - @Test - public void queriesShouldAnonymousNumbers() { - String query = "SELECT ABS(20), LOG(20.20) FROM accounts"; - String expectedQuery = "( SELECT ABS(number), LOG(number) FROM table )"; - Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); - } + @Test + public void queriesShouldAnonymousNumbers() { + String query = "SELECT ABS(20), LOG(20.20) FROM accounts"; + String expectedQuery = "( SELECT ABS(number), LOG(number) FROM table )"; + Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); + } - @Test - public void queriesShouldHaveAnonymousBooleanLiterals() { - String query = "SELECT TRUE FROM accounts"; - String expectedQuery = "( SELECT boolean_literal FROM table )"; - Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); - } + @Test + public void 
queriesShouldHaveAnonymousBooleanLiterals() { + String query = "SELECT TRUE FROM accounts"; + String expectedQuery = "( SELECT boolean_literal FROM table )"; + Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); + } - @Test - public void queriesShouldHaveAnonymousInputStrings() { - String query = "SELECT * FROM accounts WHERE name = 'Oliver'"; - String expectedQuery = "( SELECT * FROM table WHERE identifier = 'string_literal' )"; - Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); - } + @Test + public void queriesShouldHaveAnonymousInputStrings() { + String query = "SELECT * FROM accounts WHERE name = 'Oliver'"; + String expectedQuery = "( SELECT * FROM table WHERE identifier = 'string_literal' )"; + Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); + } - @Test - public void queriesWithAliasesShouldAnonymizeSensitiveData() { - String query = "SELECT balance AS b FROM accounts AS a"; - String expectedQuery = "( SELECT identifier AS b FROM table a )"; - Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); - } + @Test + public void queriesWithAliasesShouldAnonymizeSensitiveData() { + String query = "SELECT balance AS b FROM accounts AS a"; + String expectedQuery = "( SELECT identifier AS b FROM table a )"; + Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); + } - @Test - public void queriesWithFunctionsShouldAnonymizeSensitiveData() { - String query = "SELECT LTRIM(firstname) FROM accounts"; - String expectedQuery = "( SELECT LTRIM(identifier) FROM table )"; - Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); - } + @Test + public void queriesWithFunctionsShouldAnonymizeSensitiveData() { + String query = "SELECT LTRIM(firstname) FROM accounts"; + String expectedQuery = "( SELECT LTRIM(identifier) FROM table )"; + Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); + } - @Test - 
public void queriesWithAggregatesShouldAnonymizeSensitiveData() { - String query = "SELECT MAX(price) - MIN(price) from tickets"; - String expectedQuery = "( SELECT MAX(identifier) - MIN(identifier) FROM table )"; - Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); - } + @Test + public void queriesWithAggregatesShouldAnonymizeSensitiveData() { + String query = "SELECT MAX(price) - MIN(price) from tickets"; + String expectedQuery = "( SELECT MAX(identifier) - MIN(identifier) FROM table )"; + Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); + } - @Test - public void queriesWithSubqueriesShouldAnonymizeSensitiveData() { - String query = "SELECT a.f, a.l, a.a FROM " + - "(SELECT firstname AS f, lastname AS l, age AS a FROM accounts WHERE age > 30) a"; - String expectedQuery = "( SELECT identifier, identifier, identifier FROM (SELECT identifier AS f, " + - "identifier AS l, identifier AS a FROM table WHERE identifier > number ) a )"; - Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); - } + @Test + public void queriesWithSubqueriesShouldAnonymizeSensitiveData() { + String query = + "SELECT a.f, a.l, a.a FROM " + + "(SELECT firstname AS f, lastname AS l, age AS a FROM accounts WHERE age > 30) a"; + String expectedQuery = + "( SELECT identifier, identifier, identifier FROM (SELECT identifier AS f, " + + "identifier AS l, identifier AS a FROM table WHERE identifier > number ) a )"; + Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); + } - @Test - public void joinQueriesShouldAnonymizeSensitiveData() { - String query = "SELECT a.account_number, a.firstname, a.lastname, e.id, e.name " + - "FROM accounts a JOIN employees e"; - String expectedQuery = "( SELECT identifier, identifier, identifier, identifier, identifier " + - "FROM table a JOIN table e )"; - Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); - } + @Test + public void 
joinQueriesShouldAnonymizeSensitiveData() { + String query = + "SELECT a.account_number, a.firstname, a.lastname, e.id, e.name " + + "FROM accounts a JOIN employees e"; + String expectedQuery = + "( SELECT identifier, identifier, identifier, identifier, identifier " + + "FROM table a JOIN table e )"; + Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); + } - @Test - public void unionQueriesShouldAnonymizeSensitiveData() { - String query = "SELECT name, age FROM accounts UNION SELECT name, age FROM employees"; - String expectedQuery = "( SELECT identifier, identifier FROM table " + - "UNION SELECT identifier, identifier FROM table )"; - Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); - } + @Test + public void unionQueriesShouldAnonymizeSensitiveData() { + String query = "SELECT name, age FROM accounts UNION SELECT name, age FROM employees"; + String expectedQuery = + "( SELECT identifier, identifier FROM table " + + "UNION SELECT identifier, identifier FROM table )"; + Assert.assertEquals(expectedQuery, QueryDataAnonymizer.anonymizeData(query)); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/SQLFunctionsTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/SQLFunctionsTest.java index 70c4a2aa11..9fc2b6012d 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/SQLFunctionsTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/SQLFunctionsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.utils; import static org.junit.Assert.assertEquals; @@ -35,69 +34,64 @@ public class SQLFunctionsTest { - private SQLFunctions sqlFunctions = new SQLFunctions(); - - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); - - @Test - public void testAssign() throws SqlParseException { - SQLFunctions sqlFunctions = new SQLFunctions(); - - final 
SQLIntegerExpr sqlIntegerExpr = new SQLIntegerExpr(10); - final Tuple assign = sqlFunctions.function("assign", - ImmutableList.of(new KVValue(null, sqlIntegerExpr)), - null, - true); - - assertTrue(assign.v1().matches("assign_[0-9]+")); - assertTrue(assign.v2().matches("def assign_[0-9]+ = 10;return assign_[0-9]+;")); - } - - @Test - public void testAbsWithIntReturnType() { - final SQLIntegerExpr sqlIntegerExpr = new SQLIntegerExpr(6); - - final SQLMethodInvokeExpr invokeExpr = new SQLMethodInvokeExpr("ABS"); - invokeExpr.addParameter(sqlIntegerExpr); - List params = new ArrayList<>(); - - final MethodField field = new ScriptMethodField("ABS", params, null, null); - field.setExpression(invokeExpr); - ColumnTypeProvider columnTypeProvider = new ColumnTypeProvider(OpenSearchDataType.INTEGER); - - Schema.Type resolvedType = columnTypeProvider.get(0); - final Schema.Type returnType = sqlFunctions.getScriptFunctionReturnType(field, resolvedType); - Assert.assertEquals(returnType, Schema.Type.INTEGER); - } - - @Test - public void testCastReturnType() { - final SQLIdentifierExpr identifierExpr = new SQLIdentifierExpr("int_type"); - SQLDataType sqlDataType = new SQLDataTypeImpl("INT"); - final SQLCastExpr castExpr = new SQLCastExpr(); - castExpr.setExpr(identifierExpr); - castExpr.setDataType(sqlDataType); - - List params = new ArrayList<>(); - final MethodField field = new ScriptMethodField("CAST", params, null, null); - field.setExpression(castExpr); - ColumnTypeProvider columnTypeProvider = new ColumnTypeProvider(OpenSearchDataType.INTEGER); - - Schema.Type resolvedType = columnTypeProvider.get(0); - final Schema.Type returnType = sqlFunctions.getScriptFunctionReturnType(field, resolvedType); - Assert.assertEquals(returnType, Schema.Type.INTEGER); - } - - @Test - public void testCastIntStatementScript() throws SqlParseException { - assertEquals( - "def result = (doc['age'].value instanceof boolean) " - + "? (doc['age'].value ? 
1 : 0) " - + ": Double.parseDouble(doc['age'].value.toString()).intValue()", - sqlFunctions.getCastScriptStatement( - "result", "int", Arrays.asList(new KVValue("age"))) - ); - } - + private SQLFunctions sqlFunctions = new SQLFunctions(); + + @Rule public ExpectedException exceptionRule = ExpectedException.none(); + + @Test + public void testAssign() throws SqlParseException { + SQLFunctions sqlFunctions = new SQLFunctions(); + + final SQLIntegerExpr sqlIntegerExpr = new SQLIntegerExpr(10); + final Tuple assign = + sqlFunctions.function( + "assign", ImmutableList.of(new KVValue(null, sqlIntegerExpr)), null, true); + + assertTrue(assign.v1().matches("assign_[0-9]+")); + assertTrue(assign.v2().matches("def assign_[0-9]+ = 10;return assign_[0-9]+;")); + } + + @Test + public void testAbsWithIntReturnType() { + final SQLIntegerExpr sqlIntegerExpr = new SQLIntegerExpr(6); + + final SQLMethodInvokeExpr invokeExpr = new SQLMethodInvokeExpr("ABS"); + invokeExpr.addParameter(sqlIntegerExpr); + List params = new ArrayList<>(); + + final MethodField field = new ScriptMethodField("ABS", params, null, null); + field.setExpression(invokeExpr); + ColumnTypeProvider columnTypeProvider = new ColumnTypeProvider(OpenSearchDataType.INTEGER); + + Schema.Type resolvedType = columnTypeProvider.get(0); + final Schema.Type returnType = sqlFunctions.getScriptFunctionReturnType(field, resolvedType); + Assert.assertEquals(returnType, Schema.Type.INTEGER); + } + + @Test + public void testCastReturnType() { + final SQLIdentifierExpr identifierExpr = new SQLIdentifierExpr("int_type"); + SQLDataType sqlDataType = new SQLDataTypeImpl("INT"); + final SQLCastExpr castExpr = new SQLCastExpr(); + castExpr.setExpr(identifierExpr); + castExpr.setDataType(sqlDataType); + + List params = new ArrayList<>(); + final MethodField field = new ScriptMethodField("CAST", params, null, null); + field.setExpression(castExpr); + ColumnTypeProvider columnTypeProvider = new 
ColumnTypeProvider(OpenSearchDataType.INTEGER); + + Schema.Type resolvedType = columnTypeProvider.get(0); + final Schema.Type returnType = sqlFunctions.getScriptFunctionReturnType(field, resolvedType); + Assert.assertEquals(returnType, Schema.Type.INTEGER); + } + + @Test + public void testCastIntStatementScript() throws SqlParseException { + assertEquals( + "def result = (doc['age'].value instanceof boolean) " + + "? (doc['age'].value ? 1 : 0) " + + ": Double.parseDouble(doc['age'].value.toString()).intValue()", + sqlFunctions.getCastScriptStatement("result", "int", Arrays.asList(new KVValue("age")))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/StringUtilsTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/StringUtilsTest.java index d25fed6f31..b73e91981e 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/StringUtilsTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/StringUtilsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.utils; import static org.hamcrest.Matchers.equalTo; @@ -17,55 +16,57 @@ public class StringUtilsTest { - private Locale originalLocale; + private Locale originalLocale; - @Before - public void saveOriginalLocale() { - originalLocale = Locale.getDefault(); - } + @Before + public void saveOriginalLocale() { + originalLocale = Locale.getDefault(); + } - @After - public void restoreOriginalLocale() { - Locale.setDefault(originalLocale); - } + @After + public void restoreOriginalLocale() { + Locale.setDefault(originalLocale); + } - @Test - public void toLower() { - final String input = "SAMPLE STRING"; - final String output = StringUtils.toLower(input); + @Test + public void toLower() { + final String input = "SAMPLE STRING"; + final String output = StringUtils.toLower(input); - Assert.assertThat(output, equalTo("sample string")); + Assert.assertThat(output, equalTo("sample 
string")); - // See https://docs.oracle.com/javase/10/docs/api/java/lang/String.html#toLowerCase(java.util.Locale) - // for the choice of these characters and the locale. - final String upper = "\u0130 \u0049"; - Locale.setDefault(Locale.forLanguageTag("tr")); + // See + // https://docs.oracle.com/javase/10/docs/api/java/lang/String.html#toLowerCase(java.util.Locale) + // for the choice of these characters and the locale. + final String upper = "\u0130 \u0049"; + Locale.setDefault(Locale.forLanguageTag("tr")); - Assert.assertThat(upper.toUpperCase(Locale.ROOT), equalTo(StringUtils.toUpper(upper))); - } + Assert.assertThat(upper.toUpperCase(Locale.ROOT), equalTo(StringUtils.toUpper(upper))); + } - @Test - public void toUpper() { - final String input = "sample string"; - final String output = StringUtils.toUpper(input); + @Test + public void toUpper() { + final String input = "sample string"; + final String output = StringUtils.toUpper(input); - Assert.assertThat(output, equalTo("SAMPLE STRING")); + Assert.assertThat(output, equalTo("SAMPLE STRING")); - // See https://docs.oracle.com/javase/10/docs/api/java/lang/String.html#toUpperCase(java.util.Locale) - // for the choice of these characters and the locale. - final String lower = "\u0069 \u0131"; - Locale.setDefault(Locale.forLanguageTag("tr")); + // See + // https://docs.oracle.com/javase/10/docs/api/java/lang/String.html#toUpperCase(java.util.Locale) + // for the choice of these characters and the locale. 
+ final String lower = "\u0069 \u0131"; + Locale.setDefault(Locale.forLanguageTag("tr")); - Assert.assertThat(lower.toUpperCase(Locale.ROOT), equalTo(StringUtils.toUpper(lower))); - } + Assert.assertThat(lower.toUpperCase(Locale.ROOT), equalTo(StringUtils.toUpper(lower))); + } - @Test - public void format() { - Locale.setDefault(Locale.forLanguageTag("tr")); - final String upper = "\u0130 \u0049"; - final String lower = "\u0069 \u0131"; + @Test + public void format() { + Locale.setDefault(Locale.forLanguageTag("tr")); + final String upper = "\u0130 \u0049"; + final String lower = "\u0069 \u0131"; - final String output = StringUtils.format("%s %s", upper, lower); - Assert.assertThat(output, equalTo(String.format(Locale.ROOT, "%s %s", upper, lower))); - } + final String output = StringUtils.format("%s %s", upper, lower); + Assert.assertThat(output, equalTo(String.format(Locale.ROOT, "%s %s", upper, lower))); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/UtilTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/UtilTest.java index 21731db5a5..e3c7a74a71 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/UtilTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/unittest/utils/UtilTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.unittest.utils; import java.util.HashMap; @@ -14,54 +13,54 @@ public class UtilTest { - @Test - public void clearEmptyPaths_EmptyMap_ShouldReturnTrue(){ - Map map = new HashMap<>(); - boolean result = Util.clearEmptyPaths(map); - // - Assert.assertTrue(result); - } + @Test + public void clearEmptyPaths_EmptyMap_ShouldReturnTrue() { + Map map = new HashMap<>(); + boolean result = Util.clearEmptyPaths(map); + // + Assert.assertTrue(result); + } - @Test - public void clearEmptyPaths_EmptyPathSize1_ShouldReturnTrueAndMapShouldBeEmpty(){ - Map map = new HashMap<>(); - map.put("a",new HashMap()); - boolean 
result = Util.clearEmptyPaths(map); - Assert.assertTrue(result); - Assert.assertEquals(0,map.size()); - } + @Test + public void clearEmptyPaths_EmptyPathSize1_ShouldReturnTrueAndMapShouldBeEmpty() { + Map map = new HashMap<>(); + map.put("a", new HashMap()); + boolean result = Util.clearEmptyPaths(map); + Assert.assertTrue(result); + Assert.assertEquals(0, map.size()); + } - @Test - public void clearEmptyPaths_EmptyPathSize2_ShouldReturnTrueAndMapShouldBeEmpty(){ - Map map = new HashMap<>(); - Map innerMap = new HashMap<>(); - innerMap.put("b",new HashMap()); - map.put("a",innerMap); - boolean result = Util.clearEmptyPaths(map); - Assert.assertTrue(result); - Assert.assertEquals(0,map.size()); - } + @Test + public void clearEmptyPaths_EmptyPathSize2_ShouldReturnTrueAndMapShouldBeEmpty() { + Map map = new HashMap<>(); + Map innerMap = new HashMap<>(); + innerMap.put("b", new HashMap()); + map.put("a", innerMap); + boolean result = Util.clearEmptyPaths(map); + Assert.assertTrue(result); + Assert.assertEquals(0, map.size()); + } - @Test - public void clearEmptyPaths_2PathsOneEmpty_MapShouldBeSizeOne(){ - Map map = new HashMap<>(); - map.put("a",new HashMap()); - map.put("c",1); - Util.clearEmptyPaths(map); - Assert.assertEquals(1,map.size()); - } + @Test + public void clearEmptyPaths_2PathsOneEmpty_MapShouldBeSizeOne() { + Map map = new HashMap<>(); + map.put("a", new HashMap()); + map.put("c", 1); + Util.clearEmptyPaths(map); + Assert.assertEquals(1, map.size()); + } - @Test - @SuppressWarnings("unchecked") - public void clearEmptyPaths_MapSizeTwoAndTwoOneInnerEmpty_MapShouldBeSizeTwoAndOne(){ - Map map = new HashMap<>(); - Map innerMap = new HashMap<>(); - innerMap.put("b",2); - innerMap.put("c",new HashMap()); - map.put("a",innerMap); - map.put("c",1); - Util.clearEmptyPaths(map); - Assert.assertEquals(2, map.size()); - Assert.assertEquals(1,((HashMap)map.get("a")).size()); - } + @Test + @SuppressWarnings("unchecked") + public void 
clearEmptyPaths_MapSizeTwoAndTwoOneInnerEmpty_MapShouldBeSizeTwoAndOne() { + Map map = new HashMap<>(); + Map innerMap = new HashMap<>(); + innerMap.put("b", 2); + innerMap.put("c", new HashMap()); + map.put("a", innerMap); + map.put("c", 1); + Util.clearEmptyPaths(map); + Assert.assertEquals(2, map.size()); + Assert.assertEquals(1, ((HashMap) map.get("a")).size()); + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/util/AggregationUtils.java b/legacy/src/test/java/org/opensearch/sql/legacy/util/AggregationUtils.java index 58fa8793ff..85da1d990f 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/util/AggregationUtils.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/util/AggregationUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.util; import com.fasterxml.jackson.core.JsonFactory; @@ -41,42 +40,52 @@ import org.opensearch.search.aggregations.pipeline.PercentilesBucketPipelineAggregationBuilder; public class AggregationUtils { - private final static List entryList = - new ImmutableMap.Builder>().put( - MinAggregationBuilder.NAME, (p, c) -> ParsedMin.fromXContent(p, (String) c)) - .put(MaxAggregationBuilder.NAME, (p, c) -> ParsedMax.fromXContent(p, (String) c)) - .put(SumAggregationBuilder.NAME, (p, c) -> ParsedSum.fromXContent(p, (String) c)) - .put(AvgAggregationBuilder.NAME, (p, c) -> ParsedAvg.fromXContent(p, (String) c)) - .put(StringTerms.NAME, (p, c) -> ParsedStringTerms.fromXContent(p, (String) c)) - .put(LongTerms.NAME, (p, c) -> ParsedLongTerms.fromXContent(p, (String) c)) - .put(DoubleTerms.NAME, (p, c) -> ParsedDoubleTerms.fromXContent(p, (String) c)) - .put(ValueCountAggregationBuilder.NAME, (p, c) -> ParsedValueCount.fromXContent(p, (String) c)) - .put(PercentilesBucketPipelineAggregationBuilder.NAME, - (p, c) -> ParsedPercentilesBucket.fromXContent(p, (String) c)) - .put(DateHistogramAggregationBuilder.NAME, (p, c) -> ParsedDateHistogram.fromXContent(p, (String) c)) 
- .build() - .entrySet() - .stream() - .map(entry -> new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(entry.getKey()), - entry.getValue())) - .collect(Collectors.toList()); - private final static NamedXContentRegistry namedXContentRegistry = new NamedXContentRegistry(entryList); + private static final List entryList = + new ImmutableMap.Builder>() + .put(MinAggregationBuilder.NAME, (p, c) -> ParsedMin.fromXContent(p, (String) c)) + .put(MaxAggregationBuilder.NAME, (p, c) -> ParsedMax.fromXContent(p, (String) c)) + .put(SumAggregationBuilder.NAME, (p, c) -> ParsedSum.fromXContent(p, (String) c)) + .put(AvgAggregationBuilder.NAME, (p, c) -> ParsedAvg.fromXContent(p, (String) c)) + .put(StringTerms.NAME, (p, c) -> ParsedStringTerms.fromXContent(p, (String) c)) + .put(LongTerms.NAME, (p, c) -> ParsedLongTerms.fromXContent(p, (String) c)) + .put(DoubleTerms.NAME, (p, c) -> ParsedDoubleTerms.fromXContent(p, (String) c)) + .put( + ValueCountAggregationBuilder.NAME, + (p, c) -> ParsedValueCount.fromXContent(p, (String) c)) + .put( + PercentilesBucketPipelineAggregationBuilder.NAME, + (p, c) -> ParsedPercentilesBucket.fromXContent(p, (String) c)) + .put( + DateHistogramAggregationBuilder.NAME, + (p, c) -> ParsedDateHistogram.fromXContent(p, (String) c)) + .build() + .entrySet() + .stream() + .map( + entry -> + new NamedXContentRegistry.Entry( + Aggregation.class, new ParseField(entry.getKey()), entry.getValue())) + .collect(Collectors.toList()); + private static final NamedXContentRegistry namedXContentRegistry = + new NamedXContentRegistry(entryList); - /** - * Populate {@link Aggregations} from JSON string. 
- * @param json json string - * @return {@link Aggregations} - */ - public static Aggregations fromJson(String json) { - try { - XContentParser xContentParser = new JsonXContentParser( - namedXContentRegistry, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(json)); - xContentParser.nextToken(); - return Aggregations.fromXContent(xContentParser); - } catch (IOException e) { - throw new RuntimeException(e); - } + /** + * Populate {@link Aggregations} from JSON string. + * + * @param json json string + * @return {@link Aggregations} + */ + public static Aggregations fromJson(String json) { + try { + XContentParser xContentParser = + new JsonXContentParser( + namedXContentRegistry, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(json)); + xContentParser.nextToken(); + return Aggregations.fromXContent(xContentParser); + } catch (IOException e) { + throw new RuntimeException(e); } + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/util/CheckScriptContents.java b/legacy/src/test/java/org/opensearch/sql/legacy/util/CheckScriptContents.java index 2396ca5924..5f0e07aa35 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/util/CheckScriptContents.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/util/CheckScriptContents.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.util; import static java.util.Collections.emptyList; @@ -13,7 +12,6 @@ import static org.mockito.Mockito.any; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; import static org.opensearch.search.builder.SearchSourceBuilder.ScriptField; @@ -59,205 +57,210 @@ public class CheckScriptContents { - private static SQLExpr queryToExpr(String query) { - return new ElasticSqlExprParser(query).expr(); - } + private static SQLExpr queryToExpr(String query) { + return new 
ElasticSqlExprParser(query).expr(); + } - public static ScriptField getScriptFieldFromQuery(String query) { - try { - Client mockClient = mock(Client.class); - stubMockClient(mockClient); - QueryAction queryAction = OpenSearchActionFactory.create(mockClient, query); - SqlElasticRequestBuilder requestBuilder = queryAction.explain(); + public static ScriptField getScriptFieldFromQuery(String query) { + try { + Client mockClient = mock(Client.class); + stubMockClient(mockClient); + QueryAction queryAction = OpenSearchActionFactory.create(mockClient, query); + SqlElasticRequestBuilder requestBuilder = queryAction.explain(); - SearchRequestBuilder request = (SearchRequestBuilder) requestBuilder.getBuilder(); - List scriptFields = request.request().source().scriptFields(); + SearchRequestBuilder request = (SearchRequestBuilder) requestBuilder.getBuilder(); + List scriptFields = request.request().source().scriptFields(); - assertTrue(scriptFields.size() == 1); + assertTrue(scriptFields.size() == 1); - return scriptFields.get(0); + return scriptFields.get(0); - } catch (SQLFeatureNotSupportedException | SqlParseException | SQLFeatureDisabledException e) { - throw new ParserException("Unable to parse query: " + query, e); - } + } catch (SQLFeatureNotSupportedException | SqlParseException | SQLFeatureDisabledException e) { + throw new ParserException("Unable to parse query: " + query, e); } + } - public static ScriptFilter getScriptFilterFromQuery(String query, SqlParser parser) { - try { - Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); - Where where = select.getWhere(); - - assertTrue(where.getWheres().size() == 1); - assertTrue(((Condition) (where.getWheres().get(0))).getValue() instanceof ScriptFilter); + public static ScriptFilter getScriptFilterFromQuery(String query, SqlParser parser) { + try { + Select select = parser.parseSelect((SQLQueryExpr) queryToExpr(query)); + Where where = select.getWhere(); - return (ScriptFilter) (((Condition) 
(where.getWheres().get(0))).getValue()); + assertTrue(where.getWheres().size() == 1); + assertTrue(((Condition) (where.getWheres().get(0))).getValue() instanceof ScriptFilter); - } catch (SqlParseException e) { - throw new ParserException("Unable to parse query: " + query); - } - } + return (ScriptFilter) (((Condition) (where.getWheres().get(0))).getValue()); - public static boolean scriptContainsString(ScriptField scriptField, String string) { - return scriptField.script().getIdOrCode().contains(string); + } catch (SqlParseException e) { + throw new ParserException("Unable to parse query: " + query); } + } - public static boolean scriptContainsString(ScriptFilter scriptFilter, String string) { - return scriptFilter.getScript().contains(string); - } + public static boolean scriptContainsString(ScriptField scriptField, String string) { + return scriptField.script().getIdOrCode().contains(string); + } - public static boolean scriptHasPattern(ScriptField scriptField, String regex) { - Pattern pattern = Pattern.compile(regex); - Matcher matcher = pattern.matcher(scriptField.script().getIdOrCode()); - return matcher.find(); - } + public static boolean scriptContainsString(ScriptFilter scriptFilter, String string) { + return scriptFilter.getScript().contains(string); + } - public static boolean scriptHasPattern(ScriptFilter scriptFilter, String regex) { - Pattern pattern = Pattern.compile(regex); - Matcher matcher = pattern.matcher(scriptFilter.getScript()); - return matcher.find(); - } + public static boolean scriptHasPattern(ScriptField scriptField, String regex) { + Pattern pattern = Pattern.compile(regex); + Matcher matcher = pattern.matcher(scriptField.script().getIdOrCode()); + return matcher.find(); + } - public static void stubMockClient(Client mockClient) { - String mappings = "{\n" + - " \"opensearch-sql_test_index_bank\": {\n" + - " \"mappings\": {\n" + - " \"account\": {\n" + - " \"properties\": {\n" + - " \"account_number\": {\n" + - " \"type\": \"long\"\n" 
+ - " },\n" + - " \"address\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"age\": {\n" + - " \"type\": \"integer\"\n" + - " },\n" + - " \"balance\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"birthdate\": {\n" + - " \"type\": \"date\"\n" + - " },\n" + - " \"city\": {\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"email\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"employer\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " },\n" + - " \"firstname\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"gender\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"lastname\": {\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"male\": {\n" + - " \"type\": \"boolean\"\n" + - " },\n" + - " \"state\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"raw\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " },\n" + - // ==== All required by IndexMetaData.fromXContent() ==== - " \"settings\": {\n" + - " \"index\": {\n" + - " \"number_of_shards\": 5,\n" + - " \"number_of_replicas\": 0,\n" + - " \"version\": {\n" + - " \"created\": \"6050399\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"mapping_version\": \"1\",\n" + - " \"settings_version\": \"1\"\n" + - //======================================================= - " }\n" + - "}"; + public static boolean scriptHasPattern(ScriptFilter scriptFilter, String regex) { + Pattern pattern = Pattern.compile(regex); + Matcher matcher = pattern.matcher(scriptFilter.getScript()); + return matcher.find(); + } - AdminClient mockAdminClient = mock(AdminClient.class); - when(mockClient.admin()).thenReturn(mockAdminClient); + public static void stubMockClient(Client mockClient) { + String mappings = + "{\n" + + " \"opensearch-sql_test_index_bank\": {\n" + + " \"mappings\": {\n" + + " 
\"account\": {\n" + + " \"properties\": {\n" + + " \"account_number\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"address\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"age\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + " \"balance\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"birthdate\": {\n" + + " \"type\": \"date\"\n" + + " },\n" + + " \"city\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"email\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"employer\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " },\n" + + " \"firstname\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"gender\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"lastname\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"male\": {\n" + + " \"type\": \"boolean\"\n" + + " },\n" + + " \"state\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"raw\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " },\n" + + + // ==== All required by IndexMetaData.fromXContent() ==== + " \"settings\": {\n" + + " \"index\": {\n" + + " \"number_of_shards\": 5,\n" + + " \"number_of_replicas\": 0,\n" + + " \"version\": {\n" + + " \"created\": \"6050399\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"mapping_version\": \"1\",\n" + + " \"settings_version\": \"1\"\n" + + + // ======================================================= + " }\n" + + "}"; - IndicesAdminClient mockIndexClient = mock(IndicesAdminClient.class); - when(mockAdminClient.indices()).thenReturn(mockIndexClient); + AdminClient mockAdminClient = mock(AdminClient.class); + when(mockClient.admin()).thenReturn(mockAdminClient); - ActionFuture mockActionResp = mock(ActionFuture.class); - 
when(mockIndexClient.getFieldMappings(any(GetFieldMappingsRequest.class))).thenReturn(mockActionResp); - mockLocalClusterState(mappings); - } + IndicesAdminClient mockIndexClient = mock(IndicesAdminClient.class); + when(mockAdminClient.indices()).thenReturn(mockIndexClient); - public static XContentParser createParser(String mappings) throws IOException { - return XContentType.JSON.xContent().createParser( - NamedXContentRegistry.EMPTY, - DeprecationHandler.THROW_UNSUPPORTED_OPERATION, - mappings - ); - } + ActionFuture mockActionResp = mock(ActionFuture.class); + when(mockIndexClient.getFieldMappings(any(GetFieldMappingsRequest.class))) + .thenReturn(mockActionResp); + mockLocalClusterState(mappings); + } - public static void mockLocalClusterState(String mappings) { - LocalClusterState.state().setClusterService(mockClusterService(mappings)); - LocalClusterState.state().setResolver(mockIndexNameExpressionResolver()); - LocalClusterState.state().setPluginSettings(mockPluginSettings()); - } + public static XContentParser createParser(String mappings) throws IOException { + return XContentType.JSON + .xContent() + .createParser( + NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, mappings); + } - public static ClusterService mockClusterService(String mappings) { - ClusterService mockService = mock(ClusterService.class); - ClusterState mockState = mock(ClusterState.class); - Metadata mockMetaData = mock(Metadata.class); + public static void mockLocalClusterState(String mappings) { + LocalClusterState.state().setClusterService(mockClusterService(mappings)); + LocalClusterState.state().setResolver(mockIndexNameExpressionResolver()); + LocalClusterState.state().setPluginSettings(mockPluginSettings()); + } - when(mockService.state()).thenReturn(mockState); - when(mockState.metadata()).thenReturn(mockMetaData); - try { - when(mockMetaData.findMappings(any(), any())).thenReturn( - Map.of(TestsConstants.TEST_INDEX_BANK, IndexMetadata.fromXContent( - 
createParser(mappings)).mapping())); - } - catch (IOException e) { - throw new IllegalStateException(e); - } - return mockService; - } + public static ClusterService mockClusterService(String mappings) { + ClusterService mockService = mock(ClusterService.class); + ClusterState mockState = mock(ClusterState.class); + Metadata mockMetaData = mock(Metadata.class); - public static IndexNameExpressionResolver mockIndexNameExpressionResolver() { - IndexNameExpressionResolver mockResolver = mock(IndexNameExpressionResolver.class); - when(mockResolver.concreteIndexNames(any(), any(), anyBoolean(), anyString())).thenAnswer( - (Answer) invocation -> { - // Return index expression directly without resolving - Object indexExprs = invocation.getArguments()[3]; - if (indexExprs instanceof String) { - return new String[]{ (String) indexExprs }; - } - return (String[]) indexExprs; - } - ); - return mockResolver; + when(mockService.state()).thenReturn(mockState); + when(mockState.metadata()).thenReturn(mockMetaData); + try { + when(mockMetaData.findMappings(any(), any())) + .thenReturn( + Map.of( + TestsConstants.TEST_INDEX_BANK, + IndexMetadata.fromXContent(createParser(mappings)).mapping())); + } catch (IOException e) { + throw new IllegalStateException(e); } + return mockService; + } - public static OpenSearchSettings mockPluginSettings() { - OpenSearchSettings settings = mock(OpenSearchSettings.class); + public static IndexNameExpressionResolver mockIndexNameExpressionResolver() { + IndexNameExpressionResolver mockResolver = mock(IndexNameExpressionResolver.class); + when(mockResolver.concreteIndexNames(any(), any(), anyBoolean(), anyString())) + .thenAnswer( + (Answer) + invocation -> { + // Return index expression directly without resolving + Object indexExprs = invocation.getArguments()[3]; + if (indexExprs instanceof String) { + return new String[] {(String) indexExprs}; + } + return (String[]) indexExprs; + }); + return mockResolver; + } - // Force return empty list to 
avoid ClusterSettings be invoked which is a final class and hard to mock. - // In this case, default value in Setting will be returned all the time. - doReturn(emptyList()).when(settings).getSettings(); - return settings; - } + public static OpenSearchSettings mockPluginSettings() { + OpenSearchSettings settings = mock(OpenSearchSettings.class); + // Force return empty list to avoid ClusterSettings be invoked which is a final class and hard + // to mock. + // In this case, default value in Setting will be returned all the time. + doReturn(emptyList()).when(settings).getSettings(); + return settings; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/util/HasFieldWithValue.java b/legacy/src/test/java/org/opensearch/sql/legacy/util/HasFieldWithValue.java index 3a7f074a0f..74f6411f73 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/util/HasFieldWithValue.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/util/HasFieldWithValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.util; import java.lang.reflect.Field; @@ -13,42 +12,43 @@ /** * A matcher for private field value extraction along with matcher to assert its value. * - * @param Type of target (actual) object - * @param Type of field member (feature) extracted from target object by reflection + * @param Type of target (actual) object + * @param Type of field member (feature) extracted from target object by reflection */ public class HasFieldWithValue extends FeatureMatcher { - private final String fieldName; - - /** - * Construct a matcher. Reordered the argument list. 
- * - * @param name Identifying text for mismatch message - * @param desc Descriptive text to use in describeTo - * @param matcher The matcher to apply to the feature - */ - private HasFieldWithValue(String name, String desc, Matcher matcher) { - super(matcher, desc, name); - this.fieldName = name; - } - - public static HasFieldWithValue hasFieldWithValue(String name, String desc, Matcher matcher) { - return new HasFieldWithValue<>(name, desc, matcher); - } - - @Override - protected U featureValueOf(T targetObj) { - return getFieldValue(targetObj, fieldName); - } - - @SuppressWarnings("unchecked") - private U getFieldValue(Object obj, String fieldName) { - try { - Field field = obj.getClass().getDeclaredField(fieldName); - field.setAccessible(true); - return (U) field.get(obj); - } catch (NoSuchFieldException | IllegalAccessException e) { - throw new IllegalArgumentException(e); - } + private final String fieldName; + + /** + * Construct a matcher. Reordered the argument list. + * + * @param name Identifying text for mismatch message + * @param desc Descriptive text to use in describeTo + * @param matcher The matcher to apply to the feature + */ + private HasFieldWithValue(String name, String desc, Matcher matcher) { + super(matcher, desc, name); + this.fieldName = name; + } + + public static HasFieldWithValue hasFieldWithValue( + String name, String desc, Matcher matcher) { + return new HasFieldWithValue<>(name, desc, matcher); + } + + @Override + protected U featureValueOf(T targetObj) { + return getFieldValue(targetObj, fieldName); + } + + @SuppressWarnings("unchecked") + private U getFieldValue(Object obj, String fieldName) { + try { + Field field = obj.getClass().getDeclaredField(fieldName); + field.setAccessible(true); + return (U) field.get(obj); + } catch (NoSuchFieldException | IllegalAccessException e) { + throw new IllegalArgumentException(e); } + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/util/MatcherUtils.java 
b/legacy/src/test/java/org/opensearch/sql/legacy/util/MatcherUtils.java index 84f19de58b..0e5f699092 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/util/MatcherUtils.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/util/MatcherUtils.java @@ -3,9 +3,9 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.util; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; @@ -17,7 +17,6 @@ import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasItems; import static org.junit.Assert.assertEquals; -import static org.hamcrest.MatcherAssert.assertThat; import com.google.common.base.Strings; import java.util.ArrayList; @@ -36,260 +35,266 @@ public class MatcherUtils { - /** - * Assert field value in object by a custom matcher and getter to access the field. - * - * @param name description - * @param subMatcher sub-matcher for field - * @param getter getter function to access the field - * @param type of outer object - * @param type of inner field - * @return matcher - */ - public static FeatureMatcher featureValueOf(String name, - Matcher subMatcher, - Function getter) { - return new FeatureMatcher(subMatcher, name, name) { - @Override - protected U featureValueOf(T actual) { - return getter.apply(actual); - } - }; - } - - @SafeVarargs - public static Matcher hits(Matcher... hitMatchers) { - if (hitMatchers.length == 0) { - return featureValueOf("SearchHits", emptyArray(), SearchHits::getHits); - } - return featureValueOf("SearchHits", arrayContainingInAnyOrder(hitMatchers), SearchHits::getHits); + /** + * Assert field value in object by a custom matcher and getter to access the field. 
+ * + * @param name description + * @param subMatcher sub-matcher for field + * @param getter getter function to access the field + * @param type of outer object + * @param type of inner field + * @return matcher + */ + public static FeatureMatcher featureValueOf( + String name, Matcher subMatcher, Function getter) { + return new FeatureMatcher(subMatcher, name, name) { + @Override + protected U featureValueOf(T actual) { + return getter.apply(actual); + } + }; + } + + @SafeVarargs + public static Matcher hits(Matcher... hitMatchers) { + if (hitMatchers.length == 0) { + return featureValueOf("SearchHits", emptyArray(), SearchHits::getHits); } - - @SafeVarargs - public static Matcher hitsInOrder(Matcher... hitMatchers) { - if (hitMatchers.length == 0) { - return featureValueOf("SearchHits", emptyArray(), SearchHits::getHits); - } - return featureValueOf("SearchHits", arrayContaining(hitMatchers), SearchHits::getHits); - } - - @SuppressWarnings("unchecked") - public static Matcher hit(Matcher>... entryMatchers) { - return featureValueOf("SearchHit", allOf(entryMatchers), SearchHit::getSourceAsMap); - } - - @SuppressWarnings("unchecked") - public static Matcher> kv(String key, Object value) { - // Use raw type to avoid generic type problem from Matcher> to Matcher - return (Matcher) hasEntry(key, value); + return featureValueOf( + "SearchHits", arrayContainingInAnyOrder(hitMatchers), SearchHits::getHits); + } + + @SafeVarargs + public static Matcher hitsInOrder(Matcher... hitMatchers) { + if (hitMatchers.length == 0) { + return featureValueOf("SearchHits", emptyArray(), SearchHits::getHits); } - - public static Matcher hitAny(String query, Matcher... 
matcher) { - return featureValueOf("SearchHits", hasItems(matcher), actual -> { - JSONArray array = (JSONArray) (actual.query(query)); - List results = new ArrayList<>(array.length()); - for (Object element : array) { - results.add((JSONObject) element); - } - return results; + return featureValueOf("SearchHits", arrayContaining(hitMatchers), SearchHits::getHits); + } + + @SuppressWarnings("unchecked") + public static Matcher hit(Matcher>... entryMatchers) { + return featureValueOf("SearchHit", allOf(entryMatchers), SearchHit::getSourceAsMap); + } + + @SuppressWarnings("unchecked") + public static Matcher> kv(String key, Object value) { + // Use raw type to avoid generic type problem from Matcher> to Matcher + return (Matcher) hasEntry(key, value); + } + + public static Matcher hitAny(String query, Matcher... matcher) { + return featureValueOf( + "SearchHits", + hasItems(matcher), + actual -> { + JSONArray array = (JSONArray) (actual.query(query)); + List results = new ArrayList<>(array.length()); + for (Object element : array) { + results.add((JSONObject) element); + } + return results; }); - } - - public static Matcher hitAny(Matcher... matcher) { - return hitAny("/hits/hits", matcher); - } - - public static Matcher hitAll(Matcher... matcher) { - return featureValueOf("SearchHits", containsInAnyOrder(matcher), actual -> { - JSONArray array = (JSONArray) (actual.query("/hits/hits")); - List results = new ArrayList<>(array.length()); - for (Object element : array) { - results.add((JSONObject) element); - } - return results; + } + + public static Matcher hitAny(Matcher... matcher) { + return hitAny("/hits/hits", matcher); + } + + public static Matcher hitAll(Matcher... 
matcher) { + return featureValueOf( + "SearchHits", + containsInAnyOrder(matcher), + actual -> { + JSONArray array = (JSONArray) (actual.query("/hits/hits")); + List results = new ArrayList<>(array.length()); + for (Object element : array) { + results.add((JSONObject) element); + } + return results; }); + } + + public static Matcher kvString(String key, Matcher matcher) { + return featureValueOf("Json Match", matcher, actual -> (String) actual.query(key)); + } + + public static Matcher kvDouble(String key, Matcher matcher) { + return featureValueOf("Json Match", matcher, actual -> (Double) actual.query(key)); + } + + public static Matcher kvInt(String key, Matcher matcher) { + return featureValueOf("Json Match", matcher, actual -> (Integer) actual.query(key)); + } + + @SafeVarargs + public static void verifySchema(JSONObject response, Matcher... matchers) { + verify(response.getJSONArray("schema"), matchers); + } + + @SafeVarargs + public static void verifyDataRows(JSONObject response, Matcher... matchers) { + verify(response.getJSONArray("datarows"), matchers); + } + + @SafeVarargs + public static void verifyColumn(JSONObject response, Matcher... matchers) { + verify(response.getJSONArray("schema"), matchers); + } + + @SafeVarargs + public static void verifyOrder(JSONObject response, Matcher... matchers) { + verifyOrder(response.getJSONArray("datarows"), matchers); + } + + @SafeVarargs + @SuppressWarnings("unchecked") + public static void verifyDataRowsInOrder(JSONObject response, Matcher... matchers) { + verifyInOrder(response.getJSONArray("datarows"), matchers); + } + + @SuppressWarnings("unchecked") + public static void verify(JSONArray array, Matcher... 
matchers) { + List objects = new ArrayList<>(); + array.iterator().forEachRemaining(o -> objects.add((T) o)); + assertEquals(matchers.length, objects.size()); + assertThat(objects, containsInAnyOrder(matchers)); + } + + @SafeVarargs + @SuppressWarnings("unchecked") + public static void verifyInOrder(JSONArray array, Matcher... matchers) { + List objects = new ArrayList<>(); + array.iterator().forEachRemaining(o -> objects.add((T) o)); + assertEquals(matchers.length, objects.size()); + assertThat(objects, contains(matchers)); + } + + @SuppressWarnings("unchecked") + public static void verifySome(JSONArray array, Matcher... matchers) { + List objects = new ArrayList<>(); + array.iterator().forEachRemaining(o -> objects.add((T) o)); + + assertThat(matchers.length, greaterThan(0)); + for (Matcher matcher : matchers) { + assertThat(objects, hasItems(matcher)); } - - public static Matcher kvString(String key, Matcher matcher) { - return featureValueOf("Json Match", matcher, actual -> (String) actual.query(key)); - } - - public static Matcher kvDouble(String key, Matcher matcher) { - return featureValueOf("Json Match", matcher, actual -> (Double) actual.query(key)); + } + + @SafeVarargs + public static void verifyOrder(JSONArray array, Matcher... 
matchers) { + List objects = new ArrayList<>(); + array.iterator().forEachRemaining(o -> objects.add((T) o)); + assertEquals(matchers.length, objects.size()); + assertThat(objects, containsInRelativeOrder(matchers)); + } + + public static TypeSafeMatcher schema( + String expectedName, String expectedAlias, String expectedType) { + return new TypeSafeMatcher() { + @Override + public void describeTo(Description description) { + description.appendText( + String.format( + "(name=%s, alias=%s, type=%s)", expectedName, expectedAlias, expectedType)); + } + + @Override + protected boolean matchesSafely(JSONObject jsonObject) { + String actualName = (String) jsonObject.query("/name"); + String actualAlias = (String) jsonObject.query("/alias"); + String actualType = (String) jsonObject.query("/type"); + return expectedName.equals(actualName) + && (Strings.isNullOrEmpty(actualAlias) && Strings.isNullOrEmpty(expectedAlias) + || expectedAlias.equals(actualAlias)) + && expectedType.equals(actualType); + } + }; + } + + public static TypeSafeMatcher rows(Object... 
expectedObjects) { + return new TypeSafeMatcher() { + @Override + public void describeTo(Description description) { + description.appendText(String.join(",", Arrays.asList(expectedObjects).toString())); + } + + @Override + protected boolean matchesSafely(JSONArray array) { + List actualObjects = new ArrayList<>(); + array.iterator().forEachRemaining(actualObjects::add); + return Arrays.asList(expectedObjects).equals(actualObjects); + } + }; + } + + public static TypeSafeMatcher columnPattern(String regex) { + return new TypeSafeMatcher() { + @Override + protected boolean matchesSafely(JSONObject jsonObject) { + return ((String) jsonObject.query("/name")).matches(regex); + } + + @Override + public void describeTo(Description description) { + description.appendText(String.format("(column_pattern=%s)", regex)); + } + }; + } + + public static TypeSafeMatcher columnName(String name) { + return new TypeSafeMatcher() { + @Override + protected boolean matchesSafely(JSONObject jsonObject) { + return jsonObject.query("/name").equals(name); + } + + @Override + public void describeTo(Description description) { + description.appendText(String.format("(name=%s)", name)); + } + }; + } + + /** Tests if a string is equal to another string, ignore the case and whitespace. */ + public static class IsEqualIgnoreCaseAndWhiteSpace extends TypeSafeMatcher { + private final String string; + + public IsEqualIgnoreCaseAndWhiteSpace(String string) { + if (string == null) { + throw new IllegalArgumentException("Non-null value required"); + } + this.string = string; } - public static Matcher kvInt(String key, Matcher matcher) { - return featureValueOf("Json Match", matcher, actual -> (Integer) actual.query(key)); + @Override + public boolean matchesSafely(String item) { + return ignoreCase(ignoreSpaces(string)).equals(ignoreCase(ignoreSpaces(item))); } - @SafeVarargs - public static void verifySchema(JSONObject response, Matcher... 
matchers) { - verify(response.getJSONArray("schema"), matchers); + @Override + public void describeMismatchSafely(String item, Description mismatchDescription) { + mismatchDescription.appendText("was ").appendValue(item); } - @SafeVarargs - public static void verifyDataRows(JSONObject response, Matcher... matchers) { - verify(response.getJSONArray("datarows"), matchers); + @Override + public void describeTo(Description description) { + description + .appendText("a string equal to ") + .appendValue(string) + .appendText(" ignore case and white space"); } - @SafeVarargs - public static void verifyColumn(JSONObject response, Matcher... matchers) { - verify(response.getJSONArray("schema"), matchers); + public String ignoreSpaces(String toBeStripped) { + return toBeStripped.replaceAll("\\s+", "").trim(); } - @SafeVarargs - public static void verifyOrder(JSONObject response, Matcher... matchers) { - verifyOrder(response.getJSONArray("datarows"), matchers); + public String ignoreCase(String toBeLower) { + return toBeLower.toLowerCase(); } - @SafeVarargs - @SuppressWarnings("unchecked") - public static void verifyDataRowsInOrder(JSONObject response, Matcher... matchers) { - verifyInOrder(response.getJSONArray("datarows"), matchers); - } - - @SuppressWarnings("unchecked") - public static void verify(JSONArray array, Matcher... matchers) { - List objects = new ArrayList<>(); - array.iterator().forEachRemaining(o -> objects.add((T) o)); - assertEquals(matchers.length, objects.size()); - assertThat(objects, containsInAnyOrder(matchers)); - } - - @SafeVarargs - @SuppressWarnings("unchecked") - public static void verifyInOrder(JSONArray array, Matcher... matchers) { - List objects = new ArrayList<>(); - array.iterator().forEachRemaining(o -> objects.add((T) o)); - assertEquals(matchers.length, objects.size()); - assertThat(objects, contains(matchers)); - } - - @SuppressWarnings("unchecked") - public static void verifySome(JSONArray array, Matcher... 
matchers) { - List objects = new ArrayList<>(); - array.iterator().forEachRemaining(o -> objects.add((T) o)); - - assertThat(matchers.length, greaterThan(0)); - for (Matcher matcher : matchers) { - assertThat(objects, hasItems(matcher)); - } - } - - @SafeVarargs - public static void verifyOrder(JSONArray array, Matcher... matchers) { - List objects = new ArrayList<>(); - array.iterator().forEachRemaining(o -> objects.add((T) o)); - assertEquals(matchers.length, objects.size()); - assertThat(objects, containsInRelativeOrder(matchers)); - } - - public static TypeSafeMatcher schema(String expectedName, String expectedAlias, String expectedType) { - return new TypeSafeMatcher() { - @Override - public void describeTo(Description description) { - description.appendText( - String.format("(name=%s, alias=%s, type=%s)", expectedName, expectedAlias, expectedType)); - } - - @Override - protected boolean matchesSafely(JSONObject jsonObject) { - String actualName = (String) jsonObject.query("/name"); - String actualAlias = (String) jsonObject.query("/alias"); - String actualType = (String) jsonObject.query("/type"); - return expectedName.equals(actualName) && - (Strings.isNullOrEmpty(actualAlias) && Strings.isNullOrEmpty(expectedAlias) || - expectedAlias.equals(actualAlias)) && - expectedType.equals(actualType); - } - }; - } - - public static TypeSafeMatcher rows(Object... 
expectedObjects) { - return new TypeSafeMatcher() { - @Override - public void describeTo(Description description) { - description.appendText(String.join(",", Arrays.asList(expectedObjects).toString())); - } - - @Override - protected boolean matchesSafely(JSONArray array) { - List actualObjects = new ArrayList<>(); - array.iterator().forEachRemaining(actualObjects::add); - return Arrays.asList(expectedObjects).equals(actualObjects); - } - }; - } - - public static TypeSafeMatcher columnPattern(String regex) { - return new TypeSafeMatcher() { - @Override - protected boolean matchesSafely(JSONObject jsonObject) { - return ((String)jsonObject.query("/name")).matches(regex); - } - - @Override - public void describeTo(Description description) { - description.appendText(String.format("(column_pattern=%s)", regex)); - } - }; - } - - public static TypeSafeMatcher columnName(String name) { - return new TypeSafeMatcher() { - @Override - protected boolean matchesSafely(JSONObject jsonObject) { - return jsonObject.query("/name").equals(name); - } - - @Override - public void describeTo(Description description) { - description.appendText(String.format("(name=%s)", name)); - } - }; - } - - - /** - * Tests if a string is equal to another string, ignore the case and whitespace. 
- */ - public static class IsEqualIgnoreCaseAndWhiteSpace extends TypeSafeMatcher { - private final String string; - - public IsEqualIgnoreCaseAndWhiteSpace(String string) { - if (string == null) { - throw new IllegalArgumentException("Non-null value required"); - } - this.string = string; - } - - @Override - public boolean matchesSafely(String item) { - return ignoreCase(ignoreSpaces(string)).equals(ignoreCase(ignoreSpaces(item))); - } - - @Override - public void describeMismatchSafely(String item, Description mismatchDescription) { - mismatchDescription.appendText("was ").appendValue(item); - } - - @Override - public void describeTo(Description description) { - description.appendText("a string equal to ") - .appendValue(string) - .appendText(" ignore case and white space"); - } - - public String ignoreSpaces(String toBeStripped) { - return toBeStripped.replaceAll("\\s+", "").trim(); - } - - public String ignoreCase(String toBeLower) { - return toBeLower.toLowerCase(); - } - - public static Matcher equalToIgnoreCaseAndWhiteSpace(String expectedString) { - return new IsEqualIgnoreCaseAndWhiteSpace(expectedString); - } + public static Matcher equalToIgnoreCaseAndWhiteSpace(String expectedString) { + return new IsEqualIgnoreCaseAndWhiteSpace(expectedString); } + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/util/MultipleIndexClusterUtils.java b/legacy/src/test/java/org/opensearch/sql/legacy/util/MultipleIndexClusterUtils.java index ff15cd698c..c3513e2a01 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/util/MultipleIndexClusterUtils.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/util/MultipleIndexClusterUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.util; import static org.mockito.Matchers.any; @@ -24,170 +23,177 @@ import org.opensearch.cluster.service.ClusterService; import org.opensearch.sql.legacy.esdomain.LocalClusterState; -/** - * Test Utility which provide the 
cluster have 2 indices. - */ +/** Test Utility which provide the cluster have 2 indices. */ public class MultipleIndexClusterUtils { - public final static String INDEX_ACCOUNT_1 = "account1"; - public final static String INDEX_ACCOUNT_2 = "account2"; - public final static String INDEX_ACCOUNT_ALL = "account*"; + public static final String INDEX_ACCOUNT_1 = "account1"; + public static final String INDEX_ACCOUNT_2 = "account2"; + public static final String INDEX_ACCOUNT_ALL = "account*"; - public static String INDEX_ACCOUNT_1_MAPPING = "{\n" + - " \"field_mappings\": {\n" + - " \"mappings\": {\n" + - " \"account1\": {\n" + - " \"properties\": {\n" + - " \"id\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"address\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " },\n" + - " \"fielddata\": true\n" + - " },\n" + - " \"age\": {\n" + - " \"type\": \"integer\"\n" + - " },\n" + - " \"projects\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"name\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " },\n" + - " \"fielddata\": true\n" + - " },\n" + - " \"started_year\": {\n" + - " \"type\": \"int\"\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " },\n" + - " \"settings\": {\n" + - " \"index\": {\n" + - " \"number_of_shards\": 1,\n" + - " \"number_of_replicas\": 0,\n" + - " \"version\": {\n" + - " \"created\": \"6050399\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"mapping_version\": \"1\",\n" + - " \"settings_version\": \"1\"\n" + - " }\n" + - "}"; + public static String INDEX_ACCOUNT_1_MAPPING = + "{\n" + + " \"field_mappings\": {\n" + + " \"mappings\": {\n" + + " \"account1\": {\n" + + " \"properties\": {\n" + + " \"id\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"address\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + 
" \"type\": \"keyword\"\n" + + " }\n" + + " },\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"age\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + " \"projects\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"name\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " },\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"started_year\": {\n" + + " \"type\": \"int\"\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " },\n" + + " \"settings\": {\n" + + " \"index\": {\n" + + " \"number_of_shards\": 1,\n" + + " \"number_of_replicas\": 0,\n" + + " \"version\": {\n" + + " \"created\": \"6050399\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"mapping_version\": \"1\",\n" + + " \"settings_version\": \"1\"\n" + + " }\n" + + "}"; - /** - * The difference with account1. - * 1. missing address. - * 2. age has different type. - * 3. projects.started_year has different type. - */ - public static String INDEX_ACCOUNT_2_MAPPING = "{\n" + - " \"field_mappings\": {\n" + - " \"mappings\": {\n" + - " \"account2\": {\n" + - " \"properties\": {\n" + - " \"id\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"age\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"projects\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"name\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " },\n" + - " \"fielddata\": true\n" + - " },\n" + - " \"started_year\": {\n" + - " \"type\": \"long\"\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " },\n" + - " \"settings\": {\n" + - " \"index\": {\n" + - " \"number_of_shards\": 1,\n" + - " \"number_of_replicas\": 0,\n" + - " \"version\": {\n" + - " \"created\": \"6050399\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"mapping_version\": \"1\",\n" + - " \"settings_version\": \"1\"\n" + - " }\n" + - "}"; + /** + * 
The difference with account1. 1. missing address. 2. age has different type. 3. + * projects.started_year has different type. + */ + public static String INDEX_ACCOUNT_2_MAPPING = + "{\n" + + " \"field_mappings\": {\n" + + " \"mappings\": {\n" + + " \"account2\": {\n" + + " \"properties\": {\n" + + " \"id\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"age\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"projects\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"name\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " },\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"started_year\": {\n" + + " \"type\": \"long\"\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " },\n" + + " \"settings\": {\n" + + " \"index\": {\n" + + " \"number_of_shards\": 1,\n" + + " \"number_of_replicas\": 0,\n" + + " \"version\": {\n" + + " \"created\": \"6050399\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"mapping_version\": \"1\",\n" + + " \"settings_version\": \"1\"\n" + + " }\n" + + "}"; - public static void mockMultipleIndexEnv() { - mockLocalClusterState( - Map.of(INDEX_ACCOUNT_1, buildIndexMapping(INDEX_ACCOUNT_1, INDEX_ACCOUNT_1_MAPPING), - INDEX_ACCOUNT_2, buildIndexMapping(INDEX_ACCOUNT_2, INDEX_ACCOUNT_2_MAPPING), - INDEX_ACCOUNT_ALL, buildIndexMapping(Map.of(INDEX_ACCOUNT_1, INDEX_ACCOUNT_1_MAPPING, - INDEX_ACCOUNT_2, INDEX_ACCOUNT_2_MAPPING)))); - } + public static void mockMultipleIndexEnv() { + mockLocalClusterState( + Map.of( + INDEX_ACCOUNT_1, + buildIndexMapping(INDEX_ACCOUNT_1, INDEX_ACCOUNT_1_MAPPING), + INDEX_ACCOUNT_2, + buildIndexMapping(INDEX_ACCOUNT_2, INDEX_ACCOUNT_2_MAPPING), + INDEX_ACCOUNT_ALL, + buildIndexMapping( + Map.of( + INDEX_ACCOUNT_1, + INDEX_ACCOUNT_1_MAPPING, + INDEX_ACCOUNT_2, + INDEX_ACCOUNT_2_MAPPING)))); + } - public static void mockLocalClusterState(Map> indexMapping) { - 
LocalClusterState.state().setClusterService(mockClusterService(indexMapping)); - LocalClusterState.state().setResolver(mockIndexNameExpressionResolver()); - LocalClusterState.state().setPluginSettings(mockPluginSettings()); - } + public static void mockLocalClusterState(Map> indexMapping) { + LocalClusterState.state().setClusterService(mockClusterService(indexMapping)); + LocalClusterState.state().setResolver(mockIndexNameExpressionResolver()); + LocalClusterState.state().setPluginSettings(mockPluginSettings()); + } + public static ClusterService mockClusterService( + Map> indexMapping) { + ClusterService mockService = mock(ClusterService.class); + ClusterState mockState = mock(ClusterState.class); + Metadata mockMetaData = mock(Metadata.class); - public static ClusterService mockClusterService(Map> - indexMapping) { - ClusterService mockService = mock(ClusterService.class); - ClusterState mockState = mock(ClusterState.class); - Metadata mockMetaData = mock(Metadata.class); - - when(mockService.state()).thenReturn(mockState); - when(mockState.metadata()).thenReturn(mockMetaData); - try { - for (var entry : indexMapping.entrySet()) { - when(mockMetaData.findMappings(eq(new String[]{entry.getKey()}), any())) - .thenReturn(entry.getValue()); - } - } catch (IOException e) { - throw new IllegalStateException(e); - } - return mockService; + when(mockService.state()).thenReturn(mockState); + when(mockState.metadata()).thenReturn(mockMetaData); + try { + for (var entry : indexMapping.entrySet()) { + when(mockMetaData.findMappings(eq(new String[] {entry.getKey()}), any())) + .thenReturn(entry.getValue()); + } + } catch (IOException e) { + throw new IllegalStateException(e); } + return mockService; + } - private static Map buildIndexMapping(Map indexMapping) { - return indexMapping.entrySet().stream().collect(Collectors.toUnmodifiableMap( - Map.Entry::getKey, e -> { - try { - return IndexMetadata.fromXContent(createParser(e.getValue())).mapping(); - } catch (IOException ex) 
{ - throw new IllegalStateException(ex); - } - })); - - } + private static Map buildIndexMapping(Map indexMapping) { + return indexMapping.entrySet().stream() + .collect( + Collectors.toUnmodifiableMap( + Map.Entry::getKey, + e -> { + try { + return IndexMetadata.fromXContent(createParser(e.getValue())).mapping(); + } catch (IOException ex) { + throw new IllegalStateException(ex); + } + })); + } - private static Map buildIndexMapping(String index, String mapping) { - try { - return Map.of(index, IndexMetadata.fromXContent(createParser(mapping)).mapping()); - } catch (IOException e) { - throw new IllegalStateException(e); - } + private static Map buildIndexMapping(String index, String mapping) { + try { + return Map.of(index, IndexMetadata.fromXContent(createParser(mapping)).mapping()); + } catch (IOException e) { + throw new IllegalStateException(e); } + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/util/SqlExplainUtils.java b/legacy/src/test/java/org/opensearch/sql/legacy/util/SqlExplainUtils.java index 6228b971e2..3ad1cae211 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/util/SqlExplainUtils.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/util/SqlExplainUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.util; import com.alibaba.druid.sql.parser.ParserException; @@ -15,22 +14,20 @@ import org.opensearch.sql.legacy.query.OpenSearchActionFactory; import org.opensearch.sql.legacy.query.QueryAction; -/** - * Test utils class that explains a query - */ +/** Test utils class that explains a query */ public class SqlExplainUtils { - public static String explain(String query) { - try { - Client mockClient = Mockito.mock(Client.class); - CheckScriptContents.stubMockClient(mockClient); - QueryAction queryAction = OpenSearchActionFactory.create(mockClient, query); + public static String explain(String query) { + try { + Client mockClient = Mockito.mock(Client.class); + 
CheckScriptContents.stubMockClient(mockClient); + QueryAction queryAction = OpenSearchActionFactory.create(mockClient, query); - return queryAction.explain().explain(); - } catch (SqlParseException | SQLFeatureNotSupportedException | SQLFeatureDisabledException e) { - throw new ParserException("Illegal sql expr in: " + query); - } + return queryAction.explain().explain(); + } catch (SqlParseException | SQLFeatureNotSupportedException | SQLFeatureDisabledException e) { + throw new ParserException("Illegal sql expr in: " + query); } + } - private SqlExplainUtils() {} + private SqlExplainUtils() {} } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/util/SqlParserUtils.java b/legacy/src/test/java/org/opensearch/sql/legacy/util/SqlParserUtils.java index a1c023cbff..90ccc705fd 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/util/SqlParserUtils.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/util/SqlParserUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.util; import com.alibaba.druid.sql.ast.SQLExpr; @@ -13,24 +12,23 @@ import org.opensearch.sql.legacy.parser.ElasticSqlExprParser; import org.opensearch.sql.legacy.rewriter.parent.SQLExprParentSetter; -/** - * Test utils class include all SQLExpr related method. - */ +/** Test utils class include all SQLExpr related method. 
*/ public class SqlParserUtils { - /** - * Parse sql with {@link ElasticSqlExprParser} - * @param sql sql - * @return {@link SQLQueryExpr} - */ - public static SQLQueryExpr parse(String sql) { - ElasticSqlExprParser parser = new ElasticSqlExprParser(sql); - SQLExpr expr = parser.expr(); - if (parser.getLexer().token() != Token.EOF) { - throw new ParserException("Illegal sql: " + sql); - } - SQLQueryExpr queryExpr = (SQLQueryExpr) expr; - queryExpr.accept(new SQLExprParentSetter()); - return (SQLQueryExpr) expr; + /** + * Parse sql with {@link ElasticSqlExprParser} + * + * @param sql sql + * @return {@link SQLQueryExpr} + */ + public static SQLQueryExpr parse(String sql) { + ElasticSqlExprParser parser = new ElasticSqlExprParser(sql); + SQLExpr expr = parser.expr(); + if (parser.getLexer().token() != Token.EOF) { + throw new ParserException("Illegal sql: " + sql); } + SQLQueryExpr queryExpr = (SQLQueryExpr) expr; + queryExpr.accept(new SQLExprParentSetter()); + return (SQLQueryExpr) expr; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/util/TestUtils.java b/legacy/src/test/java/org/opensearch/sql/legacy/util/TestUtils.java index 27be512fc0..ab9a0ded14 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/util/TestUtils.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/util/TestUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.util; import static com.google.common.base.Strings.isNullOrEmpty; @@ -36,792 +35,809 @@ public class TestUtils { - /** - * Create test index by REST client. 
- * @param client client connection - * @param indexName test index name - * @param mapping test index mapping or null if no predefined mapping - */ - public static void createIndexByRestClient(RestClient client, String indexName, String mapping) { - Request request = new Request("PUT", "/" + indexName); - if (!isNullOrEmpty(mapping)) { - request.setJsonEntity(mapping); - } - performRequest(client, request); - } - - /** - * https://github.com/elastic/elasticsearch/pull/49959 - * Deprecate creation of dot-prefixed index names except for hidden and system indices. - * Create hidden index by REST client. - * @param client client connection - * @param indexName test index name - * @param mapping test index mapping or null if no predefined mapping - */ - public static void createHiddenIndexByRestClient(RestClient client, String indexName, String mapping) { - Request request = new Request("PUT", "/" + indexName); - JSONObject jsonObject = isNullOrEmpty(mapping) ? new JSONObject() : new JSONObject(mapping); - jsonObject.put("settings", new JSONObject("{\"index\":{\"hidden\":true}}")); - request.setJsonEntity(jsonObject.toString()); - - performRequest(client, request); + /** + * Create test index by REST client. 
+ * + * @param client client connection + * @param indexName test index name + * @param mapping test index mapping or null if no predefined mapping + */ + public static void createIndexByRestClient(RestClient client, String indexName, String mapping) { + Request request = new Request("PUT", "/" + indexName); + if (!isNullOrEmpty(mapping)) { + request.setJsonEntity(mapping); } - - /** - * Check if index already exists by OpenSearch index exists API which returns: - * 200 - specified indices or aliases exist - * 404 - one or more indices specified or aliases do not exist - * @param client client connection - * @param indexName index name - * @return true for index exist - */ - public static boolean isIndexExist(RestClient client, String indexName) { - try { - Response response = client.performRequest(new Request("HEAD", "/" + indexName)); - return (response.getStatusLine().getStatusCode() == 200); - } catch (IOException e) { - throw new IllegalStateException("Failed to perform request", e); - } + performRequest(client, request); + } + + /** + * https://github.com/elastic/elasticsearch/pull/49959 Deprecate creation of dot-prefixed index + * names except for hidden and system indices. Create hidden index by REST client. + * + * @param client client connection + * @param indexName test index name + * @param mapping test index mapping or null if no predefined mapping + */ + public static void createHiddenIndexByRestClient( + RestClient client, String indexName, String mapping) { + Request request = new Request("PUT", "/" + indexName); + JSONObject jsonObject = isNullOrEmpty(mapping) ? 
new JSONObject() : new JSONObject(mapping); + jsonObject.put("settings", new JSONObject("{\"index\":{\"hidden\":true}}")); + request.setJsonEntity(jsonObject.toString()); + + performRequest(client, request); + } + + /** + * Check if index already exists by OpenSearch index exists API which returns: 200 - specified + * indices or aliases exist 404 - one or more indices specified or aliases do not exist + * + * @param client client connection + * @param indexName index name + * @return true for index exist + */ + public static boolean isIndexExist(RestClient client, String indexName) { + try { + Response response = client.performRequest(new Request("HEAD", "/" + indexName)); + return (response.getStatusLine().getStatusCode() == 200); + } catch (IOException e) { + throw new IllegalStateException("Failed to perform request", e); } - - /** - * Load test data set by REST client. - * @param client client connection - * @param indexName index name - * @param dataSetFilePath file path of test data set - * @throws IOException - */ - public static void loadDataByRestClient(RestClient client, String indexName, String dataSetFilePath) throws IOException { - Path path = Paths.get(getResourceFilePath(dataSetFilePath)); - Request request = new Request("POST", "/" + indexName + "/_bulk?refresh=true"); - request.setJsonEntity(new String(Files.readAllBytes(path))); - performRequest(client, request); + } + + /** + * Load test data set by REST client. 
+ * + * @param client client connection + * @param indexName index name + * @param dataSetFilePath file path of test data set + * @throws IOException + */ + public static void loadDataByRestClient( + RestClient client, String indexName, String dataSetFilePath) throws IOException { + Path path = Paths.get(getResourceFilePath(dataSetFilePath)); + Request request = new Request("POST", "/" + indexName + "/_bulk?refresh=true"); + request.setJsonEntity(new String(Files.readAllBytes(path))); + performRequest(client, request); + } + + /** + * Perform a request by REST client. + * + * @param client client connection + * @param request request object + */ + public static Response performRequest(RestClient client, Request request) { + try { + Response response = client.performRequest(request); + int status = response.getStatusLine().getStatusCode(); + if (status >= 400) { + throw new IllegalStateException("Failed to perform request. Error code: " + status); + } + return response; + } catch (IOException e) { + throw new IllegalStateException("Failed to perform request", e); } - - /** - * Perform a request by REST client. - * @param client client connection - * @param request request object - */ - public static Response performRequest(RestClient client, Request request) { - try { - Response response = client.performRequest(request); - int status = response.getStatusLine().getStatusCode(); - if (status >= 400) { - throw new IllegalStateException("Failed to perform request. 
Error code: " + status); - } - return response; - } catch (IOException e) { - throw new IllegalStateException("Failed to perform request", e); + } + + public static String getAccountIndexMapping() { + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"gender\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " }," + + " \"address\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " }," + + " \"firstname\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true,\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }" + + " }" + + " }," + + " \"lastname\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true,\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }" + + " }" + + " }," + + " \"state\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true,\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }" + + " }" + + " }" + + " }" + + " }" + + "}"; + } + + public static String getPhraseIndexMapping() { + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"phrase\": {\n" + + " \"type\": \"text\",\n" + + " \"store\": true\n" + + " }" + + " }" + + " }" + + "}"; + } + + public static String getDogIndexMapping() { + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"dog_name\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " }" + + " }" + + " }" + + "}"; + } + + public static String getDogs2IndexMapping() { + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"dog_name\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"holdersName\": {\n" + + " \"type\": \"keyword\"\n" + + " }" + + " }" + + " }" + + "}"; + } + + public static String getDogs3IndexMapping() { + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " 
\"holdersName\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"color\": {\n" + + " \"type\": \"text\"\n" + + " }" + + " }" + + " }" + + "}"; + } + + public static String getPeople2IndexMapping() { + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"firstname\": {\n" + + " \"type\": \"keyword\"\n" + + " }" + + " }" + + " }" + + "}"; + } + + public static String getGameOfThronesIndexMapping() { + return "{ \"mappings\": { " + + " \"properties\": {\n" + + " \"nickname\": {\n" + + " \"type\":\"text\", " + + " \"fielddata\":true" + + " },\n" + + " \"name\": {\n" + + " \"properties\": {\n" + + " \"firstname\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"lastname\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"ofHerName\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + " \"ofHisName\": {\n" + + " \"type\": \"integer\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"house\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"gender\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " }\n" + + " }" + + "} } }"; + } + + // System + + public static String getOdbcIndexMapping() { + return "{\n" + + "\t\"mappings\" :{\n" + + "\t\t\"properties\":{\n" + + "\t\t\t\"odbc_time\":{\n" + + "\t\t\t\t\"type\":\"date\",\n" + + "\t\t\t\t\"format\": \"'{ts' ''yyyy-MM-dd HH:mm:ss.SSS'''}'\"\n" + + "\t\t\t},\n" + + "\t\t\t\"docCount\":{\n" + + "\t\t\t\t\"type\":\"text\"\n" + + "\t\t\t}\n" + + "\t\t}\n" + + "\t}\n" + + "}"; + } + + public static String getLocationIndexMapping() { + return "{\n" + + "\t\"mappings\" :{\n" + + "\t\t\"properties\":{\n" + + "\t\t\t\"place\":{\n" + + "\t\t\t\t\"type\":\"geo_shape\"\n" + + + // "\t\t\t\t\"tree\": \"quadtree\",\n" + // Field tree and precision are deprecated in + // 
OpenSearch + // "\t\t\t\t\"precision\": \"10km\"\n" + + "\t\t\t},\n" + + "\t\t\t\"center\":{\n" + + "\t\t\t\t\"type\":\"geo_point\"\n" + + "\t\t\t},\n" + + "\t\t\t\"description\":{\n" + + "\t\t\t\t\"type\":\"text\"\n" + + "\t\t\t}\n" + + "\t\t}\n" + + "\t}\n" + + "}"; + } + + public static String getEmployeeNestedTypeIndexMapping() { + return "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"comments\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"date\": {\n" + + " \"type\": \"date\"\n" + + " },\n" + + " \"likes\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"message\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " },\n" + + " \"id\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"name\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " },\n" + + " \"projects\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"address\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"city\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " },\n" + + " \"state\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " },\n" + + " \"name\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " },\n" + + " \"fielddata\": true\n" + + " },\n" + + " \"started_year\": {\n" + + " \"type\": \"long\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"title\": {\n" + + " \"type\": \"text\",\n" + + " 
\"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}\n"; + } + + public static String getNestedTypeIndexMapping() { + return "{ \"mappings\": {\n" + + " \"properties\": {\n" + + " \"message\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"info\": {\n" + + " \"type\": \"keyword\",\n" + + " \"index\": \"true\"\n" + + " },\n" + + " \"author\": {\n" + + " \"type\": \"keyword\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\" : 256\n" + + " }\n" + + " },\n" + + " \"index\": \"true\"\n" + + " },\n" + + " \"dayOfWeek\": {\n" + + " \"type\": \"long\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"comment\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"data\": {\n" + + " \"type\": \"keyword\",\n" + + " \"index\": \"true\"\n" + + " },\n" + + " \"likes\": {\n" + + " \"type\": \"long\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"myNum\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"someField\": {\n" + + " \"type\": \"keyword\",\n" + + " \"index\": \"true\"\n" + + " }\n" + + " }\n" + + " }\n" + + " }}"; + } + + public static String getJoinTypeIndexMapping() { + return "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"join_field\": {\n" + + " \"type\": \"join\",\n" + + " \"relations\": {\n" + + " \"parentType\": \"childrenType\"\n" + + " }\n" + + " },\n" + + " \"parentTile\": {\n" + + " \"index\": \"true\",\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"dayOfWeek\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"author\": {\n" + + " \"index\": \"true\",\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"info\": {\n" + + " \"index\": \"true\",\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; + } + + public static String getBankIndexMapping() { + return "{\n" + + " \"mappings\": {\n" + + " 
\"properties\": {\n" + + " \"account_number\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"address\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"age\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + " \"balance\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"birthdate\": {\n" + + " \"type\": \"date\"\n" + + " },\n" + + " \"city\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"email\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"employer\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"firstname\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"gender\": {\n" + + " \"type\": \"text\",\n" + + " \"fielddata\": true\n" + + " }," + + " \"lastname\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"male\": {\n" + + " \"type\": \"boolean\"\n" + + " },\n" + + " \"state\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; + } + + public static String getBankWithNullValuesIndexMapping() { + return "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"account_number\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"address\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"age\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + " \"balance\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"gender\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"firstname\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"lastname\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; + } + + public static String getOrderIndexMapping() { + return "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"id\": {\n" + + " \"type\": \"long\"\n" + + " },\n" + + " \"name\": {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " 
\"ignore_above\": 256\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; + } + + public static String getWeblogsIndexMapping() { + return "{\n" + + " \"mappings\": {\n" + + " \"properties\": {\n" + + " \"host\": {\n" + + " \"type\": \"ip\"\n" + + " },\n" + + " \"method\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"url\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"response\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"bytes\": {\n" + + " \"type\": \"text\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; + } + + public static String getDateIndexMapping() { + return "{ \"mappings\": {" + + " \"properties\": {\n" + + " \"date_keyword\": {\n" + + " \"type\": \"keyword\",\n" + + " \"ignore_above\": 256\n" + + " }" + + " }" + + " }" + + "}"; + } + + public static String getDateTimeIndexMapping() { + return "{" + + " \"mappings\": {" + + " \"properties\": {" + + " \"birthday\": {" + + " \"type\": \"date\"" + + " }" + + " }" + + " }" + + "}"; + } + + public static String getNestedSimpleIndexMapping() { + return "{" + + " \"mappings\": {" + + " \"properties\": {" + + " \"address\": {" + + " \"type\": \"nested\"," + + " \"properties\": {" + + " \"city\": {" + + " \"type\": \"text\"," + + " \"fields\": {" + + " \"keyword\": {" + + " \"type\": \"keyword\"," + + " \"ignore_above\": 256" + + " }" + + " }" + + " }," + + " \"state\": {" + + " \"type\": \"text\"," + + " \"fields\": {" + + " \"keyword\": {" + + " \"type\": \"keyword\"," + + " \"ignore_above\": 256" + + " }" + + " }" + + " }" + + " }" + + " }," + + " \"age\": {" + + " \"type\": \"long\"" + + " }," + + " \"id\": {" + + " \"type\": \"long\"" + + " }," + + " \"name\": {" + + " \"type\": \"text\"," + + " \"fields\": {" + + " \"keyword\": {" + + " \"type\": \"keyword\"," + + " \"ignore_above\": 256" + + " }" + + " }" + + " }" + + " }" + + " }" + + "}"; + } + + public static void loadBulk(Client client, String jsonPath, String defaultIndex) + throws Exception { + 
System.out.println(String.format("Loading file %s into OpenSearch cluster", jsonPath)); + String absJsonPath = getResourceFilePath(jsonPath); + + BulkRequest bulkRequest = new BulkRequest(); + try (final InputStream stream = new FileInputStream(absJsonPath); + final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); + final BufferedReader br = new BufferedReader(streamReader)) { + + while (true) { + + String actionLine = br.readLine(); + if (actionLine == null || actionLine.trim().isEmpty()) { + break; } + String sourceLine = br.readLine(); + JSONObject actionJson = new JSONObject(actionLine); + + IndexRequest indexRequest = new IndexRequest(); + indexRequest.index(defaultIndex); + if (actionJson.getJSONObject("index").has("_id")) { + String docId = actionJson.getJSONObject("index").getString("_id"); + indexRequest.id(docId); + } + if (actionJson.getJSONObject("index").has("_routing")) { + String routing = actionJson.getJSONObject("index").getString("_routing"); + indexRequest.routing(routing); + } + indexRequest.source(sourceLine, XContentType.JSON); + bulkRequest.add(indexRequest); + } } - public static String getAccountIndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"gender\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " }," + - " \"address\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " }," + - " \"firstname\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true,\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }" + - " }" + - " }," + - " \"lastname\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true,\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }" + - " }" + - " }," + - " \"state\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true,\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - 
" \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }" + - " }" + - " }" + - " }"+ - " }" + - "}"; - } - - public static String getPhraseIndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"phrase\": {\n" + - " \"type\": \"text\",\n" + - " \"store\": true\n" + - " }" + - " }"+ - " }" + - "}"; - } - - public static String getDogIndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"dog_name\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " }"+ - " }"+ - " }" + - "}"; - } - - public static String getDogs2IndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"dog_name\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " },\n"+ - " \"holdersName\": {\n" + - " \"type\": \"keyword\"\n" + - " }"+ - " }"+ - " }" + - "}"; - } - - public static String getDogs3IndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"holdersName\": {\n" + - " \"type\": \"keyword\"\n" + - " },\n"+ - " \"color\": {\n" + - " \"type\": \"text\"\n" + - " }"+ - " }"+ - " }" + - "}"; - } - - public static String getPeople2IndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"firstname\": {\n" + - " \"type\": \"keyword\"\n" + - " }"+ - " }"+ - " }" + - "}"; - } + BulkResponse bulkResponse = client.bulk(bulkRequest).actionGet(); - public static String getGameOfThronesIndexMapping() { - return "{ \"mappings\": { " + - " \"properties\": {\n" + - " \"nickname\": {\n" + - " \"type\":\"text\", "+ - " \"fielddata\":true"+ - " },\n"+ - " \"name\": {\n" + - " \"properties\": {\n" + - " \"firstname\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " },\n" + - " \"lastname\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " },\n" + - " \"ofHerName\": {\n" + - " \"type\": \"integer\"\n" + - " },\n" + - " \"ofHisName\": {\n" + - " \"type\": \"integer\"\n" + - " }\n" + - " }\n" + - " },\n" + - 
" \"house\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"gender\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " }\n" + - " }" + - "} } }"; + if (bulkResponse.hasFailures()) { + throw new Exception( + "Failed to load test data into index " + + defaultIndex + + ", " + + bulkResponse.buildFailureMessage()); } - - // System - - public static String getOdbcIndexMapping() { - return "{\n" + - "\t\"mappings\" :{\n" + - "\t\t\"properties\":{\n" + - "\t\t\t\"odbc_time\":{\n" + - "\t\t\t\t\"type\":\"date\",\n" + - "\t\t\t\t\"format\": \"'{ts' ''yyyy-MM-dd HH:mm:ss.SSS'''}'\"\n" + - "\t\t\t},\n" + - "\t\t\t\"docCount\":{\n" + - "\t\t\t\t\"type\":\"text\"\n" + - "\t\t\t}\n" + - "\t\t}\n" + - "\t}\n" + - "}"; + System.out.println(bulkResponse.getItems().length + " documents loaded."); + // ensure the documents are searchable + client.admin().indices().prepareRefresh(defaultIndex).execute().actionGet(); + } + + public static String getResourceFilePath(String relPath) { + String projectRoot = System.getProperty("project.root", null); + if (projectRoot == null) { + return new File(relPath).getAbsolutePath(); + } else { + return new File(projectRoot + "/" + relPath).getAbsolutePath(); } + } - public static String getLocationIndexMapping() { - return "{\n" + - "\t\"mappings\" :{\n" + - "\t\t\"properties\":{\n" + - "\t\t\t\"place\":{\n" + - "\t\t\t\t\"type\":\"geo_shape\"\n" + - //"\t\t\t\t\"tree\": \"quadtree\",\n" + // Field tree and precision are deprecated in OpenSearch - //"\t\t\t\t\"precision\": \"10km\"\n" + - "\t\t\t},\n" + - "\t\t\t\"center\":{\n" + - "\t\t\t\t\"type\":\"geo_point\"\n" + - "\t\t\t},\n" + - "\t\t\t\"description\":{\n" + - "\t\t\t\t\"type\":\"text\"\n" + - "\t\t\t}\n" + - "\t\t}\n" + - "\t}\n" + - "}"; - } + public static String getResponseBody(Response response) throws 
IOException { - public static String getEmployeeNestedTypeIndexMapping() { - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"comments\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"date\": {\n" + - " \"type\": \"date\"\n" + - " },\n" + - " \"likes\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"message\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " },\n" + - " \"id\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"name\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " },\n" + - " \"projects\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"address\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"city\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " },\n" + - " \"state\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " },\n" + - " \"name\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " },\n" + - " \"fielddata\": true\n" + - " },\n" + - " \"started_year\": {\n" + - " \"type\": \"long\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"title\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - "}\n"; - } + return getResponseBody(response, false); + } + public static String 
getResponseBody(Response response, boolean retainNewLines) + throws IOException { + final StringBuilder sb = new StringBuilder(); - public static String getNestedTypeIndexMapping() { - return "{ \"mappings\": {\n" + - " \"properties\": {\n" + - " \"message\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"info\": {\n" + - " \"type\": \"keyword\",\n" + - " \"index\": \"true\"\n" + - " },\n" + - " \"author\": {\n" + - " \"type\": \"keyword\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\" : 256\n" + - " }\n" + - " },\n" + - " \"index\": \"true\"\n" + - " },\n" + - " \"dayOfWeek\": {\n" + - " \"type\": \"long\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"comment\": {\n" + - " \"type\": \"nested\",\n" + - " \"properties\": {\n" + - " \"data\": {\n" + - " \"type\": \"keyword\",\n" + - " \"index\": \"true\"\n" + - " },\n" + - " \"likes\": {\n" + - " \"type\": \"long\"\n" + - " }\n" + - " }\n" + - " },\n" + - " \"myNum\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"someField\": {\n" + - " \"type\": \"keyword\",\n" + - " \"index\": \"true\"\n" + - " }\n" + - " }\n" + - " }\n" + - " }}"; - } - - public static String getJoinTypeIndexMapping() { - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"join_field\": {\n" + - " \"type\": \"join\",\n" + - " \"relations\": {\n" + - " \"parentType\": \"childrenType\"\n" + - " }\n" + - " },\n" + - " \"parentTile\": {\n" + - " \"index\": \"true\",\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"dayOfWeek\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"author\": {\n" + - " \"index\": \"true\",\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"info\": {\n" + - " \"index\": \"true\",\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; - } - - public static String getBankIndexMapping() { - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"account_number\": {\n" + - 
" \"type\": \"long\"\n" + - " },\n" + - " \"address\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"age\": {\n" + - " \"type\": \"integer\"\n" + - " },\n" + - " \"balance\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"birthdate\": {\n" + - " \"type\": \"date\"\n" + - " },\n" + - " \"city\": {\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"email\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"employer\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"firstname\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"gender\": {\n" + - " \"type\": \"text\",\n" + - " \"fielddata\": true\n" + - " }," + - " \"lastname\": {\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"male\": {\n" + - " \"type\": \"boolean\"\n" + - " },\n" + - " \"state\": {\n" + - " \"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; - } - - public static String getBankWithNullValuesIndexMapping() { - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"account_number\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"address\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"age\": {\n" + - " \"type\": \"integer\"\n" + - " },\n" + - " \"balance\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"gender\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"firstname\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"lastname\": {\n" + - " \"type\": \"keyword\"\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; - } + try (final InputStream is = response.getEntity().getContent(); + final BufferedReader br = + new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) { - public static String getOrderIndexMapping() { - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"id\": {\n" + - " \"type\": \"long\"\n" + - " },\n" + - " \"name\": {\n" + - " 
\"type\": \"text\",\n" + - " \"fields\": {\n" + - " \"keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; + String line; + while ((line = br.readLine()) != null) { + sb.append(line); + if (retainNewLines) { + sb.append(String.format(Locale.ROOT, "%n")); + } + } } + return sb.toString(); + } - public static String getWeblogsIndexMapping() { - return "{\n" + - " \"mappings\": {\n" + - " \"properties\": {\n" + - " \"host\": {\n" + - " \"type\": \"ip\"\n" + - " },\n" + - " \"method\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"url\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"response\": {\n" + - " \"type\": \"text\"\n" + - " },\n" + - " \"bytes\": {\n" + - " \"type\": \"text\"\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; - } + public static String fileToString( + final String filePathFromProjectRoot, final boolean removeNewLines) throws IOException { - public static String getDateIndexMapping() { - return "{ \"mappings\": {" + - " \"properties\": {\n" + - " \"date_keyword\": {\n" + - " \"type\": \"keyword\",\n" + - " \"ignore_above\": 256\n" + - " }"+ - " }"+ - " }" + - "}"; - } + final String absolutePath = getResourceFilePath(filePathFromProjectRoot); - public static String getDateTimeIndexMapping() { - return "{" + - " \"mappings\": {" + - " \"properties\": {" + - " \"birthday\": {" + - " \"type\": \"date\"" + - " }" + - " }" + - " }" + - "}"; - } + try (final InputStream stream = new FileInputStream(absolutePath); + final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); + final BufferedReader br = new BufferedReader(streamReader)) { - public static String getNestedSimpleIndexMapping() { - return "{" + - " \"mappings\": {" + - " \"properties\": {" + - " \"address\": {" + - " \"type\": \"nested\"," + - " \"properties\": {" + - " \"city\": {" + - " \"type\": \"text\"," + - " \"fields\": {" + - " \"keyword\": {" + - " \"type\": 
\"keyword\"," + - " \"ignore_above\": 256" + - " }" + - " }" + - " }," + - " \"state\": {" + - " \"type\": \"text\"," + - " \"fields\": {" + - " \"keyword\": {" + - " \"type\": \"keyword\"," + - " \"ignore_above\": 256" + - " }" + - " }" + - " }" + - " }" + - " }," + - " \"age\": {" + - " \"type\": \"long\"" + - " }," + - " \"id\": {" + - " \"type\": \"long\"" + - " }," + - " \"name\": {" + - " \"type\": \"text\"," + - " \"fields\": {" + - " \"keyword\": {" + - " \"type\": \"keyword\"," + - " \"ignore_above\": 256" + - " }" + - " }" + - " }" + - " }" + - " }" + - "}"; - } - public static void loadBulk(Client client, String jsonPath, String defaultIndex) throws Exception { - System.out.println(String.format("Loading file %s into OpenSearch cluster", jsonPath)); - String absJsonPath = getResourceFilePath(jsonPath); - - BulkRequest bulkRequest = new BulkRequest(); - try (final InputStream stream = new FileInputStream(absJsonPath); - final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); - final BufferedReader br = new BufferedReader(streamReader)) { - - while (true) { - - String actionLine = br.readLine(); - if (actionLine == null || actionLine.trim().isEmpty()) { - break; - } - String sourceLine = br.readLine(); - JSONObject actionJson = new JSONObject(actionLine); - - IndexRequest indexRequest = new IndexRequest(); - indexRequest.index(defaultIndex); - if (actionJson.getJSONObject("index").has("_id")) { - String docId = actionJson.getJSONObject("index").getString("_id"); - indexRequest.id(docId); - } - if (actionJson.getJSONObject("index").has("_routing")) { - String routing = actionJson.getJSONObject("index").getString("_routing"); - indexRequest.routing(routing); - } - indexRequest.source(sourceLine, XContentType.JSON); - bulkRequest.add(indexRequest); - } - } + final StringBuilder stringBuilder = new StringBuilder(); + String line = br.readLine(); - BulkResponse bulkResponse = client.bulk(bulkRequest).actionGet(); + while (line != 
null) { - if (bulkResponse.hasFailures()) { - throw new Exception("Failed to load test data into index " + defaultIndex + ", " + - bulkResponse.buildFailureMessage()); + stringBuilder.append(line); + if (!removeNewLines) { + stringBuilder.append(String.format(Locale.ROOT, "%n")); } - System.out.println(bulkResponse.getItems().length + " documents loaded."); - // ensure the documents are searchable - client.admin().indices().prepareRefresh(defaultIndex).execute().actionGet(); - } + line = br.readLine(); + } - public static String getResourceFilePath(String relPath) { - String projectRoot = System.getProperty("project.root", null); - if (projectRoot == null) { - return new File(relPath).getAbsolutePath(); - } else { - return new File(projectRoot + "/" + relPath).getAbsolutePath(); - } + return stringBuilder.toString(); } - - public static String getResponseBody(Response response) throws IOException { - - return getResponseBody(response, false); + } + + /** + * Builds all permutations of the given list of Strings + * + * @param items list of strings to permute + * @return list of permutations + */ + public static List> getPermutations(final List items) { + + if (items.size() > 5) { + throw new IllegalArgumentException("Inefficient test, please refactor"); } - public static String getResponseBody(Response response, boolean retainNewLines) throws IOException { - final StringBuilder sb = new StringBuilder(); + final List> result = new LinkedList<>(); - try (final InputStream is = response.getEntity().getContent(); - final BufferedReader br = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) { + if (items.isEmpty() || 1 == items.size()) { - String line; - while ((line = br.readLine()) != null) { - sb.append(line); - if (retainNewLines) { - sb.append(String.format(Locale.ROOT, "%n")); - } - } - } - return sb.toString(); + final List onlyElement = new ArrayList<>(); + if (1 == items.size()) { + onlyElement.add(items.get(0)); + } + 
result.add(onlyElement); + return result; } - public static String fileToString(final String filePathFromProjectRoot, final boolean removeNewLines) - throws IOException { - - final String absolutePath = getResourceFilePath(filePathFromProjectRoot); - - try (final InputStream stream = new FileInputStream(absolutePath); - final Reader streamReader = new InputStreamReader(stream, StandardCharsets.UTF_8); - final BufferedReader br = new BufferedReader(streamReader)) { - - final StringBuilder stringBuilder = new StringBuilder(); - String line = br.readLine(); - - while (line != null) { - - stringBuilder.append(line); - if (!removeNewLines) { - stringBuilder.append(String.format(Locale.ROOT, "%n")); - } - line = br.readLine(); - } - - return stringBuilder.toString(); - } + for (int i = 0; i < items.size(); ++i) { + + final List smallerSet = new ArrayList<>(); + + if (i != 0) { + smallerSet.addAll(items.subList(0, i)); + } + if (i != items.size() - 1) { + smallerSet.addAll(items.subList(i + 1, items.size())); + } + + final String currentItem = items.get(i); + result.addAll( + getPermutations(smallerSet).stream() + .map( + smallerSetPermutation -> { + final List permutation = new ArrayList<>(); + permutation.add(currentItem); + permutation.addAll(smallerSetPermutation); + return permutation; + }) + .collect(Collectors.toCollection(LinkedList::new))); } - /** - * Builds all permutations of the given list of Strings - * @param items - list of strings to permute - * @return list of permutations - */ - public static List> getPermutations(final List items) { - - if (items.size() > 5) { - throw new IllegalArgumentException("Inefficient test, please refactor"); - } - - final List> result = new LinkedList<>(); - - if (items.isEmpty() || 1 == items.size()) { - - final List onlyElement = new ArrayList<>(); - if (1 == items.size()) { - onlyElement.add(items.get(0)); - } - result.add(onlyElement); - return result; - } - - for (int i = 0; i < items.size(); ++i) { - - final List 
smallerSet = new ArrayList<>(); - - if (i != 0) { - smallerSet.addAll(items.subList(0, i)); - } - if (i != items.size() - 1) { - smallerSet.addAll(items.subList(i + 1, items.size())); - } - - final String currentItem = items.get(i); - result.addAll(getPermutations(smallerSet).stream().map(smallerSetPermutation -> { - final List permutation = new ArrayList<>(); - permutation.add(currentItem); - permutation.addAll(smallerSetPermutation); - return permutation; - }).collect(Collectors.toCollection(LinkedList::new))); - } - - return result; - } + return result; + } } diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/util/TestsConstants.java b/legacy/src/test/java/org/opensearch/sql/legacy/util/TestsConstants.java index a6b2c84d55..f436cedaaa 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/util/TestsConstants.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/util/TestsConstants.java @@ -3,48 +3,46 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.legacy.util; -/** - * Created by omershelef on 18/12/14. - */ +/** Created by omershelef on 18/12/14. 
*/ public class TestsConstants { - public final static String PERSISTENT = "persistent"; - public final static String TRANSIENT = "transient"; - - public final static String TEST_INDEX = "opensearch-sql_test_index"; - - public final static String TEST_INDEX_ONLINE = TEST_INDEX + "_online"; - public final static String TEST_INDEX_ACCOUNT = TEST_INDEX + "_account"; - public final static String TEST_INDEX_PHRASE = TEST_INDEX + "_phrase"; - public final static String TEST_INDEX_DOG = TEST_INDEX + "_dog"; - public final static String TEST_INDEX_DOG2 = TEST_INDEX + "_dog2"; - public final static String TEST_INDEX_DOG3 = TEST_INDEX + "_dog3"; - public final static String TEST_INDEX_DOGSUBQUERY = TEST_INDEX + "_subquery"; - public final static String TEST_INDEX_PEOPLE = TEST_INDEX + "_people"; - public final static String TEST_INDEX_PEOPLE2 = TEST_INDEX + "_people2"; - public final static String TEST_INDEX_GAME_OF_THRONES = TEST_INDEX + "_game_of_thrones"; - public final static String TEST_INDEX_SYSTEM = TEST_INDEX + "_system"; - public final static String TEST_INDEX_ODBC = TEST_INDEX + "_odbc"; - public final static String TEST_INDEX_LOCATION = TEST_INDEX + "_location"; - public final static String TEST_INDEX_LOCATION2 = TEST_INDEX + "_location2"; - public final static String TEST_INDEX_NESTED_TYPE = TEST_INDEX + "_nested_type"; - public final static String TEST_INDEX_NESTED_SIMPLE = TEST_INDEX + "_nested_simple"; - public final static String TEST_INDEX_NESTED_WITH_QUOTES = TEST_INDEX + "_nested_type_with_quotes"; - public final static String TEST_INDEX_EMPLOYEE_NESTED = TEST_INDEX + "_employee_nested"; - public final static String TEST_INDEX_JOIN_TYPE = TEST_INDEX + "_join_type"; - public final static String TEST_INDEX_BANK = TEST_INDEX + "_bank"; - public final static String TEST_INDEX_BANK_TWO = TEST_INDEX_BANK + "_two"; - public final static String TEST_INDEX_BANK_WITH_NULL_VALUES = TEST_INDEX_BANK + "_with_null_values"; - public final static String TEST_INDEX_ORDER = 
TEST_INDEX + "_order"; - public final static String TEST_INDEX_WEBLOG = TEST_INDEX + "_weblog"; - public final static String TEST_INDEX_DATE = TEST_INDEX + "_date"; - public final static String TEST_INDEX_DATE_TIME = TEST_INDEX + "_datetime"; - - - public final static String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"; - public final static String TS_DATE_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS"; - public final static String SIMPLE_DATE_FORMAT = "yyyy-MM-dd"; + public static final String PERSISTENT = "persistent"; + public static final String TRANSIENT = "transient"; + + public static final String TEST_INDEX = "opensearch-sql_test_index"; + + public static final String TEST_INDEX_ONLINE = TEST_INDEX + "_online"; + public static final String TEST_INDEX_ACCOUNT = TEST_INDEX + "_account"; + public static final String TEST_INDEX_PHRASE = TEST_INDEX + "_phrase"; + public static final String TEST_INDEX_DOG = TEST_INDEX + "_dog"; + public static final String TEST_INDEX_DOG2 = TEST_INDEX + "_dog2"; + public static final String TEST_INDEX_DOG3 = TEST_INDEX + "_dog3"; + public static final String TEST_INDEX_DOGSUBQUERY = TEST_INDEX + "_subquery"; + public static final String TEST_INDEX_PEOPLE = TEST_INDEX + "_people"; + public static final String TEST_INDEX_PEOPLE2 = TEST_INDEX + "_people2"; + public static final String TEST_INDEX_GAME_OF_THRONES = TEST_INDEX + "_game_of_thrones"; + public static final String TEST_INDEX_SYSTEM = TEST_INDEX + "_system"; + public static final String TEST_INDEX_ODBC = TEST_INDEX + "_odbc"; + public static final String TEST_INDEX_LOCATION = TEST_INDEX + "_location"; + public static final String TEST_INDEX_LOCATION2 = TEST_INDEX + "_location2"; + public static final String TEST_INDEX_NESTED_TYPE = TEST_INDEX + "_nested_type"; + public static final String TEST_INDEX_NESTED_SIMPLE = TEST_INDEX + "_nested_simple"; + public static final String TEST_INDEX_NESTED_WITH_QUOTES = + TEST_INDEX + "_nested_type_with_quotes"; + public static final String 
TEST_INDEX_EMPLOYEE_NESTED = TEST_INDEX + "_employee_nested"; + public static final String TEST_INDEX_JOIN_TYPE = TEST_INDEX + "_join_type"; + public static final String TEST_INDEX_BANK = TEST_INDEX + "_bank"; + public static final String TEST_INDEX_BANK_TWO = TEST_INDEX_BANK + "_two"; + public static final String TEST_INDEX_BANK_WITH_NULL_VALUES = + TEST_INDEX_BANK + "_with_null_values"; + public static final String TEST_INDEX_ORDER = TEST_INDEX + "_order"; + public static final String TEST_INDEX_WEBLOG = TEST_INDEX + "_weblog"; + public static final String TEST_INDEX_DATE = TEST_INDEX + "_date"; + public static final String TEST_INDEX_DATE_TIME = TEST_INDEX + "_datetime"; + + public static final String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"; + public static final String TS_DATE_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS"; + public static final String SIMPLE_DATE_FORMAT = "yyyy-MM-dd"; } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/client/MLClient.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/client/MLClient.java index 19f49d0e5f..4bc6009875 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/client/MLClient.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/client/MLClient.java @@ -3,16 +3,14 @@ import org.opensearch.client.node.NodeClient; import org.opensearch.ml.client.MachineLearningNodeClient; - public class MLClient { private static MachineLearningNodeClient INSTANCE; - private MLClient() { - - } + private MLClient() {} /** * get machine learning client. 
+ * * @param nodeClient node client * @return machine learning client */ diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchClient.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchClient.java index dc6e72bd91..0a9cc67993 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchClient.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchClient.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.client; import java.util.List; @@ -14,9 +13,8 @@ import org.opensearch.sql.opensearch.response.OpenSearchResponse; /** - * OpenSearch client abstraction to wrap different OpenSearch client implementation. For - * example, implementation by node client for OpenSearch plugin or by REST client for - * standalone mode. + * OpenSearch client abstraction to wrap different OpenSearch client implementation. For example, + * implementation by node client for OpenSearch plugin or by REST client for standalone mode. */ public interface OpenSearchClient { @@ -24,6 +22,7 @@ public interface OpenSearchClient { /** * Check if the given index exists. + * * @param indexName index name * @return true if exists, otherwise false */ @@ -31,8 +30,9 @@ public interface OpenSearchClient { /** * Create OpenSearch index based on the given mappings. 
+ * * @param indexName index name - * @param mappings index mappings + * @param mappings index mappings */ void createIndex(String indexName, Map mappings); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClient.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClient.java index c6d44e2c23..993e092534 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClient.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClient.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.client; import com.google.common.collect.ImmutableList; @@ -40,9 +39,7 @@ public class OpenSearchNodeClient implements OpenSearchClient { /** Node client provided by OpenSearch container. */ private final NodeClient client; - /** - * Constructor of OpenSearchNodeClient. - */ + /** Constructor of OpenSearchNodeClient. */ public OpenSearchNodeClient(NodeClient client) { this.client = client; } @@ -50,8 +47,8 @@ public OpenSearchNodeClient(NodeClient client) { @Override public boolean exists(String indexName) { try { - IndicesExistsResponse checkExistResponse = client.admin().indices() - .exists(new IndicesExistsRequest(indexName)).actionGet(); + IndicesExistsResponse checkExistResponse = + client.admin().indices().exists(new IndicesExistsRequest(indexName)).actionGet(); return checkExistResponse.isExists(); } catch (Exception e) { throw new IllegalStateException("Failed to check if index [" + indexName + "] exists", e); @@ -83,13 +80,12 @@ public void createIndex(String indexName, Map mappings) { @Override public Map getIndexMappings(String... 
indexExpression) { try { - GetMappingsResponse mappingsResponse = client.admin().indices() - .prepareGetMappings(indexExpression) - .setLocal(true) - .get(); - return mappingsResponse.mappings().entrySet().stream().collect(Collectors.toUnmodifiableMap( - Map.Entry::getKey, - cursor -> new IndexMapping(cursor.getValue()))); + GetMappingsResponse mappingsResponse = + client.admin().indices().prepareGetMappings(indexExpression).setLocal(true).get(); + return mappingsResponse.mappings().entrySet().stream() + .collect( + Collectors.toUnmodifiableMap( + Map.Entry::getKey, cursor -> new IndexMapping(cursor.getValue()))); } catch (IndexNotFoundException e) { // Re-throw directly to be treated as client error finally throw e; @@ -127,15 +123,11 @@ public Map getIndexMaxResultWindows(String... indexExpression) } } - /** - * TODO: Scroll doesn't work for aggregation. Support aggregation later. - */ + /** TODO: Scroll doesn't work for aggregation. Support aggregation later. */ @Override public OpenSearchResponse search(OpenSearchRequest request) { return request.search( - req -> client.search(req).actionGet(), - req -> client.searchScroll(req).actionGet() - ); + req -> client.search(req).actionGet(), req -> client.searchScroll(req).actionGet()); } /** @@ -145,13 +137,12 @@ public OpenSearchResponse search(OpenSearchRequest request) { */ @Override public List indices() { - final GetIndexResponse indexResponse = client.admin().indices() - .prepareGetIndex() - .setLocal(true) - .get(); + final GetIndexResponse indexResponse = + client.admin().indices().prepareGetIndex().setLocal(true).get(); final Stream aliasStream = ImmutableList.copyOf(indexResponse.aliases().values()).stream() - .flatMap(Collection::stream).map(AliasMetadata::alias); + .flatMap(Collection::stream) + .map(AliasMetadata::alias); return Stream.concat(Arrays.stream(indexResponse.getIndices()), aliasStream) .collect(Collectors.toList()); @@ -164,20 +155,20 @@ public List indices() { */ @Override public Map meta() 
{ - return ImmutableMap.of(META_CLUSTER_NAME, - client.settings().get("cluster.name", "opensearch")); + return ImmutableMap.of(META_CLUSTER_NAME, client.settings().get("cluster.name", "opensearch")); } @Override public void cleanup(OpenSearchRequest request) { - request.clean(scrollId -> { - try { - client.prepareClearScroll().addScrollId(scrollId).get(); - } catch (Exception e) { - throw new IllegalStateException( - "Failed to clean up resources for search request " + request, e); - } - }); + request.clean( + scrollId -> { + try { + client.prepareClearScroll().addScrollId(scrollId).get(); + } catch (Exception e) { + throw new IllegalStateException( + "Failed to clean up resources for search request " + request, e); + } + }); } @Override diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchRestClient.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchRestClient.java index c27c4bbc30..b6106982a7 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchRestClient.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchRestClient.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.client; import com.google.common.collect.ImmutableList; @@ -49,8 +48,7 @@ public class OpenSearchRestClient implements OpenSearchClient { @Override public boolean exists(String indexName) { try { - return client.indices().exists( - new GetIndexRequest(indexName), RequestOptions.DEFAULT); + return client.indices().exists(new GetIndexRequest(indexName), RequestOptions.DEFAULT); } catch (IOException e) { throw new IllegalStateException("Failed to check if index [" + indexName + "] exist", e); } @@ -59,8 +57,9 @@ public boolean exists(String indexName) { @Override public void createIndex(String indexName, Map mappings) { try { - client.indices().create( - new CreateIndexRequest(indexName).mapping(mappings), RequestOptions.DEFAULT); + 
client + .indices() + .create(new CreateIndexRequest(indexName).mapping(mappings), RequestOptions.DEFAULT); } catch (IOException e) { throw new IllegalStateException("Failed to create index [" + indexName + "]", e); } @@ -80,27 +79,29 @@ public Map getIndexMappings(String... indexExpression) { @Override public Map getIndexMaxResultWindows(String... indexExpression) { - GetSettingsRequest request = new GetSettingsRequest() - .indices(indexExpression).includeDefaults(true); + GetSettingsRequest request = + new GetSettingsRequest().indices(indexExpression).includeDefaults(true); try { GetSettingsResponse response = client.indices().getSettings(request, RequestOptions.DEFAULT); Map settings = response.getIndexToSettings(); Map defaultSettings = response.getIndexToDefaultSettings(); Map result = new HashMap<>(); - defaultSettings.forEach((key, value) -> { - Integer maxResultWindow = value.getAsInt("index.max_result_window", null); - if (maxResultWindow != null) { - result.put(key, maxResultWindow); - } - }); - - settings.forEach((key, value) -> { - Integer maxResultWindow = value.getAsInt("index.max_result_window", null); - if (maxResultWindow != null) { - result.put(key, maxResultWindow); - } - }); + defaultSettings.forEach( + (key, value) -> { + Integer maxResultWindow = value.getAsInt("index.max_result_window", null); + if (maxResultWindow != null) { + result.put(key, maxResultWindow); + } + }); + + settings.forEach( + (key, value) -> { + Integer maxResultWindow = value.getAsInt("index.max_result_window", null); + if (maxResultWindow != null) { + result.put(key, maxResultWindow); + } + }); return result; } catch (IOException e) { @@ -126,8 +127,7 @@ public OpenSearchResponse search(OpenSearchRequest request) { throw new IllegalStateException( "Failed to perform scroll operation with request " + req, e); } - } - ); + }); } /** @@ -142,7 +142,8 @@ public List indices() { client.indices().get(new GetIndexRequest(), RequestOptions.DEFAULT); final Stream aliasStream = 
ImmutableList.copyOf(indexResponse.getAliases().values()).stream() - .flatMap(Collection::stream).map(AliasMetadata::alias); + .flatMap(Collection::stream) + .map(AliasMetadata::alias); return Stream.concat(Arrays.stream(indexResponse.getIndices()), aliasStream) .collect(Collectors.toList()); } catch (IOException e) { @@ -173,16 +174,17 @@ public Map meta() { @Override public void cleanup(OpenSearchRequest request) { - request.clean(scrollId -> { - try { - ClearScrollRequest clearRequest = new ClearScrollRequest(); - clearRequest.addScrollId(scrollId); - client.clearScroll(clearRequest, RequestOptions.DEFAULT); - } catch (IOException e) { - throw new IllegalStateException( - "Failed to clean up resources for search request " + request, e); - } - }); + request.clean( + scrollId -> { + try { + ClearScrollRequest clearRequest = new ClearScrollRequest(); + clearRequest.addScrollId(scrollId); + client.clearScroll(clearRequest, RequestOptions.DEFAULT); + } catch (IOException e) { + throw new IllegalStateException( + "Failed to clean up resources for search request " + request, e); + } + }); } @Override diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchBinaryType.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchBinaryType.java index cd58d4bc9f..ddd7ab8eb9 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchBinaryType.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchBinaryType.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.type; import static org.opensearch.sql.data.type.ExprCoreType.UNKNOWN; @@ -11,8 +10,8 @@ import lombok.EqualsAndHashCode; /** - * The type of a binary value. See - * doc + * The type of a binary value. 
See doc */ @EqualsAndHashCode(callSuper = false) public class OpenSearchBinaryType extends OpenSearchDataType { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataType.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataType.java index 273b980d2a..d276374539 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataType.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataType.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.type; import com.google.common.collect.ImmutableMap; @@ -18,15 +17,11 @@ import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; -/** - * The extension of ExprType in OpenSearch. - */ +/** The extension of ExprType in OpenSearch. */ @EqualsAndHashCode public class OpenSearchDataType implements ExprType, Serializable { - /** - * The mapping (OpenSearch engine) type. - */ + /** The mapping (OpenSearch engine) type. */ public enum MappingType { Invalid(null, ExprCoreType.UNKNOWN), Text("text", ExprCoreType.UNKNOWN), @@ -51,8 +46,7 @@ public enum MappingType { private final String name; // Associated `ExprCoreType` - @Getter - private final ExprCoreType exprCoreType; + @Getter private final ExprCoreType exprCoreType; MappingType(String name, ExprCoreType exprCoreType) { this.name = name; @@ -64,16 +58,15 @@ public String toString() { } } - @EqualsAndHashCode.Exclude - @Getter - protected MappingType mappingType; + @EqualsAndHashCode.Exclude @Getter protected MappingType mappingType; // resolved ExprCoreType protected ExprCoreType exprCoreType; /** - * Get a simplified type {@link ExprCoreType} if possible. - * To avoid returning `UNKNOWN` for `OpenSearch*Type`s, e.g. for IP, returns itself. + * Get a simplified type {@link ExprCoreType} if possible. 
To avoid returning `UNKNOWN` for + * `OpenSearch*Type`s, e.g. for IP, returns itself. + * * @return An {@link ExprType}. */ public ExprType getExprType() { @@ -84,22 +77,23 @@ public ExprType getExprType() { } /** - * Simple instances of OpenSearchDataType are created once during entire SQL engine lifetime - * and cached there. This reduces memory usage and increases type comparison. - * Note: Types with non-empty fields and properties are not cached. + * Simple instances of OpenSearchDataType are created once during entire SQL engine lifetime and + * cached there. This reduces memory usage and increases type comparison. Note: Types with + * non-empty fields and properties are not cached. */ private static final Map instances = new HashMap<>(); static { EnumUtils.getEnumList(MappingType.class).stream() - .filter(t -> t != MappingType.Invalid).forEach(t -> - instances.put(t.toString(), OpenSearchDataType.of(t))); - EnumUtils.getEnumList(ExprCoreType.class).forEach(t -> - instances.put(t.toString(), OpenSearchDataType.of(t))); + .filter(t -> t != MappingType.Invalid) + .forEach(t -> instances.put(t.toString(), OpenSearchDataType.of(t))); + EnumUtils.getEnumList(ExprCoreType.class) + .forEach(t -> instances.put(t.toString(), OpenSearchDataType.of(t))); } /** * Parses index mapping and maps it to a Data type in the SQL plugin. + * * @param indexMapping An input with keys and objects that need to be mapped to a data type. * @return The mapping. */ @@ -110,37 +104,35 @@ public static Map parseMapping(Map i return result; } - indexMapping.forEach((k, v) -> { - var innerMap = (Map)v; - // by default, the type is treated as an Object if "type" is not provided - var type = ((String) innerMap - .getOrDefault( - "type", - "object")) - .replace("_", ""); - if (!EnumUtils.isValidEnumIgnoreCase(OpenSearchDataType.MappingType.class, type)) { - // unknown type, e.g. 
`alias` - // TODO resolve alias reference - return; - } - // create OpenSearchDataType - result.put(k, OpenSearchDataType.of( - EnumUtils.getEnumIgnoreCase(OpenSearchDataType.MappingType.class, type), - innerMap) - ); - }); + indexMapping.forEach( + (k, v) -> { + var innerMap = (Map) v; + // by default, the type is treated as an Object if "type" is not provided + var type = ((String) innerMap.getOrDefault("type", "object")).replace("_", ""); + if (!EnumUtils.isValidEnumIgnoreCase(OpenSearchDataType.MappingType.class, type)) { + // unknown type, e.g. `alias` + // TODO resolve alias reference + return; + } + // create OpenSearchDataType + result.put( + k, + OpenSearchDataType.of( + EnumUtils.getEnumIgnoreCase(OpenSearchDataType.MappingType.class, type), + innerMap)); + }); return result; } /** * A constructor function which builds proper `OpenSearchDataType` for given mapping `Type`. + * * @param mappingType A mapping type. * @return An instance or inheritor of `OpenSearchDataType`. */ public static OpenSearchDataType of(MappingType mappingType, Map innerMap) { - OpenSearchDataType res = instances.getOrDefault(mappingType.toString(), - new OpenSearchDataType(mappingType) - ); + OpenSearchDataType res = + instances.getOrDefault(mappingType.toString(), new OpenSearchDataType(mappingType)); switch (mappingType) { case Object: // TODO: use Object type once it has been added @@ -158,9 +150,12 @@ public static OpenSearchDataType of(MappingType mappingType, Map Map fields = parseMapping((Map) innerMap.getOrDefault("fields", Map.of())); return (!fields.isEmpty()) ? 
OpenSearchTextType.of(fields) : OpenSearchTextType.of(); - case GeoPoint: return OpenSearchGeoPointType.of(); - case Binary: return OpenSearchBinaryType.of(); - case Ip: return OpenSearchIpType.of(); + case GeoPoint: + return OpenSearchGeoPointType.of(); + case Binary: + return OpenSearchBinaryType.of(); + case Ip: + return OpenSearchIpType.of(); case Date: // Default date formatter is used when "" is passed as the second parameter String format = (String) innerMap.getOrDefault("format", ""); @@ -173,6 +168,7 @@ public static OpenSearchDataType of(MappingType mappingType, Map /** * A constructor function which builds proper `OpenSearchDataType` for given mapping `Type`. * Designed to be called by the mapping parser only (and tests). + * * @param mappingType A mapping type. * @return An instance or inheritor of `OpenSearchDataType`. */ @@ -182,6 +178,7 @@ public static OpenSearchDataType of(MappingType mappingType) { /** * A constructor function which builds proper `OpenSearchDataType` for given {@link ExprType}. + * * @param type A type. * @return An instance of `OpenSearchDataType`. */ @@ -211,9 +208,7 @@ protected OpenSearchDataType(ExprCoreType type) { // For datatypes with properties (example: object and nested types) // a read-only collection - @Getter - @EqualsAndHashCode.Exclude - Map properties = ImmutableMap.of(); + @Getter @EqualsAndHashCode.Exclude Map properties = ImmutableMap.of(); @Override // Called when building TypeEnvironment and when serializing PPL response @@ -236,46 +231,52 @@ public String legacyTypeName() { } /** - * Clone type object without {@link #properties} - without info about nested object types. - * Note: Should be overriden by all derived classes for proper work. + * Clone type object without {@link #properties} - without info about nested object types. Note: + * Should be overriden by all derived classes for proper work. + * * @return A cloned object. 
*/ protected OpenSearchDataType cloneEmpty() { return this.mappingType == null - ? new OpenSearchDataType(this.exprCoreType) : new OpenSearchDataType(this.mappingType); + ? new OpenSearchDataType(this.exprCoreType) + : new OpenSearchDataType(this.mappingType); } /** - * Flattens mapping tree into a single layer list of objects (pairs of name-types actually), - * which don't have nested types. - * See {@link OpenSearchDataTypeTest#traverseAndFlatten() test} for example. + * Flattens mapping tree into a single layer list of objects (pairs of name-types actually), which + * don't have nested types. See {@link OpenSearchDataTypeTest#traverseAndFlatten() test} for + * example. + * * @param tree A list of `OpenSearchDataType`s - map between field name and its type. * @return A list of all `OpenSearchDataType`s from given map on the same nesting level (1). - * Nested object names are prefixed by names of their host. + * Nested object names are prefixed by names of their host. */ public static Map traverseAndFlatten( Map tree) { final Map result = new LinkedHashMap<>(); - BiConsumer, String> visitLevel = new BiConsumer<>() { - @Override - public void accept(Map subtree, String prefix) { - for (var entry : subtree.entrySet()) { - String entryKey = entry.getKey(); - var nextPrefix = prefix.isEmpty() ? entryKey : String.format("%s.%s", prefix, entryKey); - result.put(nextPrefix, entry.getValue().cloneEmpty()); - var nextSubtree = entry.getValue().getProperties(); - if (!nextSubtree.isEmpty()) { - accept(nextSubtree, nextPrefix); + BiConsumer, String> visitLevel = + new BiConsumer<>() { + @Override + public void accept(Map subtree, String prefix) { + for (var entry : subtree.entrySet()) { + String entryKey = entry.getKey(); + var nextPrefix = + prefix.isEmpty() ? 
entryKey : String.format("%s.%s", prefix, entryKey); + result.put(nextPrefix, entry.getValue().cloneEmpty()); + var nextSubtree = entry.getValue().getProperties(); + if (!nextSubtree.isEmpty()) { + accept(nextSubtree, nextPrefix); + } + } } - } - } - }; + }; visitLevel.accept(tree, ""); return result; } /** * Resolve type of identified from parsed mapping tree. + * * @param tree Parsed mapping tree (not flattened). * @param id An identifier. * @return Resolved OpenSearchDataType or null if not found. diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateType.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateType.java index 76947bf720..d0a924c494 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateType.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateType.java @@ -20,124 +20,124 @@ import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; -/** - * Date type with support for predefined and custom formats read from the index mapping. - */ +/** Date type with support for predefined and custom formats read from the index mapping. */ @EqualsAndHashCode(callSuper = true) public class OpenSearchDateType extends OpenSearchDataType { private static final OpenSearchDateType instance = new OpenSearchDateType(); /** Numeric formats which support full datetime. */ - public static final List SUPPORTED_NAMED_NUMERIC_FORMATS = List.of( - FormatNames.EPOCH_MILLIS, - FormatNames.EPOCH_SECOND - ); + public static final List SUPPORTED_NAMED_NUMERIC_FORMATS = + List.of(FormatNames.EPOCH_MILLIS, FormatNames.EPOCH_SECOND); /** List of named formats which support full datetime. 
*/ - public static final List SUPPORTED_NAMED_DATETIME_FORMATS = List.of( - FormatNames.ISO8601, - FormatNames.BASIC_DATE_TIME, - FormatNames.BASIC_DATE_TIME_NO_MILLIS, - FormatNames.BASIC_ORDINAL_DATE_TIME, - FormatNames.BASIC_ORDINAL_DATE_TIME_NO_MILLIS, - FormatNames.BASIC_WEEK_DATE_TIME, - FormatNames.STRICT_BASIC_WEEK_DATE_TIME, - FormatNames.BASIC_WEEK_DATE_TIME_NO_MILLIS, - FormatNames.STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS, - FormatNames.BASIC_WEEK_DATE, - FormatNames.STRICT_BASIC_WEEK_DATE, - FormatNames.DATE_OPTIONAL_TIME, - FormatNames.STRICT_DATE_OPTIONAL_TIME, - FormatNames.STRICT_DATE_OPTIONAL_TIME_NANOS, - FormatNames.DATE_TIME, - FormatNames.STRICT_DATE_TIME, - FormatNames.DATE_TIME_NO_MILLIS, - FormatNames.STRICT_DATE_TIME_NO_MILLIS, - FormatNames.DATE_HOUR_MINUTE_SECOND_FRACTION, - FormatNames.STRICT_DATE_HOUR_MINUTE_SECOND_FRACTION, - FormatNames.DATE_HOUR_MINUTE_SECOND_FRACTION, - FormatNames.DATE_HOUR_MINUTE_SECOND_MILLIS, - FormatNames.STRICT_DATE_HOUR_MINUTE_SECOND_MILLIS, - FormatNames.DATE_HOUR_MINUTE_SECOND, - FormatNames.STRICT_DATE_HOUR_MINUTE_SECOND, - FormatNames.DATE_HOUR_MINUTE, - FormatNames.STRICT_DATE_HOUR_MINUTE, - FormatNames.DATE_HOUR, - FormatNames.STRICT_DATE_HOUR, - FormatNames.ORDINAL_DATE_TIME, - FormatNames.STRICT_ORDINAL_DATE_TIME, - FormatNames.ORDINAL_DATE_TIME_NO_MILLIS, - FormatNames.STRICT_ORDINAL_DATE_TIME_NO_MILLIS, - FormatNames.WEEK_DATE_TIME, - FormatNames.STRICT_WEEK_DATE_TIME, - FormatNames.WEEK_DATE_TIME_NO_MILLIS, - FormatNames.STRICT_WEEK_DATE_TIME_NO_MILLIS - ); + public static final List SUPPORTED_NAMED_DATETIME_FORMATS = + List.of( + FormatNames.ISO8601, + FormatNames.BASIC_DATE_TIME, + FormatNames.BASIC_DATE_TIME_NO_MILLIS, + FormatNames.BASIC_ORDINAL_DATE_TIME, + FormatNames.BASIC_ORDINAL_DATE_TIME_NO_MILLIS, + FormatNames.BASIC_WEEK_DATE_TIME, + FormatNames.STRICT_BASIC_WEEK_DATE_TIME, + FormatNames.BASIC_WEEK_DATE_TIME_NO_MILLIS, + FormatNames.STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS, + 
FormatNames.BASIC_WEEK_DATE, + FormatNames.STRICT_BASIC_WEEK_DATE, + FormatNames.DATE_OPTIONAL_TIME, + FormatNames.STRICT_DATE_OPTIONAL_TIME, + FormatNames.STRICT_DATE_OPTIONAL_TIME_NANOS, + FormatNames.DATE_TIME, + FormatNames.STRICT_DATE_TIME, + FormatNames.DATE_TIME_NO_MILLIS, + FormatNames.STRICT_DATE_TIME_NO_MILLIS, + FormatNames.DATE_HOUR_MINUTE_SECOND_FRACTION, + FormatNames.STRICT_DATE_HOUR_MINUTE_SECOND_FRACTION, + FormatNames.DATE_HOUR_MINUTE_SECOND_FRACTION, + FormatNames.DATE_HOUR_MINUTE_SECOND_MILLIS, + FormatNames.STRICT_DATE_HOUR_MINUTE_SECOND_MILLIS, + FormatNames.DATE_HOUR_MINUTE_SECOND, + FormatNames.STRICT_DATE_HOUR_MINUTE_SECOND, + FormatNames.DATE_HOUR_MINUTE, + FormatNames.STRICT_DATE_HOUR_MINUTE, + FormatNames.DATE_HOUR, + FormatNames.STRICT_DATE_HOUR, + FormatNames.ORDINAL_DATE_TIME, + FormatNames.STRICT_ORDINAL_DATE_TIME, + FormatNames.ORDINAL_DATE_TIME_NO_MILLIS, + FormatNames.STRICT_ORDINAL_DATE_TIME_NO_MILLIS, + FormatNames.WEEK_DATE_TIME, + FormatNames.STRICT_WEEK_DATE_TIME, + FormatNames.WEEK_DATE_TIME_NO_MILLIS, + FormatNames.STRICT_WEEK_DATE_TIME_NO_MILLIS); /** List of named formats that only support year/month/day. */ - public static final List SUPPORTED_NAMED_DATE_FORMATS = List.of( - FormatNames.BASIC_DATE, - FormatNames.BASIC_ORDINAL_DATE, - FormatNames.DATE, - FormatNames.STRICT_DATE, - FormatNames.YEAR_MONTH_DAY, - FormatNames.STRICT_YEAR_MONTH_DAY, - FormatNames.ORDINAL_DATE, - FormatNames.STRICT_ORDINAL_DATE, - FormatNames.WEEK_DATE, - FormatNames.STRICT_WEEK_DATE, - FormatNames.WEEKYEAR_WEEK_DAY, - FormatNames.STRICT_WEEKYEAR_WEEK_DAY - ); - - /** list of named formats which produce incomplete date, - * e.g. 1 or 2 are missing from tuple year/month/day. 
*/ - public static final List SUPPORTED_NAMED_INCOMPLETE_DATE_FORMATS = List.of( - FormatNames.YEAR_MONTH, - FormatNames.STRICT_YEAR_MONTH, - FormatNames.YEAR, - FormatNames.STRICT_YEAR, - FormatNames.WEEK_YEAR, - FormatNames.WEEK_YEAR_WEEK, - FormatNames.STRICT_WEEKYEAR_WEEK, - FormatNames.WEEKYEAR, - FormatNames.STRICT_WEEKYEAR - ); + public static final List SUPPORTED_NAMED_DATE_FORMATS = + List.of( + FormatNames.BASIC_DATE, + FormatNames.BASIC_ORDINAL_DATE, + FormatNames.DATE, + FormatNames.STRICT_DATE, + FormatNames.YEAR_MONTH_DAY, + FormatNames.STRICT_YEAR_MONTH_DAY, + FormatNames.ORDINAL_DATE, + FormatNames.STRICT_ORDINAL_DATE, + FormatNames.WEEK_DATE, + FormatNames.STRICT_WEEK_DATE, + FormatNames.WEEKYEAR_WEEK_DAY, + FormatNames.STRICT_WEEKYEAR_WEEK_DAY); + + /** + * list of named formats which produce incomplete date, e.g. 1 or 2 are missing from tuple + * year/month/day. + */ + public static final List SUPPORTED_NAMED_INCOMPLETE_DATE_FORMATS = + List.of( + FormatNames.YEAR_MONTH, + FormatNames.STRICT_YEAR_MONTH, + FormatNames.YEAR, + FormatNames.STRICT_YEAR, + FormatNames.WEEK_YEAR, + FormatNames.WEEK_YEAR_WEEK, + FormatNames.STRICT_WEEKYEAR_WEEK, + FormatNames.WEEKYEAR, + FormatNames.STRICT_WEEKYEAR); /** List of named formats that only support hour/minute/second. 
*/ - public static final List SUPPORTED_NAMED_TIME_FORMATS = List.of( - FormatNames.BASIC_TIME, - FormatNames.BASIC_TIME_NO_MILLIS, - FormatNames.BASIC_T_TIME, - FormatNames.BASIC_T_TIME_NO_MILLIS, - FormatNames.TIME, - FormatNames.STRICT_TIME, - FormatNames.TIME_NO_MILLIS, - FormatNames.STRICT_TIME_NO_MILLIS, - FormatNames.HOUR_MINUTE_SECOND_FRACTION, - FormatNames.STRICT_HOUR_MINUTE_SECOND_FRACTION, - FormatNames.HOUR_MINUTE_SECOND_MILLIS, - FormatNames.STRICT_HOUR_MINUTE_SECOND_MILLIS, - FormatNames.HOUR_MINUTE_SECOND, - FormatNames.STRICT_HOUR_MINUTE_SECOND, - FormatNames.HOUR_MINUTE, - FormatNames.STRICT_HOUR_MINUTE, - FormatNames.HOUR, - FormatNames.STRICT_HOUR, - FormatNames.T_TIME, - FormatNames.STRICT_T_TIME, - FormatNames.T_TIME_NO_MILLIS, - FormatNames.STRICT_T_TIME_NO_MILLIS - ); - - /** Formatter symbols which used to format time or date correspondingly. - * {@link java.time.format.DateTimeFormatter}. */ + public static final List SUPPORTED_NAMED_TIME_FORMATS = + List.of( + FormatNames.BASIC_TIME, + FormatNames.BASIC_TIME_NO_MILLIS, + FormatNames.BASIC_T_TIME, + FormatNames.BASIC_T_TIME_NO_MILLIS, + FormatNames.TIME, + FormatNames.STRICT_TIME, + FormatNames.TIME_NO_MILLIS, + FormatNames.STRICT_TIME_NO_MILLIS, + FormatNames.HOUR_MINUTE_SECOND_FRACTION, + FormatNames.STRICT_HOUR_MINUTE_SECOND_FRACTION, + FormatNames.HOUR_MINUTE_SECOND_MILLIS, + FormatNames.STRICT_HOUR_MINUTE_SECOND_MILLIS, + FormatNames.HOUR_MINUTE_SECOND, + FormatNames.STRICT_HOUR_MINUTE_SECOND, + FormatNames.HOUR_MINUTE, + FormatNames.STRICT_HOUR_MINUTE, + FormatNames.HOUR, + FormatNames.STRICT_HOUR, + FormatNames.T_TIME, + FormatNames.STRICT_T_TIME, + FormatNames.T_TIME_NO_MILLIS, + FormatNames.STRICT_T_TIME_NO_MILLIS); + + /** + * Formatter symbols which used to format time or date correspondingly. {@link + * java.time.format.DateTimeFormatter}. 
+ */ private static final String CUSTOM_FORMAT_TIME_SYMBOLS = "nNASsmHkKha"; + private static final String CUSTOM_FORMAT_DATE_SYMBOLS = "FecEWwYqQgdMLDyuG"; - @EqualsAndHashCode.Exclude - private final List formats; + @EqualsAndHashCode.Exclude private final List formats; private OpenSearchDateType() { super(MappingType.Date); @@ -166,6 +166,7 @@ public boolean hasFormats() { /** * Retrieves and splits a user defined format string from the mapping into a list of formats. + * * @return A list of format names and user defined formats. */ private List getFormatList(String format) { @@ -175,49 +176,57 @@ private List getFormatList(String format) { /** * Retrieves a list of named OpenSearch formatters given by user mapping. + * * @return a list of DateFormatters that can be used to parse a Date/Time/Timestamp. */ public List getAllNamedFormatters() { return formats.stream() .filter(formatString -> FormatNames.forName(formatString) != null) - .map(DateFormatter::forPattern).collect(Collectors.toList()); + .map(DateFormatter::forPattern) + .collect(Collectors.toList()); } /** * Retrieves a list of numeric formatters that format for dates. + * * @return a list of DateFormatters that can be used to parse a Date. */ public List getNumericNamedFormatters() { return formats.stream() - .filter(formatString -> { - FormatNames namedFormat = FormatNames.forName(formatString); - return namedFormat != null && SUPPORTED_NAMED_NUMERIC_FORMATS.contains(namedFormat); - }) - .map(DateFormatter::forPattern).collect(Collectors.toList()); + .filter( + formatString -> { + FormatNames namedFormat = FormatNames.forName(formatString); + return namedFormat != null && SUPPORTED_NAMED_NUMERIC_FORMATS.contains(namedFormat); + }) + .map(DateFormatter::forPattern) + .collect(Collectors.toList()); } /** * Retrieves a list of custom formats defined by the user. + * * @return a list of formats as strings that can be used to parse a Date/Time/Timestamp. 
*/ public List getAllCustomFormats() { return formats.stream() .filter(format -> FormatNames.forName(format) == null) - .map(format -> { - try { - DateFormatter.forPattern(format); - return format; - } catch (Exception ignored) { - // parsing failed - return null; - } - }) + .map( + format -> { + try { + DateFormatter.forPattern(format); + return format; + } catch (Exception ignored) { + // parsing failed + return null; + } + }) .filter(Objects::nonNull) .collect(Collectors.toList()); } /** * Retrieves a list of custom formatters defined by the user. + * * @return a list of DateFormatters that can be used to parse a Date/Time/Timestamp. */ public List getAllCustomFormatters() { @@ -228,41 +237,50 @@ public List getAllCustomFormatters() { /** * Retrieves a list of named formatters that format for dates. + * * @return a list of DateFormatters that can be used to parse a Date. */ public List getDateNamedFormatters() { return formats.stream() - .filter(formatString -> { - FormatNames namedFormat = FormatNames.forName(formatString); - return namedFormat != null && SUPPORTED_NAMED_DATE_FORMATS.contains(namedFormat); - }) - .map(DateFormatter::forPattern).collect(Collectors.toList()); + .filter( + formatString -> { + FormatNames namedFormat = FormatNames.forName(formatString); + return namedFormat != null && SUPPORTED_NAMED_DATE_FORMATS.contains(namedFormat); + }) + .map(DateFormatter::forPattern) + .collect(Collectors.toList()); } /** * Retrieves a list of named formatters that format for Times. + * * @return a list of DateFormatters that can be used to parse a Time. 
*/ public List getTimeNamedFormatters() { return formats.stream() - .filter(formatString -> { - FormatNames namedFormat = FormatNames.forName(formatString); - return namedFormat != null && SUPPORTED_NAMED_TIME_FORMATS.contains(namedFormat); - }) - .map(DateFormatter::forPattern).collect(Collectors.toList()); + .filter( + formatString -> { + FormatNames namedFormat = FormatNames.forName(formatString); + return namedFormat != null && SUPPORTED_NAMED_TIME_FORMATS.contains(namedFormat); + }) + .map(DateFormatter::forPattern) + .collect(Collectors.toList()); } /** * Retrieves a list of named formatters that format for DateTimes. + * * @return a list of DateFormatters that can be used to parse a DateTime. */ public List getDateTimeNamedFormatters() { return formats.stream() - .filter(formatString -> { - FormatNames namedFormat = FormatNames.forName(formatString); - return namedFormat != null && SUPPORTED_NAMED_DATETIME_FORMATS.contains(namedFormat); - }) - .map(DateFormatter::forPattern).collect(Collectors.toList()); + .filter( + formatString -> { + FormatNames namedFormat = FormatNames.forName(formatString); + return namedFormat != null && SUPPORTED_NAMED_DATETIME_FORMATS.contains(namedFormat); + }) + .map(DateFormatter::forPattern) + .collect(Collectors.toList()); } private ExprCoreType getExprTypeFromCustomFormats(List formats) { @@ -368,6 +386,7 @@ public static boolean isDateTypeCompatible(ExprType exprType) { /** * Create a Date type which has a LinkedHashMap defining all formats. + * * @return A new type object. 
*/ public static OpenSearchDateType of(String format) { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchGeoPointType.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchGeoPointType.java index c2428a59a8..75137973c5 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchGeoPointType.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchGeoPointType.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.type; import static org.opensearch.sql.data.type.ExprCoreType.UNKNOWN; @@ -11,8 +10,8 @@ import lombok.EqualsAndHashCode; /** - * The type of a geo_point value. See - * doc + * The type of a geo_point value. See doc */ @EqualsAndHashCode(callSuper = false) public class OpenSearchGeoPointType extends OpenSearchDataType { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchIpType.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchIpType.java index fccafc6caf..22581ec28c 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchIpType.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchIpType.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.type; import static org.opensearch.sql.data.type.ExprCoreType.UNKNOWN; @@ -11,8 +10,8 @@ import lombok.EqualsAndHashCode; /** - * The type of an ip value. See - * doc + * The type of an ip value. 
See doc */ @EqualsAndHashCode(callSuper = false) public class OpenSearchIpType extends OpenSearchDataType { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchTextType.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchTextType.java index 67b7296834..169eb3f49d 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchTextType.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/type/OpenSearchTextType.java @@ -15,8 +15,8 @@ import org.opensearch.sql.data.type.ExprType; /** - * The type of a text value. See - * doc + * The type of a text value. See doc */ public class OpenSearchTextType extends OpenSearchDataType { @@ -24,8 +24,7 @@ public class OpenSearchTextType extends OpenSearchDataType { // text could have fields // a read-only collection - @EqualsAndHashCode.Exclude - Map fields = ImmutableMap.of(); + @EqualsAndHashCode.Exclude Map fields = ImmutableMap.of(); private OpenSearchTextType() { super(MappingType.Text); @@ -34,6 +33,7 @@ private OpenSearchTextType() { /** * Constructs a Text Type using the passed in fields argument. + * * @param fields The fields to be used to construct the text type. * @return A new OpenSeachTextTypeObject */ @@ -67,8 +67,8 @@ protected OpenSearchDataType cloneEmpty() { } /** - * Text field doesn't have doc value (exception thrown even when you call "get") - * Limitation: assume inner field name is always "keyword". + * Text field doesn't have doc value (exception thrown even when you call "get") Limitation: + * assume inner field name is always "keyword". 
*/ public static String convertTextToKeyword(String fieldName, ExprType fieldType) { if (fieldType instanceof OpenSearchTextType diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/Content.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/Content.java index 992689a186..21070fa3d6 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/Content.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/Content.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.utils; import java.util.Iterator; @@ -12,110 +11,69 @@ /** * Regardless the underling data format, the {@link Content} define the data in abstract manner. - * which could be parsed by ElasticsearchExprValueFactory. - * There are two major use cases: - * 1. Represent the JSON data retrieve from OpenSearch search response. - * 2. Represent the Object data extract from the OpenSearch aggregation response. + * which could be parsed by ElasticsearchExprValueFactory. There are two major use cases: 1. + * Represent the JSON data retrieve from OpenSearch search response. 2. Represent the Object data + * extract from the OpenSearch aggregation response. */ public interface Content { - /** - * Is null value. - */ + /** Is null value. */ boolean isNull(); - /** - * Is number value. - */ + /** Is number value. */ boolean isNumber(); - /** - * Is float value. - */ + /** Is float value. */ boolean isFloat(); - /** - * Is double value. - */ + /** Is double value. */ boolean isDouble(); - /** - * Is long value. - */ + /** Is long value. */ boolean isLong(); - /** - * Is boolean value. - */ + /** Is boolean value. */ boolean isBoolean(); - /** - * Is string value. - */ + /** Is string value. */ boolean isString(); - /** - * Is array value. - */ + /** Is array value. */ boolean isArray(); - /** - * Get integer value. - */ + /** Get integer value. 
*/ Integer intValue(); - /** - * Get long value. - */ + /** Get long value. */ Long longValue(); - /** - * Get short value. - */ + /** Get short value. */ Short shortValue(); - /** - * Get byte value. - */ + /** Get byte value. */ Byte byteValue(); - /** - * Get float value. - */ + /** Get float value. */ Float floatValue(); - /** - * Get double value. - */ + /** Get double value. */ Double doubleValue(); - /** - * Get string value. - */ + /** Get string value. */ String stringValue(); - /** - * Get boolean value. - */ + /** Get boolean value. */ Boolean booleanValue(); - /** - * Get map of {@link Content} value. - */ + /** Get map of {@link Content} value. */ Iterator> map(); - /** - * Get array of {@link Content} value. - */ + /** Get array of {@link Content} value. */ Iterator array(); - /** - * Get geo point value. - */ + /** Get geo point value. */ Pair geoValue(); - /** - * Get {@link Object} value. - */ + /** Get {@link Object} value. */ Object objectValue(); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/ObjectContent.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/ObjectContent.java index e8875d19ba..fd45ca0d51 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/ObjectContent.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/ObjectContent.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.utils; import com.fasterxml.jackson.databind.node.ArrayNode; @@ -15,17 +14,15 @@ import lombok.RequiredArgsConstructor; import org.apache.commons.lang3.tuple.Pair; -/** - * The Implementation of Content to represent {@link Object}. - */ +/** The Implementation of Content to represent {@link Object}. 
*/ @RequiredArgsConstructor public class ObjectContent implements Content { private final Object value; /** - * The parse method parses the value as double value, - * since the key values histogram buckets are defaulted to double. + * The parse method parses the value as double value, since the key values histogram buckets are + * defaulted to double. */ @Override public Integer intValue() { @@ -81,11 +78,14 @@ public Object objectValue() { @SuppressWarnings("unchecked") @Override public Iterator> map() { - return ((Map) value).entrySet().stream() - .map(entry -> (Map.Entry) new AbstractMap.SimpleEntry( - entry.getKey(), - new ObjectContent(entry.getValue()))) - .iterator(); + return ((Map) value) + .entrySet().stream() + .map( + entry -> + (Map.Entry) + new AbstractMap.SimpleEntry( + entry.getKey(), new ObjectContent(entry.getValue()))) + .iterator(); } @SuppressWarnings("unchecked") @@ -140,8 +140,8 @@ public Pair geoValue() { return Pair.of(Double.valueOf(split[0]), Double.valueOf(split[1])); } - private T parseNumberValue(Object value, Function stringTFunction, - Function numberTFunction) { + private T parseNumberValue( + Object value, Function stringTFunction, Function numberTFunction) { if (value instanceof String) { return stringTFunction.apply((String) value); } else { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/OpenSearchJsonContent.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/OpenSearchJsonContent.java index 61da7c3b74..f79c8a708b 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/OpenSearchJsonContent.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/utils/OpenSearchJsonContent.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.utils; import com.fasterxml.jackson.databind.JsonNode; @@ -14,9 +13,7 @@ import lombok.RequiredArgsConstructor; import org.apache.commons.lang3.tuple.Pair; -/** - * 
The Implementation of Content to represent {@link JsonNode}. - */ +/** The Implementation of Content to represent {@link JsonNode}. */ @RequiredArgsConstructor public class OpenSearchJsonContent implements Content { @@ -68,8 +65,7 @@ public Iterator> map() { final JsonNode mapValue = value(); mapValue .fieldNames() - .forEachRemaining( - field -> map.put(field, new OpenSearchJsonContent(mapValue.get(field)))); + .forEachRemaining(field -> map.put(field, new OpenSearchJsonContent(mapValue.get(field)))); return map.entrySet().iterator(); } @@ -133,33 +129,27 @@ public Pair geoValue() { lat = extractDoubleValue(value.get("lat")); } catch (Exception exception) { throw new IllegalStateException( - "latitude must be number value, but got value: " + value.get( - "lat")); + "latitude must be number value, but got value: " + value.get("lat")); } try { lon = extractDoubleValue(value.get("lon")); } catch (Exception exception) { throw new IllegalStateException( - "longitude must be number value, but got value: " + value.get( - "lon")); + "longitude must be number value, but got value: " + value.get("lon")); } return Pair.of(lat, lon); } else { - throw new IllegalStateException("geo point must in format of {\"lat\": number, \"lon\": " - + "number}"); + throw new IllegalStateException( + "geo point must in format of {\"lat\": number, \"lon\": " + "number}"); } } - /** - * Getter for value. If value is array the whole array is returned. - */ + /** Getter for value. If value is array the whole array is returned. */ private JsonNode value() { return value; } - /** - * Get doubleValue from JsonNode if possible. - */ + /** Get doubleValue from JsonNode if possible. 
*/ private Double extractDoubleValue(JsonNode node) { if (node.isTextual()) { return Double.valueOf(node.textValue()); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprBinaryValue.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprBinaryValue.java index e418832117..1043c1acdc 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprBinaryValue.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprBinaryValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import lombok.EqualsAndHashCode; @@ -12,10 +11,9 @@ import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.opensearch.data.type.OpenSearchBinaryType; - /** - * OpenSearch BinaryValue. - * Todo, add this to avoid the unknown value type exception, the implementation will be changed. + * OpenSearch BinaryValue. Todo, add this to avoid the unknown value type exception, the + * implementation will be changed. */ @EqualsAndHashCode(callSuper = false) public class OpenSearchExprBinaryValue extends AbstractExprValue { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprGeoPointValue.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprGeoPointValue.java index 72f7f4a4f2..0de9c898e8 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprGeoPointValue.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprGeoPointValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import java.util.Objects; @@ -14,8 +13,8 @@ import org.opensearch.sql.opensearch.data.type.OpenSearchGeoPointType; /** - * OpenSearch GeoPointValue. 
- * Todo, add this to avoid the unknown value type exception, the implementation will be changed. + * OpenSearch GeoPointValue. Todo, add this to avoid the unknown value type exception, the + * implementation will be changed. */ public class OpenSearchExprGeoPointValue extends AbstractExprValue { @@ -37,7 +36,8 @@ public ExprType type() { @Override public int compare(ExprValue other) { - return geoPoint.toString() + return geoPoint + .toString() .compareTo((((OpenSearchExprGeoPointValue) other).geoPoint).toString()); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValue.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValue.java index a17deb7e45..467e14e7f1 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValue.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValue.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import java.util.Objects; @@ -14,8 +13,8 @@ import org.opensearch.sql.opensearch.data.type.OpenSearchIpType; /** - * OpenSearch IP ExprValue. - * Todo, add this to avoid the unknown value type exception, the implementation will be changed. + * OpenSearch IP ExprValue. Todo, add this to avoid the unknown value type exception, the + * implementation will be changed. 
*/ @RequiredArgsConstructor public class OpenSearchExprIpValue extends AbstractExprValue { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValue.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValue.java index d093588168..fb696d6b04 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValue.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValue.java @@ -3,16 +3,13 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import org.opensearch.sql.data.model.ExprStringValue; import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.opensearch.data.type.OpenSearchTextType; -/** - * Expression Text Value, it is a extension of the ExprValue by OpenSearch. - */ +/** Expression Text Value, it is a extension of the ExprValue by OpenSearch. */ public class OpenSearchExprTextValue extends ExprStringValue { public OpenSearchExprTextValue(String value) { super(value); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java index 95815d5c38..e6827a0a27 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import static org.opensearch.sql.data.type.ExprCoreType.ARRAY; @@ -25,13 +24,11 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; import 
com.google.common.collect.ImmutableMap; import java.time.Instant; import java.time.LocalDate; import java.time.LocalTime; -import java.time.ZoneId; import java.time.ZonedDateTime; import java.time.format.DateTimeParseException; import java.time.temporal.TemporalAccessor; @@ -73,18 +70,15 @@ import org.opensearch.sql.opensearch.data.utils.OpenSearchJsonContent; import org.opensearch.sql.opensearch.response.agg.OpenSearchAggregationResponseParser; -/** - * Construct ExprValue from OpenSearch response. - */ +/** Construct ExprValue from OpenSearch response. */ public class OpenSearchExprValueFactory { - /** - * The Mapping of Field and ExprType. - */ + /** The Mapping of Field and ExprType. */ private final Map typeMapping; /** - * Extend existing mapping by new data without overwrite. - * Called from aggregation only {@see AggregationQueryBuilder#buildTypeMapping}. + * Extend existing mapping by new data without overwrite. Called from aggregation only {@see + * AggregationQueryBuilder#buildTypeMapping}. + * * @param typeMapping A data type mapping produced by aggregation. 
*/ public void extendTypeMapping(Map typeMapping) { @@ -97,9 +91,7 @@ public void extendTypeMapping(Map typeMapping) { } } - @Getter - @Setter - private OpenSearchAggregationResponseParser parser; + @Getter @Setter private OpenSearchAggregationResponseParser parser; private static final String TOP_PATH = ""; @@ -107,70 +99,81 @@ public void extendTypeMapping(Map typeMapping) { private static final Map> typeActionMap = new ImmutableMap.Builder>() - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Integer), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Integer), (c, dt) -> new ExprIntegerValue(c.intValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Long), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Long), (c, dt) -> new ExprLongValue(c.longValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Short), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Short), (c, dt) -> new ExprShortValue(c.shortValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Byte), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Byte), (c, dt) -> new ExprByteValue(c.byteValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Float), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Float), (c, dt) -> new ExprFloatValue(c.floatValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Double), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Double), (c, dt) -> new ExprDoubleValue(c.doubleValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Text), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Text), (c, dt) -> new OpenSearchExprTextValue(c.stringValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword), (c, dt) -> new ExprStringValue(c.stringValue())) - 
.put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Boolean), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Boolean), (c, dt) -> ExprBooleanValue.of(c.booleanValue())) - //Handles the creation of DATE, TIME & DATETIME + // Handles the creation of DATE, TIME & DATETIME .put(OpenSearchDateType.of(TIME), OpenSearchExprValueFactory::createOpenSearchDateType) .put(OpenSearchDateType.of(DATE), OpenSearchExprValueFactory::createOpenSearchDateType) - .put(OpenSearchDateType.of(TIMESTAMP), - OpenSearchExprValueFactory::createOpenSearchDateType) - .put(OpenSearchDateType.of(DATETIME), + .put( + OpenSearchDateType.of(TIMESTAMP), OpenSearchExprValueFactory::createOpenSearchDateType) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Ip), + .put( + OpenSearchDateType.of(DATETIME), OpenSearchExprValueFactory::createOpenSearchDateType) + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Ip), (c, dt) -> new OpenSearchExprIpValue(c.stringValue())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.GeoPoint), - (c, dt) -> new OpenSearchExprGeoPointValue(c.geoValue().getLeft(), - c.geoValue().getRight())) - .put(OpenSearchDataType.of(OpenSearchDataType.MappingType.Binary), + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.GeoPoint), + (c, dt) -> + new OpenSearchExprGeoPointValue(c.geoValue().getLeft(), c.geoValue().getRight())) + .put( + OpenSearchDataType.of(OpenSearchDataType.MappingType.Binary), (c, dt) -> new OpenSearchExprBinaryValue(c.stringValue())) .build(); - /** - * Constructor of OpenSearchExprValueFactory. - */ + /** Constructor of OpenSearchExprValueFactory. */ public OpenSearchExprValueFactory(Map typeMapping) { this.typeMapping = OpenSearchDataType.traverseAndFlatten(typeMapping); } /** - * The struct construction has the following assumption: - * 1. The field has OpenSearch Object data type. - * See - * docs - * 2. The deeper field is flattened in the typeMapping. e.g. 
- * { "employ", "STRUCT" } - * { "employ.id", "INTEGER" } - * { "employ.state", "STRING" } + * The struct construction has the following assumption: 1. The field has OpenSearch Object data + * type. See docs 2. + * The deeper field is flattened in the typeMapping. e.g. { "employ", "STRUCT" } { "employ.id", + * "INTEGER" } { "employ.state", "STRING" } */ public ExprValue construct(String jsonString, boolean supportArrays) { try { - return parse(new OpenSearchJsonContent(OBJECT_MAPPER.readTree(jsonString)), TOP_PATH, - Optional.of(STRUCT), supportArrays); + return parse( + new OpenSearchJsonContent(OBJECT_MAPPER.readTree(jsonString)), + TOP_PATH, + Optional.of(STRUCT), + supportArrays); } catch (JsonProcessingException e) { throw new IllegalStateException(String.format("invalid json: %s.", jsonString), e); } } /** - * Construct ExprValue from field and its value object. Throw exception if trying - * to construct from field of unsupported type. - * Todo, add IP, GeoPoint support after we have function implementation around it. + * Construct ExprValue from field and its value object. Throw exception if trying to construct + * from field of unsupported type. Todo, add IP, GeoPoint support after we have function + * implementation around it. * * @param field field name * @param value value object @@ -181,11 +184,7 @@ public ExprValue construct(String field, Object value, boolean supportArrays) { } private ExprValue parse( - Content content, - String field, - Optional fieldType, - boolean supportArrays - ) { + Content content, String field, Optional fieldType, boolean supportArrays) { if (content.isNull() || !fieldType.isPresent()) { return ExprNullValue.of(); } @@ -209,16 +208,16 @@ private ExprValue parse( } /** - * In OpenSearch, it is possible field doesn't have type definition in mapping. - * but has empty value. For example, {"empty_field": []}. + * In OpenSearch, it is possible field doesn't have type definition in mapping. but has empty + * value. 
For example, {"empty_field": []}. */ private Optional type(String field) { return Optional.ofNullable(typeMapping.get(field)); } /** - * Parse value with the first matching formatter into {@link ExprValue} - * with corresponding {@link ExprCoreType}. + * Parse value with the first matching formatter into {@link ExprValue} with corresponding {@link + * ExprCoreType}. * * @param value - time as string * @param dataType - field data type @@ -234,12 +233,12 @@ private static ExprValue parseDateTimeString(String value, OpenSearchDateType da TemporalAccessor accessor = formatter.parse(value); ZonedDateTime zonedDateTime = DateFormatters.from(accessor); switch (returnFormat) { - case TIME: return new ExprTimeValue( - zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toLocalTime()); - case DATE: return new ExprDateValue( - zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toLocalDate()); - default: return new ExprTimestampValue( - zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toInstant()); + case TIME: + return new ExprTimeValue(zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toLocalTime()); + case DATE: + return new ExprDateValue(zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toLocalDate()); + default: + return new ExprTimestampValue(zonedDateTime.withZoneSameLocal(UTC_ZONE_ID).toInstant()); } } catch (IllegalArgumentException ignored) { // nothing to do, try another format @@ -249,19 +248,22 @@ private static ExprValue parseDateTimeString(String value, OpenSearchDateType da // if no formatters are available, try the default formatter try { switch (returnFormat) { - case TIME: return new ExprTimeValue( - DateFormatters.from(STRICT_HOUR_MINUTE_SECOND_FORMATTER.parse(value)).toLocalTime()); - case DATE: return new ExprDateValue( - DateFormatters.from(STRICT_YEAR_MONTH_DAY_FORMATTER.parse(value)).toLocalDate()); - default: return new ExprTimestampValue( - DateFormatters.from(DATE_TIME_FORMATTER.parse(value)).toInstant()); + case TIME: + return new ExprTimeValue( + 
DateFormatters.from(STRICT_HOUR_MINUTE_SECOND_FORMATTER.parse(value)).toLocalTime()); + case DATE: + return new ExprDateValue( + DateFormatters.from(STRICT_YEAR_MONTH_DAY_FORMATTER.parse(value)).toLocalDate()); + default: + return new ExprTimestampValue( + DateFormatters.from(DATE_TIME_FORMATTER.parse(value)).toInstant()); } } catch (DateTimeParseException ignored) { // ignored } - throw new IllegalArgumentException(String.format( - "Construct %s from \"%s\" failed, unsupported format.", returnFormat, value)); + throw new IllegalArgumentException( + String.format("Construct %s from \"%s\" failed, unsupported format.", returnFormat, value)); } private static ExprValue createOpenSearchDateType(Content value, ExprType type) { @@ -272,8 +274,8 @@ private static ExprValue createOpenSearchDateType(Content value, ExprType type) var numFormatters = dt.getNumericNamedFormatters(); if (numFormatters.size() > 0 || !dt.hasFormats()) { long epochMillis = 0; - if (numFormatters.contains(DateFormatter.forPattern( - FormatNames.EPOCH_SECOND.getSnakeCaseName()))) { + if (numFormatters.contains( + DateFormatter.forPattern(FormatNames.EPOCH_SECOND.getSnakeCaseName()))) { // no CamelCase for `EPOCH_*` formats epochMillis = value.longValue() * 1000; } else /* EPOCH_MILLIS */ { @@ -281,9 +283,12 @@ private static ExprValue createOpenSearchDateType(Content value, ExprType type) } Instant instant = Instant.ofEpochMilli(epochMillis); switch ((ExprCoreType) returnFormat) { - case TIME: return new ExprTimeValue(LocalTime.from(instant.atZone(UTC_ZONE_ID))); - case DATE: return new ExprDateValue(LocalDate.ofInstant(instant, UTC_ZONE_ID)); - default: return new ExprTimestampValue(instant); + case TIME: + return new ExprTimeValue(LocalTime.from(instant.atZone(UTC_ZONE_ID))); + case DATE: + return new ExprDateValue(LocalDate.ofInstant(instant, UTC_ZONE_ID)); + default: + return new ExprTimestampValue(instant); } } else { // custom format @@ -299,6 +304,7 @@ private static ExprValue 
createOpenSearchDateType(Content value, ExprType type) /** * Parse struct content. + * * @param content Content to parse. * @param prefix Prefix for Level of object depth to parse. * @param supportArrays Parsing the whole array if array is type nested. @@ -306,15 +312,23 @@ private static ExprValue createOpenSearchDateType(Content value, ExprType type) */ private ExprValue parseStruct(Content content, String prefix, boolean supportArrays) { LinkedHashMap result = new LinkedHashMap<>(); - content.map().forEachRemaining(entry -> result.put(entry.getKey(), - parse(entry.getValue(), - makeField(prefix, entry.getKey()), - type(makeField(prefix, entry.getKey())), supportArrays))); + content + .map() + .forEachRemaining( + entry -> + result.put( + entry.getKey(), + parse( + entry.getValue(), + makeField(prefix, entry.getKey()), + type(makeField(prefix, entry.getKey())), + supportArrays))); return new ExprTupleValue(result); } /** * Parse array content. Can also parse nested which isn't necessarily an array. + * * @param content Content to parse. * @param prefix Prefix for Level of object depth to parse. * @param type Type of content parsing. @@ -322,32 +336,31 @@ private ExprValue parseStruct(Content content, String prefix, boolean supportArr * @return Value parsed from content. */ private ExprValue parseArray( - Content content, - String prefix, - ExprType type, - boolean supportArrays - ) { + Content content, String prefix, ExprType type, boolean supportArrays) { List result = new ArrayList<>(); // ARRAY is mapped to nested but can take the json structure of an Object. if (content.objectValue() instanceof ObjectNode) { result.add(parseStruct(content, prefix, supportArrays)); // non-object type arrays are only supported when parsing inner_hits of OS response. 
- } else if ( - !(type instanceof OpenSearchDataType + } else if (!(type instanceof OpenSearchDataType && ((OpenSearchDataType) type).getExprType().equals(ARRAY)) && !supportArrays) { return parseInnerArrayValue(content.array().next(), prefix, type, supportArrays); } else { - content.array().forEachRemaining(v -> { - result.add(parseInnerArrayValue(v, prefix, type, supportArrays)); - }); + content + .array() + .forEachRemaining( + v -> { + result.add(parseInnerArrayValue(v, prefix, type, supportArrays)); + }); } return new ExprCollectionValue(result); } /** * Parse inner array value. Can be object type and recurse continues. + * * @param content Array index being parsed. * @param prefix Prefix for value. * @param type Type of inner array value. @@ -355,11 +368,7 @@ private ExprValue parseArray( * @return Inner array value. */ private ExprValue parseInnerArrayValue( - Content content, - String prefix, - ExprType type, - boolean supportArrays - ) { + Content content, String prefix, ExprType type, boolean supportArrays) { if (type instanceof OpenSearchIpType || type instanceof OpenSearchBinaryType || type instanceof OpenSearchDateType @@ -384,6 +393,7 @@ private ExprValue parseInnerArrayValue( /** * Make complete path string for field. + * * @param path Path of field. * @param field Field to append to path. * @return Field appended to path level. 
diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/OpenSearchExecutionEngine.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/OpenSearchExecutionEngine.java index 31e5c7f957..21046956d0 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/OpenSearchExecutionEngine.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/OpenSearchExecutionEngine.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.executor; import java.util.ArrayList; @@ -36,8 +35,10 @@ public void execute(PhysicalPlan physicalPlan, ResponseListener l } @Override - public void execute(PhysicalPlan physicalPlan, ExecutionContext context, - ResponseListener listener) { + public void execute( + PhysicalPlan physicalPlan, + ExecutionContext context, + ResponseListener listener) { PhysicalPlan plan = executionProtector.protect(physicalPlan); client.schedule( () -> { @@ -51,8 +52,9 @@ public void execute(PhysicalPlan physicalPlan, ExecutionContext context, result.add(plan.next()); } - QueryResponse response = new QueryResponse(physicalPlan.schema(), result, - planSerializer.convertToCursor(plan)); + QueryResponse response = + new QueryResponse( + physicalPlan.schema(), result, planSerializer.convertToCursor(plan)); listener.onResponse(response); } catch (Exception e) { listener.onFailure(e); @@ -64,21 +66,27 @@ public void execute(PhysicalPlan physicalPlan, ExecutionContext context, @Override public void explain(PhysicalPlan plan, ResponseListener listener) { - client.schedule(() -> { - try { - Explain openSearchExplain = new Explain() { - @Override - public ExplainResponseNode visitTableScan(TableScanOperator node, Object context) { - return explain(node, context, explainNode -> { - explainNode.setDescription(Map.of("request", node.explain())); - }); - } - }; + client.schedule( + () -> { + try { + Explain openSearchExplain = + new Explain() { + @Override + public 
ExplainResponseNode visitTableScan( + TableScanOperator node, Object context) { + return explain( + node, + context, + explainNode -> { + explainNode.setDescription(Map.of("request", node.explain())); + }); + } + }; - listener.onResponse(openSearchExplain.apply(plan)); - } catch (Exception e) { - listener.onFailure(e); - } - }); + listener.onResponse(openSearchExplain.apply(plan)); + } catch (Exception e) { + listener.onFailure(e); + } + }); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManager.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManager.java index 9c6fcdb825..dbe91dc398 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManager.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManager.java @@ -18,9 +18,7 @@ import org.opensearch.sql.executor.execution.AbstractPlan; import org.opensearch.threadpool.ThreadPool; -/** - * QueryManager implemented in OpenSearch cluster. - */ +/** QueryManager implemented in OpenSearch cluster. */ @RequiredArgsConstructor public class OpenSearchQueryManager implements QueryManager { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/ExecutionProtector.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/ExecutionProtector.java index 42c49b44d8..3a11ee99d7 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/ExecutionProtector.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/ExecutionProtector.java @@ -3,19 +3,14 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.executor.protector; import org.opensearch.sql.planner.physical.PhysicalPlan; import org.opensearch.sql.planner.physical.PhysicalPlanNodeVisitor; -/** - * Execution Plan Protector. - */ +/** Execution Plan Protector. 
*/ public abstract class ExecutionProtector extends PhysicalPlanNodeVisitor { - /** - * Decorated the PhysicalPlan to run in resource sensitive mode. - */ + /** Decorated the PhysicalPlan to run in resource sensitive mode. */ public abstract PhysicalPlan protect(PhysicalPlan physicalPlan); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/NoopExecutionProtector.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/NoopExecutionProtector.java index 03e2f0c61c..88a5108159 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/NoopExecutionProtector.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/NoopExecutionProtector.java @@ -3,14 +3,11 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.executor.protector; import org.opensearch.sql.planner.physical.PhysicalPlan; -/** - * No operation execution protector. - */ +/** No operation execution protector. */ public class NoopExecutionProtector extends ExecutionProtector { @Override diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/OpenSearchExecutionProtector.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/OpenSearchExecutionProtector.java index dff5545785..0905c2f4b4 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/OpenSearchExecutionProtector.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/OpenSearchExecutionProtector.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.executor.protector; import lombok.RequiredArgsConstructor; @@ -28,15 +27,11 @@ import org.opensearch.sql.planner.physical.WindowOperator; import org.opensearch.sql.storage.TableScanOperator; -/** - * OpenSearch Execution Protector. - */ +/** OpenSearch Execution Protector. 
*/ @RequiredArgsConstructor public class OpenSearchExecutionProtector extends ExecutionProtector { - /** - * OpenSearch resource monitor. - */ + /** OpenSearch resource monitor. */ private final ResourceMonitor resourceMonitor; public PhysicalPlan protect(PhysicalPlan physicalPlan) { @@ -44,8 +39,8 @@ public PhysicalPlan protect(PhysicalPlan physicalPlan) { } /** - * Don't protect {@link CursorCloseOperator} and entire nested tree, because - * {@link CursorCloseOperator} as designed as no-op. + * Don't protect {@link CursorCloseOperator} and entire nested tree, because {@link + * CursorCloseOperator} as designed as no-op. */ @Override public PhysicalPlan visitCursorClose(CursorCloseOperator node, Object context) { @@ -59,14 +54,18 @@ public PhysicalPlan visitFilter(FilterOperator node, Object context) { @Override public PhysicalPlan visitAggregation(AggregationOperator node, Object context) { - return new AggregationOperator(visitInput(node.getInput(), context), node.getAggregatorList(), - node.getGroupByExprList()); + return new AggregationOperator( + visitInput(node.getInput(), context), node.getAggregatorList(), node.getGroupByExprList()); } @Override public PhysicalPlan visitRareTopN(RareTopNOperator node, Object context) { - return new RareTopNOperator(visitInput(node.getInput(), context), node.getCommandType(), - node.getNoOfResults(), node.getFieldExprList(), node.getGroupByExprList()); + return new RareTopNOperator( + visitInput(node.getInput(), context), + node.getCommandType(), + node.getNoOfResults(), + node.getFieldExprList(), + node.getGroupByExprList()); } @Override @@ -74,9 +73,7 @@ public PhysicalPlan visitRename(RenameOperator node, Object context) { return new RenameOperator(visitInput(node.getInput(), context), node.getMapping()); } - /** - * Decorate with {@link ResourceMonitorPlan}. - */ + /** Decorate with {@link ResourceMonitorPlan}. 
*/ @Override public PhysicalPlan visitTableScan(TableScanOperator node, Object context) { return doProtect(node); @@ -84,7 +81,9 @@ public PhysicalPlan visitTableScan(TableScanOperator node, Object context) { @Override public PhysicalPlan visitProject(ProjectOperator node, Object context) { - return new ProjectOperator(visitInput(node.getInput(), context), node.getProjectList(), + return new ProjectOperator( + visitInput(node.getInput(), context), + node.getProjectList(), node.getNamedParseExpressions()); } @@ -102,15 +101,19 @@ public PhysicalPlan visitEval(EvalOperator node, Object context) { public PhysicalPlan visitNested(NestedOperator node, Object context) { return doProtect( new NestedOperator( - visitInput(node.getInput(), context), node.getFields(), node.getGroupedPathsAndFields() - ) - ); + visitInput(node.getInput(), context), + node.getFields(), + node.getGroupedPathsAndFields())); } @Override public PhysicalPlan visitDedupe(DedupeOperator node, Object context) { - return new DedupeOperator(visitInput(node.getInput(), context), node.getDedupeList(), - node.getAllowedDuplication(), node.getKeepEmpty(), node.getConsecutive()); + return new DedupeOperator( + visitInput(node.getInput(), context), + node.getDedupeList(), + node.getAllowedDuplication(), + node.getKeepEmpty(), + node.getConsecutive()); } @Override @@ -121,20 +124,14 @@ public PhysicalPlan visitWindow(WindowOperator node, Object context) { node.getWindowDefinition()); } - /** - * Decorate with {@link ResourceMonitorPlan}. - */ + /** Decorate with {@link ResourceMonitorPlan}. */ @Override public PhysicalPlan visitSort(SortOperator node, Object context) { - return doProtect( - new SortOperator( - visitInput(node.getInput(), context), - node.getSortList())); + return doProtect(new SortOperator(visitInput(node.getInput(), context), node.getSortList())); } /** - * Values are a sequence of rows of literal value in memory - * which doesn't need memory protection. 
+ * Values are a sequence of rows of literal value in memory which doesn't need memory protection. */ @Override public PhysicalPlan visitValues(ValuesOperator node, Object context) { @@ -144,41 +141,38 @@ public PhysicalPlan visitValues(ValuesOperator node, Object context) { @Override public PhysicalPlan visitLimit(LimitOperator node, Object context) { return new LimitOperator( - visitInput(node.getInput(), context), - node.getLimit(), - node.getOffset()); + visitInput(node.getInput(), context), node.getLimit(), node.getOffset()); } @Override public PhysicalPlan visitMLCommons(PhysicalPlan node, Object context) { MLCommonsOperator mlCommonsOperator = (MLCommonsOperator) node; return doProtect( - new MLCommonsOperator(visitInput(mlCommonsOperator.getInput(), context), - mlCommonsOperator.getAlgorithm(), - mlCommonsOperator.getArguments(), - mlCommonsOperator.getNodeClient()) - ); + new MLCommonsOperator( + visitInput(mlCommonsOperator.getInput(), context), + mlCommonsOperator.getAlgorithm(), + mlCommonsOperator.getArguments(), + mlCommonsOperator.getNodeClient())); } @Override public PhysicalPlan visitAD(PhysicalPlan node, Object context) { ADOperator adOperator = (ADOperator) node; return doProtect( - new ADOperator(visitInput(adOperator.getInput(), context), - adOperator.getArguments(), - adOperator.getNodeClient() - ) - ); + new ADOperator( + visitInput(adOperator.getInput(), context), + adOperator.getArguments(), + adOperator.getNodeClient())); } @Override public PhysicalPlan visitML(PhysicalPlan node, Object context) { MLOperator mlOperator = (MLOperator) node; return doProtect( - new MLOperator(visitInput(mlOperator.getInput(), context), - mlOperator.getArguments(), - mlOperator.getNodeClient()) - ); + new MLOperator( + visitInput(mlOperator.getInput(), context), + mlOperator.getArguments(), + mlOperator.getNodeClient())); } PhysicalPlan visitInput(PhysicalPlan node, Object context) { @@ -199,5 +193,4 @@ protected PhysicalPlan doProtect(PhysicalPlan node) { 
private boolean isProtected(PhysicalPlan node) { return (node instanceof ResourceMonitorPlan); } - } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/ResourceMonitorPlan.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/ResourceMonitorPlan.java index 4c02affc5e..e3bc48ba72 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/ResourceMonitorPlan.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/ResourceMonitorPlan.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.executor.protector; import java.io.IOException; @@ -19,36 +18,23 @@ import org.opensearch.sql.planner.physical.PhysicalPlan; import org.opensearch.sql.planner.physical.PhysicalPlanNodeVisitor; -/** - * A PhysicalPlan which will run the delegate plan in resource protection manner. - */ +/** A PhysicalPlan which will run the delegate plan in resource protection manner. */ @ToString @RequiredArgsConstructor @EqualsAndHashCode(callSuper = false) public class ResourceMonitorPlan extends PhysicalPlan implements SerializablePlan { - /** - * How many method calls to delegate's next() to perform resource check once. - */ + /** How many method calls to delegate's next() to perform resource check once. */ public static final long NUMBER_OF_NEXT_CALL_TO_CHECK = 1000; - /** - * Delegated PhysicalPlan. - */ + /** Delegated PhysicalPlan. */ private final PhysicalPlan delegate; - /** - * ResourceMonitor. - */ - @ToString.Exclude - private final ResourceMonitor monitor; - - /** - * Count how many calls to delegate's next() already. - */ - @EqualsAndHashCode.Exclude - private long nextCallCount = 0L; + /** ResourceMonitor. */ + @ToString.Exclude private final ResourceMonitor monitor; + /** Count how many calls to delegate's next() already. 
*/ + @EqualsAndHashCode.Exclude private long nextCallCount = 0L; @Override public R accept(PhysicalPlanNodeVisitor visitor, C context) { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/mapping/IndexMapping.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/mapping/IndexMapping.java index 0185ca95b6..87aa9d93dd 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/mapping/IndexMapping.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/mapping/IndexMapping.java @@ -3,39 +3,34 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.mapping; -import java.util.HashMap; -import java.util.LinkedHashMap; import java.util.Map; -import java.util.stream.Collectors; import lombok.Getter; import lombok.ToString; -import org.apache.commons.lang3.EnumUtils; import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.sql.opensearch.data.type.OpenSearchDataType; /** - * OpenSearch index mapping. Because there is no specific behavior for different field types, - * string is used to represent field types. + * OpenSearch index mapping. Because there is no specific behavior for different field types, string + * is used to represent field types. */ @ToString public class IndexMapping { /** Field mappings from field name to field type in OpenSearch date type system. */ - @Getter - private final Map fieldMappings; + @Getter private final Map fieldMappings; /** * Maps each column in the index definition to an OpenSearchSQL datatype. + * * @param metaData The metadata retrieved from the index mapping defined by the user. 
*/ @SuppressWarnings("unchecked") public IndexMapping(MappingMetadata metaData) { - this.fieldMappings = OpenSearchDataType.parseMapping( - (Map) metaData.getSourceAsMap().getOrDefault("properties", null) - ); + this.fieldMappings = + OpenSearchDataType.parseMapping( + (Map) metaData.getSourceAsMap().getOrDefault("properties", null)); } /** diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthy.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthy.java index c0a4aeb0b7..4b7b6c5dcb 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthy.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthy.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.monitor; import com.google.common.annotations.VisibleForTesting; @@ -11,9 +10,7 @@ import lombok.NoArgsConstructor; import lombok.extern.log4j.Log4j2; -/** - * OpenSearch Memory Monitor. - */ +/** OpenSearch Memory Monitor. */ @Log4j2 public class OpenSearchMemoryHealthy { private final RandomFail randomFail; @@ -25,16 +22,12 @@ public OpenSearchMemoryHealthy() { } @VisibleForTesting - public OpenSearchMemoryHealthy( - RandomFail randomFail, - MemoryUsage memoryUsage) { + public OpenSearchMemoryHealthy(RandomFail randomFail, MemoryUsage memoryUsage) { this.randomFail = randomFail; this.memoryUsage = memoryUsage; } - /** - * Is Memory Healthy. Calculate based on the current heap memory usage. - */ + /** Is Memory Healthy. Calculate based on the current heap memory usage. 
*/ public boolean isMemoryHealthy(long limitBytes) { final long memoryUsage = this.memoryUsage.usage(); log.debug("Memory usage:{}, limit:{}", memoryUsage, limitBytes); @@ -66,12 +59,8 @@ public long usage() { } @NoArgsConstructor - public static class MemoryUsageExceedFastFailureException extends RuntimeException { - - } + public static class MemoryUsageExceedFastFailureException extends RuntimeException {} @NoArgsConstructor - public static class MemoryUsageExceedException extends RuntimeException { - - } + public static class MemoryUsageExceedException extends RuntimeException {} } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitor.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitor.java index 5ed82c7a5d..3c689f4933 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitor.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitor.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.monitor; import io.github.resilience4j.core.IntervalFunction; @@ -17,8 +16,7 @@ /** * {@link ResourceMonitor} implementation on Elasticsearch. When the heap memory usage exceeds - * certain threshold, the monitor is not healthy. - * Todo, add metrics. + * certain threshold, the monitor is not healthy. Todo, add metrics. */ @Log4j2 public class OpenSearchResourceMonitor extends ResourceMonitor { @@ -26,20 +24,15 @@ public class OpenSearchResourceMonitor extends ResourceMonitor { private final Retry retry; private final OpenSearchMemoryHealthy memoryMonitor; - /** - * Constructor of ElasticsearchCircuitBreaker. - */ - public OpenSearchResourceMonitor( - Settings settings, - OpenSearchMemoryHealthy memoryMonitor) { + /** Constructor of ElasticsearchCircuitBreaker. 
*/ + public OpenSearchResourceMonitor(Settings settings, OpenSearchMemoryHealthy memoryMonitor) { this.settings = settings; RetryConfig config = RetryConfig.custom() .maxAttempts(3) .intervalFunction(IntervalFunction.ofExponentialRandomBackoff(1000)) .retryExceptions(OpenSearchMemoryHealthy.MemoryUsageExceedException.class) - .ignoreExceptions( - OpenSearchMemoryHealthy.MemoryUsageExceedFastFailureException.class) + .ignoreExceptions(OpenSearchMemoryHealthy.MemoryUsageExceedFastFailureException.class) .build(); retry = Retry.of("mem", config); this.memoryMonitor = memoryMonitor; @@ -55,9 +48,7 @@ public boolean isHealthy() { try { ByteSizeValue limit = settings.getSettingValue(Settings.Key.QUERY_MEMORY_LIMIT); Supplier booleanSupplier = - Retry.decorateSupplier(retry, - () -> memoryMonitor - .isMemoryHealthy(limit.getBytes())); + Retry.decorateSupplier(retry, () -> memoryMonitor.isMemoryHealthy(limit.getBytes())); return booleanSupplier.get(); } catch (Exception e) { return false; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/ADOperator.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/ADOperator.java index 7a0ae7c960..f9c32b7424 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/ADOperator.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/ADOperator.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.planner.physical; import static org.opensearch.sql.utils.MLCommonsConstants.ANOMALY_RATE; @@ -41,65 +40,62 @@ import org.opensearch.sql.planner.physical.PhysicalPlan; import org.opensearch.sql.planner.physical.PhysicalPlanNodeVisitor; -/** - * AD Physical operator to call AD interface to get results for - * algorithm execution. - */ +/** AD Physical operator to call AD interface to get results for algorithm execution. 
*/ @RequiredArgsConstructor @EqualsAndHashCode(callSuper = false) public class ADOperator extends MLCommonsOperatorActions { - @Getter - private final PhysicalPlan input; + @Getter private final PhysicalPlan input; - @Getter - private final Map arguments; + @Getter private final Map arguments; - @Getter - private final NodeClient nodeClient; + @Getter private final NodeClient nodeClient; - @EqualsAndHashCode.Exclude - private Iterator iterator; + @EqualsAndHashCode.Exclude private Iterator iterator; private FunctionName rcfType; @Override public void open() { super.open(); - String categoryField = arguments.containsKey(CATEGORY_FIELD) - ? (String) arguments.get(CATEGORY_FIELD).getValue() : null; - List> - inputDataFrames = generateCategorizedInputDataset(input, categoryField); + String categoryField = + arguments.containsKey(CATEGORY_FIELD) + ? (String) arguments.get(CATEGORY_FIELD).getValue() + : null; + List> inputDataFrames = + generateCategorizedInputDataset(input, categoryField); MLAlgoParams mlAlgoParams = convertArgumentToMLParameter(arguments); - List predictionResults = inputDataFrames.stream() - .map(pair -> getMLPredictionResult(rcfType, mlAlgoParams, pair.getRight(), nodeClient)) - .collect(Collectors.toList()); + List predictionResults = + inputDataFrames.stream() + .map(pair -> getMLPredictionResult(rcfType, mlAlgoParams, pair.getRight(), nodeClient)) + .collect(Collectors.toList()); Iterator> inputDataFramesIter = inputDataFrames.iterator(); Iterator predictionResultIter = predictionResults.iterator(); - iterator = new Iterator() { - private DataFrame inputDataFrame = null; - private Iterator inputRowIter = null; - private MLPredictionOutput predictionResult = null; - private Iterator resultRowIter = null; - - @Override - public boolean hasNext() { - return inputRowIter != null && inputRowIter.hasNext() || inputDataFramesIter.hasNext(); - } - - @Override - public ExprValue next() { - if (inputRowIter == null || !inputRowIter.hasNext()) { - 
inputDataFrame = inputDataFramesIter.next().getLeft(); - inputRowIter = inputDataFrame.iterator(); - predictionResult = predictionResultIter.next(); - resultRowIter = predictionResult.getPredictionResult().iterator(); - } - return buildResult(inputRowIter, inputDataFrame, predictionResult, resultRowIter); - } - }; + iterator = + new Iterator() { + private DataFrame inputDataFrame = null; + private Iterator inputRowIter = null; + private MLPredictionOutput predictionResult = null; + private Iterator resultRowIter = null; + + @Override + public boolean hasNext() { + return inputRowIter != null && inputRowIter.hasNext() || inputDataFramesIter.hasNext(); + } + + @Override + public ExprValue next() { + if (inputRowIter == null || !inputRowIter.hasNext()) { + inputDataFrame = inputDataFramesIter.next().getLeft(); + inputRowIter = inputDataFrame.iterator(); + predictionResult = predictionResultIter.next(); + resultRowIter = predictionResult.getPredictionResult().iterator(); + } + return buildResult(inputRowIter, inputDataFrame, predictionResult, resultRowIter); + } + }; } @Override @@ -126,53 +122,66 @@ protected MLAlgoParams convertArgumentToMLParameter(Map argumen if (arguments.get(TIME_FIELD) == null) { rcfType = FunctionName.BATCH_RCF; return BatchRCFParams.builder() - .numberOfTrees(arguments.containsKey(NUMBER_OF_TREES) - ? ((Integer) arguments.get(NUMBER_OF_TREES).getValue()) - : null) - .sampleSize(arguments.containsKey(SAMPLE_SIZE) - ? ((Integer) arguments.get(SAMPLE_SIZE).getValue()) - : null) - .outputAfter(arguments.containsKey(OUTPUT_AFTER) - ? ((Integer) arguments.get(OUTPUT_AFTER).getValue()) - : null) - .trainingDataSize(arguments.containsKey(TRAINING_DATA_SIZE) - ? ((Integer) arguments.get(TRAINING_DATA_SIZE).getValue()) - : null) - .anomalyScoreThreshold(arguments.containsKey(ANOMALY_SCORE_THRESHOLD) - ? ((Double) arguments.get(ANOMALY_SCORE_THRESHOLD).getValue()) - : null) + .numberOfTrees( + arguments.containsKey(NUMBER_OF_TREES) + ? 
((Integer) arguments.get(NUMBER_OF_TREES).getValue()) + : null) + .sampleSize( + arguments.containsKey(SAMPLE_SIZE) + ? ((Integer) arguments.get(SAMPLE_SIZE).getValue()) + : null) + .outputAfter( + arguments.containsKey(OUTPUT_AFTER) + ? ((Integer) arguments.get(OUTPUT_AFTER).getValue()) + : null) + .trainingDataSize( + arguments.containsKey(TRAINING_DATA_SIZE) + ? ((Integer) arguments.get(TRAINING_DATA_SIZE).getValue()) + : null) + .anomalyScoreThreshold( + arguments.containsKey(ANOMALY_SCORE_THRESHOLD) + ? ((Double) arguments.get(ANOMALY_SCORE_THRESHOLD).getValue()) + : null) .build(); } rcfType = FunctionName.FIT_RCF; return FitRCFParams.builder() - .numberOfTrees(arguments.containsKey(NUMBER_OF_TREES) - ? ((Integer) arguments.get(NUMBER_OF_TREES).getValue()) - : null) - .shingleSize(arguments.containsKey(SHINGLE_SIZE) - ? ((Integer) arguments.get(SHINGLE_SIZE).getValue()) - : null) - .sampleSize(arguments.containsKey(SAMPLE_SIZE) - ? ((Integer) arguments.get(SAMPLE_SIZE).getValue()) - : null) - .outputAfter(arguments.containsKey(OUTPUT_AFTER) - ? ((Integer) arguments.get(OUTPUT_AFTER).getValue()) - : null) - .timeDecay(arguments.containsKey(TIME_DECAY) - ? ((Double) arguments.get(TIME_DECAY).getValue()) - : null) - .anomalyRate(arguments.containsKey(ANOMALY_RATE) - ? ((Double) arguments.get(ANOMALY_RATE).getValue()) - : null) - .timeField(arguments.containsKey(TIME_FIELD) - ? ((String) arguments.get(TIME_FIELD).getValue()) - : null) - .dateFormat(arguments.containsKey(DATE_FORMAT) - ? ((String) arguments.get(DATE_FORMAT).getValue()) - : "yyyy-MM-dd HH:mm:ss") - .timeZone(arguments.containsKey(TIME_ZONE) - ? ((String) arguments.get(TIME_ZONE).getValue()) - : null) + .numberOfTrees( + arguments.containsKey(NUMBER_OF_TREES) + ? ((Integer) arguments.get(NUMBER_OF_TREES).getValue()) + : null) + .shingleSize( + arguments.containsKey(SHINGLE_SIZE) + ? ((Integer) arguments.get(SHINGLE_SIZE).getValue()) + : null) + .sampleSize( + arguments.containsKey(SAMPLE_SIZE) + ? 
((Integer) arguments.get(SAMPLE_SIZE).getValue()) + : null) + .outputAfter( + arguments.containsKey(OUTPUT_AFTER) + ? ((Integer) arguments.get(OUTPUT_AFTER).getValue()) + : null) + .timeDecay( + arguments.containsKey(TIME_DECAY) + ? ((Double) arguments.get(TIME_DECAY).getValue()) + : null) + .anomalyRate( + arguments.containsKey(ANOMALY_RATE) + ? ((Double) arguments.get(ANOMALY_RATE).getValue()) + : null) + .timeField( + arguments.containsKey(TIME_FIELD) + ? ((String) arguments.get(TIME_FIELD).getValue()) + : null) + .dateFormat( + arguments.containsKey(DATE_FORMAT) + ? ((String) arguments.get(DATE_FORMAT).getValue()) + : "yyyy-MM-dd HH:mm:ss") + .timeZone( + arguments.containsKey(TIME_ZONE) + ? ((String) arguments.get(TIME_ZONE).getValue()) + : null) .build(); } - } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLCommonsOperator.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLCommonsOperator.java index de0c23c4e9..ef60782a24 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLCommonsOperator.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLCommonsOperator.java @@ -30,26 +30,21 @@ import org.opensearch.sql.planner.physical.PhysicalPlanNodeVisitor; /** - * ml-commons Physical operator to call machine learning interface to get results for - * algorithm execution. + * ml-commons Physical operator to call machine learning interface to get results for algorithm + * execution. 
*/ @RequiredArgsConstructor @EqualsAndHashCode(callSuper = false) public class MLCommonsOperator extends MLCommonsOperatorActions { - @Getter - private final PhysicalPlan input; + @Getter private final PhysicalPlan input; - @Getter - private final String algorithm; + @Getter private final String algorithm; - @Getter - private final Map arguments; + @Getter private final Map arguments; - @Getter - private final NodeClient nodeClient; + @Getter private final NodeClient nodeClient; - @EqualsAndHashCode.Exclude - private Iterator iterator; + @EqualsAndHashCode.Exclude private Iterator iterator; @Override public void open() { @@ -57,22 +52,26 @@ public void open() { DataFrame inputDataFrame = generateInputDataset(input); MLAlgoParams mlAlgoParams = convertArgumentToMLParameter(arguments, algorithm); MLPredictionOutput predictionResult = - getMLPredictionResult(FunctionName.valueOf(algorithm.toUpperCase()), - mlAlgoParams, inputDataFrame, nodeClient); + getMLPredictionResult( + FunctionName.valueOf(algorithm.toUpperCase()), + mlAlgoParams, + inputDataFrame, + nodeClient); Iterator inputRowIter = inputDataFrame.iterator(); Iterator resultRowIter = predictionResult.getPredictionResult().iterator(); - iterator = new Iterator() { - @Override - public boolean hasNext() { - return inputRowIter.hasNext(); - } - - @Override - public ExprValue next() { - return buildResult(inputRowIter, inputDataFrame, predictionResult, resultRowIter); - } - }; + iterator = + new Iterator() { + @Override + public boolean hasNext() { + return inputRowIter.hasNext(); + } + + @Override + public ExprValue next() { + return buildResult(inputRowIter, inputDataFrame, predictionResult, resultRowIter); + } + }; } @Override @@ -95,30 +94,33 @@ public List getChild() { return Collections.singletonList(input); } - protected MLAlgoParams convertArgumentToMLParameter(Map arguments, - String algorithm) { + protected MLAlgoParams convertArgumentToMLParameter( + Map arguments, String algorithm) { switch 
(FunctionName.valueOf(algorithm.toUpperCase())) { case KMEANS: return KMeansParams.builder() - .centroids(arguments.containsKey(CENTROIDS) - ? ((Integer) arguments.get(CENTROIDS).getValue()) + .centroids( + arguments.containsKey(CENTROIDS) + ? ((Integer) arguments.get(CENTROIDS).getValue()) + : null) + .iterations( + arguments.containsKey(ITERATIONS) + ? ((Integer) arguments.get(ITERATIONS).getValue()) + : null) + .distanceType( + arguments.containsKey(DISTANCE_TYPE) + ? (arguments.get(DISTANCE_TYPE).getValue() != null + ? KMeansParams.DistanceType.valueOf( + ((String) arguments.get(DISTANCE_TYPE).getValue()).toUpperCase()) : null) - .iterations(arguments.containsKey(ITERATIONS) - ? ((Integer) arguments.get(ITERATIONS).getValue()) - : null) - .distanceType(arguments.containsKey(DISTANCE_TYPE) - ? (arguments.get(DISTANCE_TYPE).getValue() != null - ? KMeansParams.DistanceType.valueOf(( - (String) arguments.get(DISTANCE_TYPE).getValue()).toUpperCase()) - : null) - : null) - .build(); + : null) + .build(); default: // TODO: update available algorithms in the message when adding a new case throw new IllegalArgumentException( - String.format("unsupported algorithm: %s, available algorithms: %s.", + String.format( + "unsupported algorithm: %s, available algorithms: %s.", FunctionName.valueOf(algorithm.toUpperCase()), KMEANS)); } } - } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLCommonsOperatorActions.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLCommonsOperatorActions.java index e1f12fb8a7..ddb0e2d5f4 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLCommonsOperatorActions.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLCommonsOperatorActions.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.planner.physical; import static 
org.opensearch.sql.utils.MLCommonsConstants.MODELID; @@ -48,13 +47,12 @@ import org.opensearch.sql.opensearch.client.MLClient; import org.opensearch.sql.planner.physical.PhysicalPlan; -/** - * Common method actions for ml-commons related operators. - */ +/** Common method actions for ml-commons related operators. */ public abstract class MLCommonsOperatorActions extends PhysicalPlan { /** * generate ml-commons request input dataset. + * * @param input physical input * @return ml-commons dataframe */ @@ -70,33 +68,37 @@ protected DataFrame generateInputDataset(PhysicalPlan input) { /** * Generate ml-commons request input dataset per each category based on a given category field. * Each category value will be a {@link DataFrame} pair, where the left one contains all fields - * for building response, and the right one contains all fields except the aggregated field for - * ml prediction. This is a temporary solution before ml-commons supports 2 dimensional input. + * for building response, and the right one contains all fields except the aggregated field for ml + * prediction. This is a temporary solution before ml-commons supports 2 dimensional input. * * @param input physical input * @param categoryField String, the field should be aggregated on * @return list of ml-commons dataframe pairs */ - protected List> generateCategorizedInputDataset(PhysicalPlan input, - String categoryField) { + protected List> generateCategorizedInputDataset( + PhysicalPlan input, String categoryField) { Map inputMap = new HashMap<>(); while (input.hasNext()) { Map tupleValue = input.next().tupleValue(); ExprValue categoryValue = categoryField == null ? 
null : tupleValue.get(categoryField); - MLInputRows inputData = - inputMap.computeIfAbsent(categoryValue, k -> new MLInputRows()); + MLInputRows inputData = inputMap.computeIfAbsent(categoryValue, k -> new MLInputRows()); inputData.addTupleValue(tupleValue); } // categoryField should be excluded for ml-commons predictions - return inputMap.values().stream().filter(inputData -> inputData.size() > 0).map( - inputData -> new ImmutablePair<>(inputData.toDataFrame(), - inputData.toFilteredDataFrame(e -> !e.getKey().equals(categoryField)))) + return inputMap.values().stream() + .filter(inputData -> inputData.size() > 0) + .map( + inputData -> + new ImmutablePair<>( + inputData.toDataFrame(), + inputData.toFilteredDataFrame(e -> !e.getKey().equals(categoryField)))) .collect(Collectors.toList()); } /** * covert result schema into ExprValue. + * * @param columnMetas column metas * @param row row * @return a map of result schema in ExprValue format @@ -113,13 +115,15 @@ protected Map convertRowIntoExprValue(ColumnMeta[] columnMeta /** * populate result map by ml-commons supported data type. + * * @param columnValue column value * @param resultKeyName result kay name * @param resultBuilder result builder */ - protected void populateResultBuilder(ColumnValue columnValue, - String resultKeyName, - ImmutableMap.Builder resultBuilder) { + protected void populateResultBuilder( + ColumnValue columnValue, + String resultKeyName, + ImmutableMap.Builder resultBuilder) { switch (columnValue.columnType()) { case INTEGER: resultBuilder.put(resultKeyName, new ExprIntegerValue(columnValue.intValue())); @@ -149,14 +153,14 @@ protected void populateResultBuilder(ColumnValue columnValue, /** * concert result into ExprValue. 
+ * * @param columnMetas column metas * @param row row * @param schema schema * @return a map of result in ExprValue format */ - protected Map convertResultRowIntoExprValue(ColumnMeta[] columnMetas, - Row row, - Map schema) { + protected Map convertResultRowIntoExprValue( + ColumnMeta[] columnMetas, Row row, Map schema) { ImmutableMap.Builder resultBuilder = new ImmutableMap.Builder<>(); for (int i = 0; i < columnMetas.length; i++) { ColumnValue columnValue = row.getValue(i); @@ -167,29 +171,31 @@ protected Map convertResultRowIntoExprValue(ColumnMeta[] colu resultKeyName = resultKeyName + "1"; } populateResultBuilder(columnValue, resultKeyName, resultBuilder); - } return resultBuilder.build(); } /** * iterate result and built it into ExprTupleValue. + * * @param inputRowIter input row iterator * @param inputDataFrame input data frame * @param predictionResult prediction result * @param resultRowIter result row iterator * @return result in ExprTupleValue format */ - protected ExprTupleValue buildResult(Iterator inputRowIter, - DataFrame inputDataFrame, - MLPredictionOutput predictionResult, - Iterator resultRowIter) { + protected ExprTupleValue buildResult( + Iterator inputRowIter, + DataFrame inputDataFrame, + MLPredictionOutput predictionResult, + Iterator resultRowIter) { ImmutableMap.Builder resultSchemaBuilder = new ImmutableMap.Builder<>(); - resultSchemaBuilder.putAll(convertRowIntoExprValue(inputDataFrame.columnMetas(), - inputRowIter.next())); + resultSchemaBuilder.putAll( + convertRowIntoExprValue(inputDataFrame.columnMetas(), inputRowIter.next())); Map resultSchema = resultSchemaBuilder.build(); ImmutableMap.Builder resultBuilder = new ImmutableMap.Builder<>(); - resultBuilder.putAll(convertResultRowIntoExprValue( + resultBuilder.putAll( + convertResultRowIntoExprValue( predictionResult.getPredictionResult().columnMetas(), resultRowIter.next(), resultSchema)); @@ -199,74 +205,73 @@ protected ExprTupleValue buildResult(Iterator inputRowIter, /** * get 
ml-commons train and predict result. + * * @param functionName ml-commons algorithm name * @param mlAlgoParams ml-commons algorithm parameters * @param inputDataFrame input data frame * @param nodeClient node client * @return ml-commons train and predict result */ - protected MLPredictionOutput getMLPredictionResult(FunctionName functionName, - MLAlgoParams mlAlgoParams, - DataFrame inputDataFrame, - NodeClient nodeClient) { - MLInput mlinput = MLInput.builder() + protected MLPredictionOutput getMLPredictionResult( + FunctionName functionName, + MLAlgoParams mlAlgoParams, + DataFrame inputDataFrame, + NodeClient nodeClient) { + MLInput mlinput = + MLInput.builder() .algorithm(functionName) .parameters(mlAlgoParams) .inputDataset(new DataFrameInputDataset(inputDataFrame)) .build(); - MachineLearningNodeClient machineLearningClient = - MLClient.getMLClient(nodeClient); + MachineLearningNodeClient machineLearningClient = MLClient.getMLClient(nodeClient); - return (MLPredictionOutput) machineLearningClient - .trainAndPredict(mlinput) - .actionGet(30, TimeUnit.SECONDS); + return (MLPredictionOutput) + machineLearningClient.trainAndPredict(mlinput).actionGet(30, TimeUnit.SECONDS); } /** * get ml-commons train, predict and trainandpredict result. + * * @param inputDataFrame input data frame * @param arguments ml parameters * @param nodeClient node client * @return ml-commons result */ - protected MLOutput getMLOutput(DataFrame inputDataFrame, - Map arguments, - NodeClient nodeClient) { - MLInput mlinput = MLInput.builder() + protected MLOutput getMLOutput( + DataFrame inputDataFrame, Map arguments, NodeClient nodeClient) { + MLInput mlinput = + MLInput.builder() .inputDataset(new DataFrameInputDataset(inputDataFrame)) - //Just the placeholders for algorithm and parameters which must be initialized. - //They will be overridden in ml client. + // Just the placeholders for algorithm and parameters which must be initialized. + // They will be overridden in ml client. 
.algorithm(FunctionName.SAMPLE_ALGO) .parameters(new SampleAlgoParams(0)) .build(); - MachineLearningNodeClient machineLearningClient = - MLClient.getMLClient(nodeClient); + MachineLearningNodeClient machineLearningClient = MLClient.getMLClient(nodeClient); - return machineLearningClient - .run(mlinput, arguments) - .actionGet(30, TimeUnit.SECONDS); + return machineLearningClient.run(mlinput, arguments).actionGet(30, TimeUnit.SECONDS); } /** * iterate result and built it into ExprTupleValue. + * * @param inputRowIter input row iterator * @param inputDataFrame input data frame * @param mlResult train/predict result * @param resultRowIter predict result iterator * @return result in ExprTupleValue format */ - protected ExprTupleValue buildPPLResult(boolean isPredict, - Iterator inputRowIter, - DataFrame inputDataFrame, - MLOutput mlResult, - Iterator resultRowIter) { + protected ExprTupleValue buildPPLResult( + boolean isPredict, + Iterator inputRowIter, + DataFrame inputDataFrame, + MLOutput mlResult, + Iterator resultRowIter) { if (isPredict) { - return buildResult(inputRowIter, - inputDataFrame, - (MLPredictionOutput) mlResult, - resultRowIter); + return buildResult( + inputRowIter, inputDataFrame, (MLPredictionOutput) mlResult, resultRowIter); } else { return buildTrainResult((MLTrainingOutput) mlResult); } @@ -284,18 +289,21 @@ protected ExprTupleValue buildTrainResult(MLTrainingOutput trainResult) { private static class MLInputRows extends LinkedList> { /** * Add tuple value to input map, skip if any value is null. + * * @param tupleValue a row in input data. */ public void addTupleValue(Map tupleValue) { if (tupleValue.values().stream().anyMatch(e -> e.isNull() || e.isMissing())) { return; } - this.add(tupleValue.entrySet().stream() - .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().value()))); + this.add( + tupleValue.entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().value()))); } /** * Convert to DataFrame. 
+ * * @return DataFrame */ public DataFrame toDataFrame() { @@ -304,15 +312,19 @@ public DataFrame toDataFrame() { /** * Filter each row and convert to DataFrame. + * * @param filter used to filter fields in each row * @return DataFrame */ public DataFrame toFilteredDataFrame(Predicate> filter) { - return DataFrameBuilder.load(this.stream().map( - row -> row.entrySet().stream().filter(filter) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))) - .collect(Collectors.toList())); + return DataFrameBuilder.load( + this.stream() + .map( + row -> + row.entrySet().stream() + .filter(filter) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))) + .collect(Collectors.toList())); } } - } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLOperator.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLOperator.java index 36834bc23a..6dc7078a0d 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLOperator.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/planner/physical/MLOperator.java @@ -25,23 +25,19 @@ import org.opensearch.sql.planner.physical.PhysicalPlanNodeVisitor; /** - * ml-commons Physical operator to call machine learning interface to get results for - * algorithm execution. + * ml-commons Physical operator to call machine learning interface to get results for algorithm + * execution. 
*/ @RequiredArgsConstructor @EqualsAndHashCode(callSuper = false) public class MLOperator extends MLCommonsOperatorActions { - @Getter - private final PhysicalPlan input; + @Getter private final PhysicalPlan input; - @Getter - private final Map arguments; + @Getter private final Map arguments; - @Getter - private final NodeClient nodeClient; + @Getter private final NodeClient nodeClient; - @EqualsAndHashCode.Exclude - private Iterator iterator; + @EqualsAndHashCode.Exclude private Iterator iterator; @Override public void open() { @@ -53,34 +49,36 @@ public void open() { final Iterator inputRowIter = inputDataFrame.iterator(); // Only need to check train here, as action should be already checked in ml client. final boolean isPrediction = ((String) args.get("action")).equals("train") ? false : true; - //For train, only one row to return. - final Iterator trainIter = new ArrayList() { - { - add("train"); - } - }.iterator(); - final Iterator resultRowIter = isPrediction - ? ((MLPredictionOutput) mlOutput).getPredictionResult().iterator() - : null; - iterator = new Iterator() { - @Override - public boolean hasNext() { - if (isPrediction) { - return inputRowIter.hasNext(); - } else { - boolean res = trainIter.hasNext(); - if (res) { - trainIter.next(); + // For train, only one row to return. + final Iterator trainIter = + new ArrayList() { + { + add("train"); + } + }.iterator(); + final Iterator resultRowIter = + isPrediction ? 
((MLPredictionOutput) mlOutput).getPredictionResult().iterator() : null; + iterator = + new Iterator() { + @Override + public boolean hasNext() { + if (isPrediction) { + return inputRowIter.hasNext(); + } else { + boolean res = trainIter.hasNext(); + if (res) { + trainIter.next(); + } + return res; + } } - return res; - } - } - @Override - public ExprValue next() { - return buildPPLResult(isPrediction, inputRowIter, inputDataFrame, mlOutput, resultRowIter); - } - }; + @Override + public ExprValue next() { + return buildPPLResult( + isPrediction, inputRowIter, inputDataFrame, mlOutput, resultRowIter); + } + }; } @Override diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequest.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequest.java index 919596eee2..6cf7fe49c2 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequest.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request; import java.io.IOException; @@ -33,49 +32,31 @@ @ToString public class OpenSearchQueryRequest implements OpenSearchRequest { - /** - * {@link OpenSearchRequest.IndexName}. - */ + /** {@link OpenSearchRequest.IndexName}. */ private final IndexName indexName; - /** - * Search request source builder. - */ + /** Search request source builder. */ private final SearchSourceBuilder sourceBuilder; - /** - * OpenSearchExprValueFactory. - */ - @EqualsAndHashCode.Exclude - @ToString.Exclude + /** OpenSearchExprValueFactory. */ + @EqualsAndHashCode.Exclude @ToString.Exclude private final OpenSearchExprValueFactory exprValueFactory; + /** List of includes expected in the response. */ + @EqualsAndHashCode.Exclude @ToString.Exclude private final List includes; - /** - * List of includes expected in the response. 
- */ - @EqualsAndHashCode.Exclude - @ToString.Exclude - private final List includes; - - /** - * Indicate the search already done. - */ + /** Indicate the search already done. */ private boolean searchDone = false; - /** - * Constructor of OpenSearchQueryRequest. - */ - public OpenSearchQueryRequest(String indexName, int size, - OpenSearchExprValueFactory factory, List includes) { + /** Constructor of OpenSearchQueryRequest. */ + public OpenSearchQueryRequest( + String indexName, int size, OpenSearchExprValueFactory factory, List includes) { this(new IndexName(indexName), size, factory, includes); } - /** - * Constructor of OpenSearchQueryRequest. - */ - public OpenSearchQueryRequest(IndexName indexName, int size, - OpenSearchExprValueFactory factory, List includes) { + /** Constructor of OpenSearchQueryRequest. */ + public OpenSearchQueryRequest( + IndexName indexName, int size, OpenSearchExprValueFactory factory, List includes) { this.indexName = indexName; this.sourceBuilder = new SearchSourceBuilder(); sourceBuilder.from(0); @@ -85,11 +66,12 @@ public OpenSearchQueryRequest(IndexName indexName, int size, this.includes = includes; } - /** - * Constructor of OpenSearchQueryRequest. - */ - public OpenSearchQueryRequest(IndexName indexName, SearchSourceBuilder sourceBuilder, - OpenSearchExprValueFactory factory, List includes) { + /** Constructor of OpenSearchQueryRequest. 
*/ + public OpenSearchQueryRequest( + IndexName indexName, + SearchSourceBuilder sourceBuilder, + OpenSearchExprValueFactory factory, + List includes) { this.indexName = indexName; this.sourceBuilder = sourceBuilder; this.exprValueFactory = factory; @@ -97,22 +79,24 @@ public OpenSearchQueryRequest(IndexName indexName, SearchSourceBuilder sourceBui } @Override - public OpenSearchResponse search(Function searchAction, - Function scrollAction) { + public OpenSearchResponse search( + Function searchAction, + Function scrollAction) { if (searchDone) { return new OpenSearchResponse(SearchHits.empty(), exprValueFactory, includes); } else { searchDone = true; return new OpenSearchResponse( - searchAction.apply(new SearchRequest() - .indices(indexName.getIndexNames()) - .source(sourceBuilder)), exprValueFactory, includes); + searchAction.apply( + new SearchRequest().indices(indexName.getIndexNames()).source(sourceBuilder)), + exprValueFactory, + includes); } } @Override public void clean(Consumer cleanAction) { - //do nothing. + // do nothing. 
} @Override @@ -122,7 +106,7 @@ public boolean hasAnotherBatch() { @Override public void writeTo(StreamOutput out) throws IOException { - throw new UnsupportedOperationException("OpenSearchQueryRequest serialization " - + "is not implemented."); + throw new UnsupportedOperationException( + "OpenSearchQueryRequest serialization " + "is not implemented."); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequest.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequest.java index 5c9d0033c1..f775d55296 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequest.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request; import java.io.IOException; @@ -20,14 +19,10 @@ import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; import org.opensearch.sql.opensearch.response.OpenSearchResponse; -/** - * OpenSearch search request. - */ +/** OpenSearch search request. */ public interface OpenSearchRequest extends Writeable { - /** - * Default query timeout in minutes. - */ + /** Default query timeout in minutes. */ TimeValue DEFAULT_QUERY_TIMEOUT = TimeValue.timeValueMinutes(1L); /** @@ -37,8 +32,9 @@ public interface OpenSearchRequest extends Writeable { * @param scrollAction scroll search action. * @return OpenSearchResponse. */ - OpenSearchResponse search(Function searchAction, - Function scrollAction); + OpenSearchResponse search( + Function searchAction, + Function scrollAction); /** * Apply the cleanAction on request. @@ -49,21 +45,20 @@ OpenSearchResponse search(Function searchAction, /** * Get the OpenSearchExprValueFactory. + * * @return OpenSearchExprValueFactory. */ OpenSearchExprValueFactory getExprValueFactory(); /** * Check if there is more data to get from OpenSearch. 
- * @return True if calling {@ref OpenSearchClient.search} with this request will - * return non-empty response. + * + * @return True if calling {@ref OpenSearchClient.search} with this request will return non-empty + * response. */ boolean hasAnotherBatch(); - /** - * OpenSearch Index Name. - * Indices are separated by ",". - */ + /** OpenSearch Index Name. Indices are separated by ",". */ @EqualsAndHashCode class IndexName implements Writeable { private static final String COMMA = ","; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilder.java index 80259f15d3..1df3dcb183 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request; import static java.util.stream.Collectors.mapping; @@ -47,47 +46,36 @@ import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; import org.opensearch.sql.opensearch.response.agg.OpenSearchAggregationResponseParser; -/** - * OpenSearch search request builder. - */ +/** OpenSearch search request builder. */ @EqualsAndHashCode @Getter @ToString public class OpenSearchRequestBuilder { - /** - * Search request source builder. - */ + /** Search request source builder. */ private final SearchSourceBuilder sourceBuilder; - /** - * Query size of the request -- how many rows will be returned. - */ + /** Query size of the request -- how many rows will be returned. */ private int requestedTotalSize; - /** - * Size of each page request to return. - */ + /** Size of each page request to return. */ private Integer pageSize = null; - /** - * OpenSearchExprValueFactory. 
- */ - @EqualsAndHashCode.Exclude - @ToString.Exclude + /** OpenSearchExprValueFactory. */ + @EqualsAndHashCode.Exclude @ToString.Exclude private final OpenSearchExprValueFactory exprValueFactory; + private int startFrom = 0; - /** - * Constructor. - */ - public OpenSearchRequestBuilder(int requestedTotalSize, - OpenSearchExprValueFactory exprValueFactory) { + /** Constructor. */ + public OpenSearchRequestBuilder( + int requestedTotalSize, OpenSearchExprValueFactory exprValueFactory) { this.requestedTotalSize = requestedTotalSize; - this.sourceBuilder = new SearchSourceBuilder() - .from(startFrom) - .timeout(OpenSearchRequest.DEFAULT_QUERY_TIMEOUT) - .trackScores(false); + this.sourceBuilder = + new SearchSourceBuilder() + .from(startFrom) + .timeout(OpenSearchRequest.DEFAULT_QUERY_TIMEOUT) + .trackScores(false); this.exprValueFactory = exprValueFactory; } @@ -96,13 +84,11 @@ public OpenSearchRequestBuilder(int requestedTotalSize, * * @return query request or scroll request */ - public OpenSearchRequest build(OpenSearchRequest.IndexName indexName, - int maxResultWindow, TimeValue scrollTimeout) { + public OpenSearchRequest build( + OpenSearchRequest.IndexName indexName, int maxResultWindow, TimeValue scrollTimeout) { int size = requestedTotalSize; FetchSourceContext fetchSource = this.sourceBuilder.fetchSource(); - List includes = fetchSource != null - ? Arrays.asList(fetchSource.includes()) - : List.of(); + List includes = fetchSource != null ? 
Arrays.asList(fetchSource.includes()) : List.of(); if (pageSize == null) { if (startFrom + size > maxResultWindow) { sourceBuilder.size(maxResultWindow - startFrom); @@ -118,12 +104,11 @@ public OpenSearchRequest build(OpenSearchRequest.IndexName indexName, throw new UnsupportedOperationException("Non-zero offset is not supported with pagination"); } sourceBuilder.size(pageSize); - return new OpenSearchScrollRequest(indexName, scrollTimeout, - sourceBuilder, exprValueFactory, includes); + return new OpenSearchScrollRequest( + indexName, scrollTimeout, sourceBuilder, exprValueFactory, includes); } } - boolean isBoolFilterQuery(QueryBuilder current) { return (current instanceof BoolQueryBuilder); } @@ -131,7 +116,7 @@ boolean isBoolFilterQuery(QueryBuilder current) { /** * Push down query to DSL request. * - * @param query query request + * @param query query request */ public void pushDownFilter(QueryBuilder query) { QueryBuilder current = sourceBuilder.query(); @@ -142,9 +127,7 @@ public void pushDownFilter(QueryBuilder query) { if (isBoolFilterQuery(current)) { ((BoolQueryBuilder) current).filter(query); } else { - sourceBuilder.query(QueryBuilders.boolQuery() - .filter(current) - .filter(query)); + sourceBuilder.query(QueryBuilders.boolQuery().filter(current).filter(query)); } } @@ -181,9 +164,7 @@ public void pushDownSort(List> sortBuilders) { } } - /** - * Pushdown size (limit) and from (offset) to DSL request. - */ + /** Pushdown size (limit) and from (offset) to DSL request. */ public void pushDownLimit(Integer limit, Integer offset) { requestedTotalSize = limit; startFrom = offset; @@ -200,6 +181,7 @@ public void pushDownPageSize(int pageSize) { /** * Add highlight to DSL requests. 
+ * * @param field name of the field to highlight */ public void pushDownHighlight(String field, Map arguments) { @@ -208,32 +190,34 @@ public void pushDownHighlight(String field, Map arguments) { // OS does not allow duplicates of highlight fields if (sourceBuilder.highlighter().fields().stream() .anyMatch(f -> f.name().equals(unquotedField))) { - throw new SemanticCheckException(String.format( - "Duplicate field %s in highlight", field)); + throw new SemanticCheckException(String.format("Duplicate field %s in highlight", field)); } sourceBuilder.highlighter().field(unquotedField); } else { - HighlightBuilder highlightBuilder = - new HighlightBuilder().field(unquotedField); + HighlightBuilder highlightBuilder = new HighlightBuilder().field(unquotedField); sourceBuilder.highlighter(highlightBuilder); } // lastFieldIndex denotes previously set highlighter with field parameter int lastFieldIndex = sourceBuilder.highlighter().fields().size() - 1; if (arguments.containsKey("pre_tags")) { - sourceBuilder.highlighter().fields().get(lastFieldIndex) + sourceBuilder + .highlighter() + .fields() + .get(lastFieldIndex) .preTags(arguments.get("pre_tags").toString()); } if (arguments.containsKey("post_tags")) { - sourceBuilder.highlighter().fields().get(lastFieldIndex) + sourceBuilder + .highlighter() + .fields() + .get(lastFieldIndex) .postTags(arguments.get("post_tags").toString()); } } - /** - * Push down project list to DSL requests. - */ + /** Push down project list to DSL requests. */ public void pushDownProjects(Set projects) { sourceBuilder.fetchSource( projects.stream().map(ReferenceExpression::getAttr).distinct().toArray(String[]::new), @@ -254,21 +238,22 @@ private boolean isSortByDocOnly() { /** * Push down nested to sourceBuilder. + * * @param nestedArgs : Nested arguments to push down. 
*/ public void pushDownNested(List> nestedArgs) { initBoolQueryFilter(); List nestedQueries = extractNestedQueries(query()); - groupFieldNamesByPath(nestedArgs).forEach( - (path, fieldNames) -> - buildInnerHit(fieldNames, findNestedQueryWithSamePath(nestedQueries, path)) - ); + groupFieldNamesByPath(nestedArgs) + .forEach( + (path, fieldNames) -> + buildInnerHit(fieldNames, findNestedQueryWithSamePath(nestedQueries, path))); } /** - * InnerHit must be added to the NestedQueryBuilder. We need to extract - * the nested queries currently in the query if there is already a filter - * push down with nested query. + * InnerHit must be added to the NestedQueryBuilder. We need to extract the nested queries + * currently in the query if there is already a filter push down with nested query. + * * @param query : current query. * @return : grouped nested queries currently in query. */ @@ -289,9 +274,7 @@ public int getMaxResponseSize() { return pageSize == null ? requestedTotalSize : pageSize; } - /** - * Initialize bool query for push down. - */ + /** Initialize bool query for push down. */ private void initBoolQueryFilter() { if (sourceBuilder.query() == null) { sourceBuilder.query(QueryBuilders.boolQuery()); @@ -304,44 +287,42 @@ private void initBoolQueryFilter() { /** * Map all field names in nested queries that use same path. + * * @param fields : Fields for nested queries. * @return : Map of path and associated field names. */ private Map> groupFieldNamesByPath( List> fields) { // TODO filter out reverse nested when supported - .filter(not(isReverseNested())) - return fields.stream().collect( - Collectors.groupingBy( - m -> m.get("path").toString(), - mapping( - m -> m.get("field").toString(), - toList() - ) - ) - ); + return fields.stream() + .collect( + Collectors.groupingBy( + m -> m.get("path").toString(), mapping(m -> m.get("field").toString(), toList()))); } /** * Build inner hits portion to nested query. 
+ * * @param paths : Set of all paths used in nested queries. * @param query : Current pushDown query. */ private void buildInnerHit(List paths, NestedQueryBuilder query) { - query.innerHit(new InnerHitBuilder().setFetchSourceContext( - new FetchSourceContext(true, paths.toArray(new String[0]), null) - )); + query.innerHit( + new InnerHitBuilder() + .setFetchSourceContext( + new FetchSourceContext(true, paths.toArray(new String[0]), null))); } /** - * We need to group nested queries with same path for adding new fields with same path of - * inner hits. If we try to add additional inner hits with same path we get an OS error. + * We need to group nested queries with same path for adding new fields with same path of inner + * hits. If we try to add additional inner hits with same path we get an OS error. + * * @param nestedQueries Current list of nested queries in query. * @param path path comparing with current nested queries. * @return Query with same path or new empty nested query. */ private NestedQueryBuilder findNestedQueryWithSamePath( - List nestedQueries, String path - ) { + List nestedQueries, String path) { return nestedQueries.stream() .filter(query -> isSamePath(path, query)) .findAny() @@ -350,6 +331,7 @@ private NestedQueryBuilder findNestedQueryWithSamePath( /** * Check if is nested query is of the same path value. + * * @param path Value of path to compare with nested query. * @param query nested query builder to compare with path. * @return true if nested query has same path. @@ -358,9 +340,7 @@ private boolean isSamePath(String path, NestedQueryBuilder query) { return nestedQuery(path, query.query(), query.scoreMode()).equals(query); } - /** - * Create a nested query with match all filter to place inner hits. - */ + /** Create a nested query with match all filter to place inner hits. 
*/ private Supplier createEmptyNestedQuery(String path) { return () -> { NestedQueryBuilder nestedQuery = nestedQuery(path, matchAllQuery(), ScoreMode.None); @@ -371,6 +351,7 @@ private Supplier createEmptyNestedQuery(String path) { /** * Return current query. + * * @return : Current source builder query. */ private BoolQueryBuilder query() { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequest.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequest.java index 34e8fcd096..c9490f0767 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequest.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request; import java.io.IOException; @@ -41,62 +40,56 @@ public class OpenSearchScrollRequest implements OpenSearchRequest { /** * Search request used to initiate paged (scrolled) search. Not needed to get subsequent pages. */ - @EqualsAndHashCode.Exclude - private final transient SearchRequest initialSearchRequest; + @EqualsAndHashCode.Exclude private final transient SearchRequest initialSearchRequest; + /** Scroll context timeout. */ private final TimeValue scrollTimeout; - /** - * {@link OpenSearchRequest.IndexName}. - */ + /** {@link OpenSearchRequest.IndexName}. */ private final IndexName indexName; /** Index name. */ - @EqualsAndHashCode.Exclude - @ToString.Exclude + @EqualsAndHashCode.Exclude @ToString.Exclude private final OpenSearchExprValueFactory exprValueFactory; /** * Scroll id which is set after first request issued. Because OpenSearchClient is shared by * multiple threads so this state has to be maintained here. 
*/ - @Setter - @Getter - private String scrollId = NO_SCROLL_ID; + @Setter @Getter private String scrollId = NO_SCROLL_ID; public static final String NO_SCROLL_ID = ""; - @EqualsAndHashCode.Exclude - private boolean needClean = true; + @EqualsAndHashCode.Exclude private boolean needClean = true; - @Getter - @EqualsAndHashCode.Exclude - @ToString.Exclude - private final List includes; + @Getter @EqualsAndHashCode.Exclude @ToString.Exclude private final List includes; /** Constructor. */ - public OpenSearchScrollRequest(IndexName indexName, - TimeValue scrollTimeout, - SearchSourceBuilder sourceBuilder, - OpenSearchExprValueFactory exprValueFactory, - List includes) { + public OpenSearchScrollRequest( + IndexName indexName, + TimeValue scrollTimeout, + SearchSourceBuilder sourceBuilder, + OpenSearchExprValueFactory exprValueFactory, + List includes) { this.indexName = indexName; this.scrollTimeout = scrollTimeout; this.exprValueFactory = exprValueFactory; - this.initialSearchRequest = new SearchRequest() - .indices(indexName.getIndexNames()) - .scroll(scrollTimeout) - .source(sourceBuilder); + this.initialSearchRequest = + new SearchRequest() + .indices(indexName.getIndexNames()) + .scroll(scrollTimeout) + .source(sourceBuilder); this.includes = includes; } - - /** Executes request using either {@param searchAction} or {@param scrollAction} as appropriate. + /** + * Executes request using either {@param searchAction} or {@param scrollAction} as appropriate. */ @Override - public OpenSearchResponse search(Function searchAction, - Function scrollAction) { + public OpenSearchResponse search( + Function searchAction, + Function scrollAction) { SearchResponse openSearchResponse; if (isScroll()) { openSearchResponse = scrollAction.apply(scrollRequest()); @@ -172,6 +165,7 @@ public void writeTo(StreamOutput out) throws IOException { /** * Constructs OpenSearchScrollRequest from serialized representation. + * * @param in stream to read data from. 
* @param engine OpenSearchSqlEngine to get node-specific context. * @throws IOException thrown if reading from input {@code in} fails. diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchCatIndicesRequest.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchCatIndicesRequest.java index 6e85dc00cc..e7685394f4 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchCatIndicesRequest.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchCatIndicesRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request.system; import static org.opensearch.sql.data.model.ExprValueUtils.stringValue; @@ -18,9 +17,7 @@ import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.opensearch.client.OpenSearchClient; -/** - * Cat indices request. - */ +/** Cat indices request. */ @RequiredArgsConstructor public class OpenSearchCatIndicesRequest implements OpenSearchSystemRequest { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchDescribeIndexRequest.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchDescribeIndexRequest.java index f4fd7b98d3..c4ac270547 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchDescribeIndexRequest.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchDescribeIndexRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request.system; import static org.opensearch.sql.data.model.ExprValueUtils.integerValue; @@ -23,9 +22,7 @@ import org.opensearch.sql.opensearch.mapping.IndexMapping; import org.opensearch.sql.opensearch.request.OpenSearchRequest; -/** - * Describe index meta data request. - */ +/** Describe index meta data request. 
*/ public class OpenSearchDescribeIndexRequest implements OpenSearchSystemRequest { private static final String DEFAULT_TABLE_CAT = "opensearch"; @@ -36,22 +33,18 @@ public class OpenSearchDescribeIndexRequest implements OpenSearchSystemRequest { private static final String DEFAULT_IS_AUTOINCREMENT = "NO"; - /** - * OpenSearch client connection. - */ + /** OpenSearch client connection. */ private final OpenSearchClient client; - /** - * {@link OpenSearchRequest.IndexName}. - */ + /** {@link OpenSearchRequest.IndexName}. */ private final OpenSearchRequest.IndexName indexName; public OpenSearchDescribeIndexRequest(OpenSearchClient client, String indexName) { this(client, new OpenSearchRequest.IndexName(indexName)); } - public OpenSearchDescribeIndexRequest(OpenSearchClient client, - OpenSearchRequest.IndexName indexName) { + public OpenSearchDescribeIndexRequest( + OpenSearchClient client, OpenSearchRequest.IndexName indexName) { this.client = client; this.indexName = indexName; } @@ -66,10 +59,13 @@ public List search() { List results = new ArrayList<>(); Map meta = client.meta(); int pos = 0; - for (Map.Entry entry - : OpenSearchDataType.traverseAndFlatten(getFieldTypes()).entrySet()) { + for (Map.Entry entry : + OpenSearchDataType.traverseAndFlatten(getFieldTypes()).entrySet()) { results.add( - row(entry.getKey(), entry.getValue().legacyTypeName().toLowerCase(), pos++, + row( + entry.getKey(), + entry.getValue().legacyTypeName().toLowerCase(), + pos++, clusterName(meta))); } return results; @@ -97,8 +93,12 @@ public Map getFieldTypes() { * @return max result window */ public Integer getMaxResultWindow() { - return client.getIndexMaxResultWindows(getLocalIndexNames(indexName.getIndexNames())) - .values().stream().min(Integer::compare).get(); + return client + .getIndexMaxResultWindows(getLocalIndexNames(indexName.getIndexNames())) + .values() + .stream() + .min(Integer::compare) + .get(); } private ExprTupleValue row(String fieldName, String fieldType, int 
position, String clusterName) { @@ -122,8 +122,8 @@ private ExprTupleValue row(String fieldName, String fieldType, int position, Str } /** - * Return index names without "{cluster}:" prefix. - * Without the prefix, they refer to the indices at the local cluster. + * Return index names without "{cluster}:" prefix. Without the prefix, they refer to the indices + * at the local cluster. * * @param indexNames a string array of index names * @return local cluster index names @@ -140,8 +140,6 @@ private String clusterName(Map meta) { @Override public String toString() { - return "OpenSearchDescribeIndexRequest{" - + "indexName='" + indexName + '\'' - + '}'; + return "OpenSearchDescribeIndexRequest{" + "indexName='" + indexName + '\'' + '}'; } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchSystemRequest.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchSystemRequest.java index a2fbf79624..2969c7639b 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchSystemRequest.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/system/OpenSearchSystemRequest.java @@ -3,15 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request.system; import java.util.List; import org.opensearch.sql.data.model.ExprValue; -/** - * OpenSearch system request query against the system index. - */ +/** OpenSearch system request query against the system index. 
*/ public interface OpenSearchSystemRequest { /** diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/OpenSearchResponse.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/OpenSearchResponse.java index 03abfbf6c1..e43777a740 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/OpenSearchResponse.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/OpenSearchResponse.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.response; import static org.opensearch.sql.opensearch.storage.OpenSearchIndex.METADATAFIELD_TYPE_MAP; @@ -35,52 +34,37 @@ import org.opensearch.sql.data.model.ExprValueUtils; import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; -/** - * OpenSearch search response. - */ +/** OpenSearch search response. */ @EqualsAndHashCode @ToString public class OpenSearchResponse implements Iterable { - /** - * Search query result (non-aggregation). - */ + /** Search query result (non-aggregation). */ private final SearchHits hits; - /** - * Search aggregation result. - */ + /** Search aggregation result. */ private final Aggregations aggregations; - /** - * List of requested include fields. - */ + /** List of requested include fields. */ private final List includes; - /** - * OpenSearchExprValueFactory used to build ExprValue from search result. - */ - @EqualsAndHashCode.Exclude - private final OpenSearchExprValueFactory exprValueFactory; + /** OpenSearchExprValueFactory used to build ExprValue from search result. */ + @EqualsAndHashCode.Exclude private final OpenSearchExprValueFactory exprValueFactory; - /** - * Constructor of OpenSearchResponse. - */ - public OpenSearchResponse(SearchResponse searchResponse, - OpenSearchExprValueFactory exprValueFactory, - List includes) { + /** Constructor of OpenSearchResponse. 
*/ + public OpenSearchResponse( + SearchResponse searchResponse, + OpenSearchExprValueFactory exprValueFactory, + List includes) { this.hits = searchResponse.getHits(); this.aggregations = searchResponse.getAggregations(); this.exprValueFactory = exprValueFactory; this.includes = includes; } - /** - * Constructor of OpenSearchResponse with SearchHits. - */ - public OpenSearchResponse(SearchHits hits, - OpenSearchExprValueFactory exprValueFactory, - List includes) { + /** Constructor of OpenSearchResponse with SearchHits. */ + public OpenSearchResponse( + SearchHits hits, OpenSearchExprValueFactory exprValueFactory, List includes) { this.hits = hits; this.aggregations = null; this.exprValueFactory = exprValueFactory; @@ -111,48 +95,52 @@ public Iterator iterator() { return handleAggregationResponse(); } else { return Arrays.stream(hits.getHits()) - .map(hit -> { - ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); - addParsedHitsToBuilder(builder, hit); - addMetaDataFieldsToBuilder(builder, hit); - addHighlightsToBuilder(builder, hit); - return (ExprValue) ExprTupleValue.fromExprValueMap(builder.build()); - }).iterator(); + .map( + hit -> { + ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); + addParsedHitsToBuilder(builder, hit); + addMetaDataFieldsToBuilder(builder, hit); + addHighlightsToBuilder(builder, hit); + return (ExprValue) ExprTupleValue.fromExprValueMap(builder.build()); + }) + .iterator(); } } /** - * Parse response for all hits to add to builder. Inner_hits supports arrays of objects - * with nested type. + * Parse response for all hits to add to builder. Inner_hits supports arrays of objects with + * nested type. + * * @param builder builder to build values from response. * @param hit Search hit from response. 
*/ private void addParsedHitsToBuilder( - ImmutableMap.Builder builder, - SearchHit hit - ) { + ImmutableMap.Builder builder, SearchHit hit) { builder.putAll( - exprValueFactory.construct( - hit.getSourceAsString(), - !(hit.getInnerHits() == null || hit.getInnerHits().isEmpty()) - ).tupleValue()); + exprValueFactory + .construct( + hit.getSourceAsString(), + !(hit.getInnerHits() == null || hit.getInnerHits().isEmpty())) + .tupleValue()); } /** * If highlight fields are present in response add the fields to the builder. + * * @param builder builder to build values from response. * @param hit Search hit from response. */ private void addHighlightsToBuilder( - ImmutableMap.Builder builder, - SearchHit hit - ) { + ImmutableMap.Builder builder, SearchHit hit) { if (!hit.getHighlightFields().isEmpty()) { var hlBuilder = ImmutableMap.builder(); for (var es : hit.getHighlightFields().entrySet()) { - hlBuilder.put(es.getKey(), ExprValueUtils.collectionValue( - Arrays.stream(es.getValue().fragments()).map( - Text::toString).collect(Collectors.toList()))); + hlBuilder.put( + es.getKey(), + ExprValueUtils.collectionValue( + Arrays.stream(es.getValue().fragments()) + .map(Text::toString) + .collect(Collectors.toList()))); } builder.put("_highlight", ExprTupleValue.fromExprValueMap(hlBuilder.build())); } @@ -160,58 +148,56 @@ private void addHighlightsToBuilder( /** * Add metadata fields to builder from response. + * * @param builder builder to build values from response. * @param hit Search hit from response. */ private void addMetaDataFieldsToBuilder( - ImmutableMap.Builder builder, - SearchHit hit - ) { - List metaDataFieldSet = includes.stream() - .filter(METADATAFIELD_TYPE_MAP::containsKey) - .collect(Collectors.toList()); - ExprFloatValue maxScore = Float.isNaN(hits.getMaxScore()) - ? 
null : new ExprFloatValue(hits.getMaxScore()); - - metaDataFieldSet.forEach(metaDataField -> { - if (metaDataField.equals(METADATA_FIELD_INDEX)) { - builder.put(METADATA_FIELD_INDEX, new ExprStringValue(hit.getIndex())); - } else if (metaDataField.equals(METADATA_FIELD_ID)) { - builder.put(METADATA_FIELD_ID, new ExprStringValue(hit.getId())); - } else if (metaDataField.equals(METADATA_FIELD_SCORE)) { - if (!Float.isNaN(hit.getScore())) { - builder.put(METADATA_FIELD_SCORE, new ExprFloatValue(hit.getScore())); - } - } else if (metaDataField.equals(METADATA_FIELD_MAXSCORE)) { - if (maxScore != null) { - builder.put(METADATA_FIELD_MAXSCORE, maxScore); - } - } else if (metaDataField.equals(METADATA_FIELD_SORT)) { - builder.put(METADATA_FIELD_SORT, new ExprLongValue(hit.getSeqNo())); - } else { // if (metaDataField.equals(METADATA_FIELD_ROUTING)){ - builder.put(METADATA_FIELD_ROUTING, new ExprStringValue(hit.getShard().toString())); - } - }); + ImmutableMap.Builder builder, SearchHit hit) { + List metaDataFieldSet = + includes.stream().filter(METADATAFIELD_TYPE_MAP::containsKey).collect(Collectors.toList()); + ExprFloatValue maxScore = + Float.isNaN(hits.getMaxScore()) ? 
null : new ExprFloatValue(hits.getMaxScore()); + + metaDataFieldSet.forEach( + metaDataField -> { + if (metaDataField.equals(METADATA_FIELD_INDEX)) { + builder.put(METADATA_FIELD_INDEX, new ExprStringValue(hit.getIndex())); + } else if (metaDataField.equals(METADATA_FIELD_ID)) { + builder.put(METADATA_FIELD_ID, new ExprStringValue(hit.getId())); + } else if (metaDataField.equals(METADATA_FIELD_SCORE)) { + if (!Float.isNaN(hit.getScore())) { + builder.put(METADATA_FIELD_SCORE, new ExprFloatValue(hit.getScore())); + } + } else if (metaDataField.equals(METADATA_FIELD_MAXSCORE)) { + if (maxScore != null) { + builder.put(METADATA_FIELD_MAXSCORE, maxScore); + } + } else if (metaDataField.equals(METADATA_FIELD_SORT)) { + builder.put(METADATA_FIELD_SORT, new ExprLongValue(hit.getSeqNo())); + } else { // if (metaDataField.equals(METADATA_FIELD_ROUTING)){ + builder.put(METADATA_FIELD_ROUTING, new ExprStringValue(hit.getShard().toString())); + } + }); } /** * Handle an aggregation response. + * * @return Parsed and built return values from response. 
*/ private Iterator handleAggregationResponse() { - return exprValueFactory.getParser().parse(aggregations).stream().map(entry -> { - ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); - for (Map.Entry value : entry.entrySet()) { - builder.put( - value.getKey(), - exprValueFactory.construct( - value.getKey(), - value.getValue(), - false - ) - ); - } - return (ExprValue) ExprTupleValue.fromExprValueMap(builder.build()); - }).iterator(); + return exprValueFactory.getParser().parse(aggregations).stream() + .map( + entry -> { + ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); + for (Map.Entry value : entry.entrySet()) { + builder.put( + value.getKey(), + exprValueFactory.construct(value.getKey(), value.getValue(), false)); + } + return (ExprValue) ExprTupleValue.fromExprValueMap(builder.build()); + }) + .iterator(); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/CompositeAggregationParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/CompositeAggregationParser.java index 7459300caa..581f708f22 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/CompositeAggregationParser.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/CompositeAggregationParser.java @@ -22,9 +22,7 @@ import org.opensearch.search.aggregations.Aggregations; import org.opensearch.search.aggregations.bucket.composite.CompositeAggregation; -/** - * Composite Aggregation Parser which include composite aggregation and metric parsers. - */ +/** Composite Aggregation Parser which include composite aggregation and metric parsers. 
*/ @EqualsAndHashCode public class CompositeAggregationParser implements OpenSearchAggregationResponseParser { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/FilterParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/FilterParser.java index 8358379be0..406f279784 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/FilterParser.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/FilterParser.java @@ -21,9 +21,8 @@ import org.opensearch.search.aggregations.bucket.filter.Filter; /** - * {@link Filter} Parser. - * The current use case is filter aggregation, e.g. avg(age) filter(balance>0). The filter parser - * do nothing and return the result from metricsParser. + * {@link Filter} Parser. The current use case is filter aggregation, e.g. avg(age) + * filter(balance>0). The filter parser do nothing and return the result from metricsParser. */ @Builder @EqualsAndHashCode diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/MetricParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/MetricParser.java index 15f05e5b05..0f8f8e284b 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/MetricParser.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/MetricParser.java @@ -16,14 +16,10 @@ import java.util.Map; import org.opensearch.search.aggregations.Aggregation; -/** - * Metric Aggregation Parser. - */ +/** Metric Aggregation Parser. */ public interface MetricParser { - /** - * Get the name of metric parser. - */ + /** Get the name of metric parser. 
*/ String getName(); /** diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/MetricParserHelper.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/MetricParserHelper.java index d5c0141ad2..0d1f770470 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/MetricParserHelper.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/MetricParserHelper.java @@ -23,9 +23,7 @@ import org.opensearch.search.aggregations.Aggregations; import org.opensearch.sql.common.utils.StringUtils; -/** - * Parse multiple metrics in one bucket. - */ +/** Parse multiple metrics in one bucket. */ @EqualsAndHashCode @RequiredArgsConstructor public class MetricParserHelper { @@ -49,8 +47,9 @@ public Map parse(Aggregations aggregations) { if (metricParserMap.containsKey(aggregation.getName())) { resultMap.putAll(metricParserMap.get(aggregation.getName()).parse(aggregation)); } else { - throw new RuntimeException(StringUtils.format("couldn't parse field %s in aggregation " - + "response", aggregation.getName())); + throw new RuntimeException( + StringUtils.format( + "couldn't parse field %s in aggregation " + "response", aggregation.getName())); } } return resultMap; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/NoBucketAggregationParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/NoBucketAggregationParser.java index 5756003523..de0ee5883c 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/NoBucketAggregationParser.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/NoBucketAggregationParser.java @@ -19,9 +19,7 @@ import java.util.Map; import org.opensearch.search.aggregations.Aggregations; -/** - * No Bucket Aggregation Parser which include only metric parsers. - */ +/** No Bucket Aggregation Parser which include only metric parsers. 
*/ public class NoBucketAggregationParser implements OpenSearchAggregationResponseParser { private final MetricParserHelper metricsParser; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/OpenSearchAggregationResponseParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/OpenSearchAggregationResponseParser.java index 3a19747ef3..0c15d72eb6 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/OpenSearchAggregationResponseParser.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/OpenSearchAggregationResponseParser.java @@ -17,13 +17,12 @@ import java.util.Map; import org.opensearch.search.aggregations.Aggregations; -/** - * OpenSearch Aggregation Response Parser. - */ +/** OpenSearch Aggregation Response Parser. */ public interface OpenSearchAggregationResponseParser { /** * Parse the OpenSearch Aggregation Response. + * * @param aggregations Aggregations. * @return aggregation result. */ diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/SingleValueParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/SingleValueParser.java index 384e07ad8f..1492fedfc2 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/SingleValueParser.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/SingleValueParser.java @@ -23,9 +23,7 @@ import org.opensearch.search.aggregations.Aggregation; import org.opensearch.search.aggregations.metrics.NumericMetricsAggregation; -/** - * {@link NumericMetricsAggregation.SingleValue} metric parser. - */ +/** {@link NumericMetricsAggregation.SingleValue} metric parser. 
*/ @EqualsAndHashCode @RequiredArgsConstructor public class SingleValueParser implements MetricParser { @@ -35,7 +33,6 @@ public class SingleValueParser implements MetricParser { @Override public Map parse(Aggregation agg) { return Collections.singletonMap( - agg.getName(), - handleNanInfValue(((NumericMetricsAggregation.SingleValue) agg).value())); + agg.getName(), handleNanInfValue(((NumericMetricsAggregation.SingleValue) agg).value())); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/StatsParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/StatsParser.java index c80b75de05..82a2f8648f 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/StatsParser.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/StatsParser.java @@ -24,9 +24,7 @@ import org.opensearch.search.aggregations.Aggregation; import org.opensearch.search.aggregations.metrics.ExtendedStats; -/** - * {@link ExtendedStats} metric parser. - */ +/** {@link ExtendedStats} metric parser. */ @EqualsAndHashCode @RequiredArgsConstructor public class StatsParser implements MetricParser { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/TopHitsParser.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/TopHitsParser.java index a98e1b4ce3..b29b44f033 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/TopHitsParser.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/TopHitsParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.response.agg; import java.util.Arrays; @@ -16,21 +15,19 @@ import org.opensearch.search.aggregations.Aggregation; import org.opensearch.search.aggregations.metrics.TopHits; -/** - * {@link TopHits} metric parser. - */ +/** {@link TopHits} metric parser. 
*/ @EqualsAndHashCode @RequiredArgsConstructor public class TopHitsParser implements MetricParser { - @Getter - private final String name; + @Getter private final String name; @Override public Map parse(Aggregation agg) { return Collections.singletonMap( agg.getName(), Arrays.stream(((TopHits) agg).getHits().getHits()) - .flatMap(h -> h.getSourceAsMap().values().stream()).collect(Collectors.toList())); + .flatMap(h -> h.getSourceAsMap().values().stream()) + .collect(Collectors.toList())); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/Utils.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/Utils.java index 953f4d19b4..9ce46c6de6 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/Utils.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/agg/Utils.java @@ -19,6 +19,7 @@ public class Utils { /** * Utils to handle Nan/Infinite Value. + * * @return null if is Nan or is +-Infinity. */ public static Object handleNanInfValue(double value) { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/error/ErrorMessage.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/error/ErrorMessage.java index f828c2c485..bbcacc1d2c 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/error/ErrorMessage.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/error/ErrorMessage.java @@ -3,34 +3,26 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.response.error; import lombok.Getter; import org.json.JSONObject; import org.opensearch.core.rest.RestStatus; -/** - * Error Message. - */ +/** Error Message. 
*/ public class ErrorMessage { protected Throwable exception; private final int status; - @Getter - private final String type; + @Getter private final String type; - @Getter - private final String reason; + @Getter private final String reason; - @Getter - private final String details; + @Getter private final String details; - /** - * Error Message Constructor. - */ + /** Error Message Constructor. */ public ErrorMessage(Throwable exception, int status) { this.exception = exception; this.status = status; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/error/ErrorMessageFactory.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/error/ErrorMessageFactory.java index 204c6a8b93..901bfc30c8 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/error/ErrorMessageFactory.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/error/ErrorMessageFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.response.error; import lombok.experimental.UtilityClass; @@ -12,11 +11,11 @@ @UtilityClass public class ErrorMessageFactory { /** - * Create error message based on the exception type. - * Exceptions of OpenSearch exception type and exceptions with wrapped OpenSearch exception causes - * should create {@link OpenSearchErrorMessage} + * Create error message based on the exception type. 
Exceptions of OpenSearch exception type and + * exceptions with wrapped OpenSearch exception causes should create {@link + * OpenSearchErrorMessage} * - * @param e exception to create error message + * @param e exception to create error message * @param status exception status code * @return error message */ diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/error/OpenSearchErrorMessage.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/error/OpenSearchErrorMessage.java index a90c52922e..87a374d353 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/error/OpenSearchErrorMessage.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/error/OpenSearchErrorMessage.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.response.error; import java.util.Locale; @@ -11,9 +10,7 @@ import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.ShardSearchFailure; -/** - * OpenSearch Error Message. - */ +/** OpenSearch Error Message. */ public class OpenSearchErrorMessage extends ErrorMessage { OpenSearchErrorMessage(OpenSearchException exception, int status) { @@ -45,21 +42,21 @@ protected String fetchDetails() { } /** - * Could not deliver the exactly same error messages due to the limit of JDBC types. - * Currently our cases occurred only SearchPhaseExecutionException instances - * among all types of OpenSearch exceptions - * according to the survey, see all types: OpenSearchException.OpenSearchExceptionHandle. - * Either add methods of fetching details for different types, or re-make a consistent - * message by not giving - * detailed messages/root causes but only a suggestion message. + * Could not deliver the exactly same error messages due to the limit of JDBC types. 
Currently our + * cases occurred only SearchPhaseExecutionException instances among all types of OpenSearch + * exceptions according to the survey, see all types: + * OpenSearchException.OpenSearchExceptionHandle. Either add methods of fetching details for + * different types, or re-make a consistent message by not giving detailed messages/root causes + * but only a suggestion message. */ private String fetchSearchPhaseExecutionExceptionDetails( SearchPhaseExecutionException exception) { StringBuilder details = new StringBuilder(); ShardSearchFailure[] shardFailures = exception.shardFailures(); for (ShardSearchFailure failure : shardFailures) { - details.append(String.format(Locale.ROOT, "Shard[%d]: %s\n", failure.shardId(), - failure.getCause().toString())); + details.append( + String.format( + Locale.ROOT, "Shard[%d]: %s\n", failure.shardId(), failure.getCause().toString())); } return details.toString(); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/security/SecurityAccess.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/security/SecurityAccess.java index 0c1b2e58b1..95c52ea275 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/security/SecurityAccess.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/security/SecurityAccess.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.security; import java.security.AccessController; @@ -17,9 +16,7 @@ */ public class SecurityAccess { - /** - * Execute the operation in privileged mode. - */ + /** Execute the operation in privileged mode. 
*/ public static T doPrivileged(final PrivilegedExceptionAction operation) { SpecialPermission.check(); try { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/LegacyOpenDistroSettings.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/LegacyOpenDistroSettings.java index 3eadea482b..f20551b89d 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/LegacyOpenDistroSettings.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/LegacyOpenDistroSettings.java @@ -18,102 +18,108 @@ @UtilityClass public class LegacyOpenDistroSettings { - public static final Setting SQL_ENABLED_SETTING = Setting.boolSetting( - LegacySettings.Key.SQL_ENABLED.getKeyValue(), - true, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated); - - public static final Setting SQL_QUERY_SLOWLOG_SETTING = Setting.intSetting( - LegacySettings.Key.SQL_QUERY_SLOWLOG.getKeyValue(), - 2, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated); - - public static final Setting SQL_CURSOR_KEEPALIVE_SETTING = Setting.positiveTimeSetting( - LegacySettings.Key.SQL_CURSOR_KEEPALIVE.getKeyValue(), - timeValueMinutes(1), - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated); - - public static final Setting METRICS_ROLLING_WINDOW_SETTING = Setting.longSetting( - LegacySettings.Key.METRICS_ROLLING_WINDOW.getKeyValue(), - 3600L, - 2L, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated); - - public static final Setting METRICS_ROLLING_INTERVAL_SETTING = Setting.longSetting( - LegacySettings.Key.METRICS_ROLLING_INTERVAL.getKeyValue(), - 60L, - 1L, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated); - - public static final Setting PPL_ENABLED_SETTING = Setting.boolSetting( - LegacySettings.Key.PPL_ENABLED.getKeyValue(), - true, - Setting.Property.NodeScope, - 
Setting.Property.Dynamic, - Setting.Property.Deprecated); - - public static final Setting - PPL_QUERY_MEMORY_LIMIT_SETTING = Setting.memorySizeSetting( - LegacySettings.Key.PPL_QUERY_MEMORY_LIMIT.getKeyValue(), - "85%", - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated); - - public static final Setting QUERY_SIZE_LIMIT_SETTING = Setting.intSetting( - LegacySettings.Key.QUERY_SIZE_LIMIT.getKeyValue(), - 200, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated); + public static final Setting SQL_ENABLED_SETTING = + Setting.boolSetting( + LegacySettings.Key.SQL_ENABLED.getKeyValue(), + true, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated); - /** - * Deprecated and will be removed then. - * From OpenSearch 1.0, the new engine is always enabled. - */ - public static final Setting SQL_NEW_ENGINE_ENABLED_SETTING = Setting.boolSetting( - LegacySettings.Key.SQL_NEW_ENGINE_ENABLED.getKeyValue(), - true, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated); + public static final Setting SQL_QUERY_SLOWLOG_SETTING = + Setting.intSetting( + LegacySettings.Key.SQL_QUERY_SLOWLOG.getKeyValue(), + 2, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated); + + public static final Setting SQL_CURSOR_KEEPALIVE_SETTING = + Setting.positiveTimeSetting( + LegacySettings.Key.SQL_CURSOR_KEEPALIVE.getKeyValue(), + timeValueMinutes(1), + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated); + + public static final Setting METRICS_ROLLING_WINDOW_SETTING = + Setting.longSetting( + LegacySettings.Key.METRICS_ROLLING_WINDOW.getKeyValue(), + 3600L, + 2L, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated); + + public static final Setting METRICS_ROLLING_INTERVAL_SETTING = + Setting.longSetting( + 
LegacySettings.Key.METRICS_ROLLING_INTERVAL.getKeyValue(), + 60L, + 1L, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated); + + public static final Setting PPL_ENABLED_SETTING = + Setting.boolSetting( + LegacySettings.Key.PPL_ENABLED.getKeyValue(), + true, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated); + + public static final Setting PPL_QUERY_MEMORY_LIMIT_SETTING = + Setting.memorySizeSetting( + LegacySettings.Key.PPL_QUERY_MEMORY_LIMIT.getKeyValue(), + "85%", + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated); + + public static final Setting QUERY_SIZE_LIMIT_SETTING = + Setting.intSetting( + LegacySettings.Key.QUERY_SIZE_LIMIT.getKeyValue(), + 200, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated); + + /** Deprecated and will be removed then. From OpenSearch 1.0, the new engine is always enabled. */ + public static final Setting SQL_NEW_ENGINE_ENABLED_SETTING = + Setting.boolSetting( + LegacySettings.Key.SQL_NEW_ENGINE_ENABLED.getKeyValue(), + true, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated); /** - * Deprecated and will be removed then. - * From OpenSearch 1.0, the query analysis in legacy engine is disabled. + * Deprecated and will be removed then. From OpenSearch 1.0, the query analysis in legacy engine + * is disabled. */ - public static final Setting QUERY_ANALYSIS_ENABLED_SETTING = Setting.boolSetting( - LegacySettings.Key.QUERY_ANALYSIS_ENABLED.getKeyValue(), - false, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated); + public static final Setting QUERY_ANALYSIS_ENABLED_SETTING = + Setting.boolSetting( + LegacySettings.Key.QUERY_ANALYSIS_ENABLED.getKeyValue(), + false, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated); /** - * Deprecated and will be removed then. 
- * From OpenSearch 1.0, the query analysis suggestion in legacy engine is disabled. + * Deprecated and will be removed then. From OpenSearch 1.0, the query analysis suggestion in + * legacy engine is disabled. */ public static final Setting QUERY_ANALYSIS_SEMANTIC_SUGGESTION_SETTING = Setting.boolSetting( - LegacySettings.Key.QUERY_ANALYSIS_SEMANTIC_SUGGESTION.getKeyValue(), - false, - Setting.Property.NodeScope, - Setting.Property.Dynamic, - Setting.Property.Deprecated); + LegacySettings.Key.QUERY_ANALYSIS_SEMANTIC_SUGGESTION.getKeyValue(), + false, + Setting.Property.NodeScope, + Setting.Property.Dynamic, + Setting.Property.Deprecated); /** - * Deprecated and will be removed then. - * From OpenSearch 1.0, the query analysis threshold in legacy engine is disabled. + * Deprecated and will be removed then. From OpenSearch 1.0, the query analysis threshold in + * legacy engine is disabled. */ public static final Setting QUERY_ANALYSIS_SEMANTIC_THRESHOLD_SETTING = Setting.intSetting( @@ -124,8 +130,8 @@ public class LegacyOpenDistroSettings { Setting.Property.Deprecated); /** - * Deprecated and will be removed then. - * From OpenSearch 1.0, the query response format is default to JDBC format. + * Deprecated and will be removed then. From OpenSearch 1.0, the query response format is default + * to JDBC format. */ public static final Setting QUERY_RESPONSE_FORMAT_SETTING = Setting.simpleString( @@ -136,8 +142,8 @@ public class LegacyOpenDistroSettings { Setting.Property.Deprecated); /** - * Deprecated and will be removed then. - * From OpenSearch 1.0, the cursor feature is enabled by default. + * Deprecated and will be removed then. From OpenSearch 1.0, the cursor feature is enabled by + * default. */ public static final Setting SQL_CURSOR_ENABLED_SETTING = Setting.boolSetting( @@ -146,10 +152,10 @@ public class LegacyOpenDistroSettings { Setting.Property.NodeScope, Setting.Property.Dynamic, Setting.Property.Deprecated); + /** - * Deprecated and will be removed then. 
- * From OpenSearch 1.0, the fetch_size in query body will decide whether create the cursor - * context. No cursor will be created if the fetch_size = 0. + * Deprecated and will be removed then. From OpenSearch 1.0, the fetch_size in query body will + * decide whether create the cursor context. No cursor will be created if the fetch_size = 0. */ public static final Setting SQL_CURSOR_FETCH_SIZE_SETTING = Setting.intSetting( @@ -159,9 +165,7 @@ public class LegacyOpenDistroSettings { Setting.Property.Dynamic, Setting.Property.Deprecated); - /** - * Used by Plugin to init Setting. - */ + /** Used by Plugin to init Setting. */ public static List> legacySettings() { return new ImmutableList.Builder>() .add(SQL_ENABLED_SETTING) diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/OpenSearchSettings.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/OpenSearchSettings.java index 0810312974..133903dabe 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/OpenSearchSettings.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/setting/OpenSearchSettings.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.setting; import static org.opensearch.common.settings.Settings.EMPTY; @@ -27,129 +26,172 @@ import org.opensearch.sql.common.setting.LegacySettings; import org.opensearch.sql.common.setting.Settings; -/** - * Setting implementation on OpenSearch. - */ +/** Setting implementation on OpenSearch. */ @Log4j2 public class OpenSearchSettings extends Settings { - /** - * Default settings. - */ + /** Default settings. */ private final Map> defaultSettings; - /** - * Latest setting value for each registered key. Thread-safe is required. - */ + + /** Latest setting value for each registered key. Thread-safe is required. 
*/ @VisibleForTesting private final Map latestSettings = new ConcurrentHashMap<>(); - public static final Setting SQL_ENABLED_SETTING = Setting.boolSetting( - Key.SQL_ENABLED.getKeyValue(), - LegacyOpenDistroSettings.SQL_ENABLED_SETTING, - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting SQL_SLOWLOG_SETTING = Setting.intSetting( - Key.SQL_SLOWLOG.getKeyValue(), - LegacyOpenDistroSettings.SQL_QUERY_SLOWLOG_SETTING, - 0, - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting SQL_CURSOR_KEEP_ALIVE_SETTING = Setting.positiveTimeSetting( - Key.SQL_CURSOR_KEEP_ALIVE.getKeyValue(), - LegacyOpenDistroSettings.SQL_CURSOR_KEEPALIVE_SETTING, - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting SQL_DELETE_ENABLED_SETTING = Setting.boolSetting( - Key.SQL_DELETE_ENABLED.getKeyValue(), - false, - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting PPL_ENABLED_SETTING = Setting.boolSetting( - Key.PPL_ENABLED.getKeyValue(), - LegacyOpenDistroSettings.PPL_ENABLED_SETTING, - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting QUERY_MEMORY_LIMIT_SETTING = new Setting<>( - Key.QUERY_MEMORY_LIMIT.getKeyValue(), - LegacyOpenDistroSettings.PPL_QUERY_MEMORY_LIMIT_SETTING, - (s) -> MemorySizeValue.parseBytesSizeValueOrHeapRatio( - s, LegacySettings.Key.PPL_QUERY_MEMORY_LIMIT.getKeyValue()), - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting QUERY_SIZE_LIMIT_SETTING = Setting.intSetting( - Key.QUERY_SIZE_LIMIT.getKeyValue(), - LegacyOpenDistroSettings.QUERY_SIZE_LIMIT_SETTING, - 0, - Setting.Property.NodeScope, - Setting.Property.Dynamic); - - public static final Setting METRICS_ROLLING_WINDOW_SETTING = Setting.longSetting( - Key.METRICS_ROLLING_WINDOW.getKeyValue(), - LegacyOpenDistroSettings.METRICS_ROLLING_WINDOW_SETTING, - 2L, - Setting.Property.NodeScope, - 
Setting.Property.Dynamic); - - public static final Setting METRICS_ROLLING_INTERVAL_SETTING = Setting.longSetting( - Key.METRICS_ROLLING_INTERVAL.getKeyValue(), - LegacyOpenDistroSettings.METRICS_ROLLING_INTERVAL_SETTING, - 1L, - Setting.Property.NodeScope, - Setting.Property.Dynamic); + public static final Setting SQL_ENABLED_SETTING = + Setting.boolSetting( + Key.SQL_ENABLED.getKeyValue(), + LegacyOpenDistroSettings.SQL_ENABLED_SETTING, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting SQL_SLOWLOG_SETTING = + Setting.intSetting( + Key.SQL_SLOWLOG.getKeyValue(), + LegacyOpenDistroSettings.SQL_QUERY_SLOWLOG_SETTING, + 0, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting SQL_CURSOR_KEEP_ALIVE_SETTING = + Setting.positiveTimeSetting( + Key.SQL_CURSOR_KEEP_ALIVE.getKeyValue(), + LegacyOpenDistroSettings.SQL_CURSOR_KEEPALIVE_SETTING, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting SQL_DELETE_ENABLED_SETTING = + Setting.boolSetting( + Key.SQL_DELETE_ENABLED.getKeyValue(), + false, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting PPL_ENABLED_SETTING = + Setting.boolSetting( + Key.PPL_ENABLED.getKeyValue(), + LegacyOpenDistroSettings.PPL_ENABLED_SETTING, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting QUERY_MEMORY_LIMIT_SETTING = + new Setting<>( + Key.QUERY_MEMORY_LIMIT.getKeyValue(), + LegacyOpenDistroSettings.PPL_QUERY_MEMORY_LIMIT_SETTING, + (s) -> + MemorySizeValue.parseBytesSizeValueOrHeapRatio( + s, LegacySettings.Key.PPL_QUERY_MEMORY_LIMIT.getKeyValue()), + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting QUERY_SIZE_LIMIT_SETTING = + Setting.intSetting( + Key.QUERY_SIZE_LIMIT.getKeyValue(), + LegacyOpenDistroSettings.QUERY_SIZE_LIMIT_SETTING, + 0, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public 
static final Setting METRICS_ROLLING_WINDOW_SETTING = + Setting.longSetting( + Key.METRICS_ROLLING_WINDOW.getKeyValue(), + LegacyOpenDistroSettings.METRICS_ROLLING_WINDOW_SETTING, + 2L, + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + public static final Setting METRICS_ROLLING_INTERVAL_SETTING = + Setting.longSetting( + Key.METRICS_ROLLING_INTERVAL.getKeyValue(), + LegacyOpenDistroSettings.METRICS_ROLLING_INTERVAL_SETTING, + 1L, + Setting.Property.NodeScope, + Setting.Property.Dynamic); // we are keeping this to not break upgrades if the config is already present. // This will be completely removed in 3.0. - public static final Setting DATASOURCE_CONFIG = SecureSetting.secureFile( - "plugins.query.federation.datasources.config", - null, - Setting.Property.Deprecated); - - public static final Setting DATASOURCE_MASTER_SECRET_KEY = Setting.simpleString( - ENCYRPTION_MASTER_KEY.getKeyValue(), - Setting.Property.NodeScope, - Setting.Property.Final, - Setting.Property.Filtered); - - public static final Setting DATASOURCE_URI_ALLOW_HOSTS = Setting.simpleString( - Key.DATASOURCES_URI_ALLOWHOSTS.getKeyValue(), - ".*", - Setting.Property.NodeScope, - Setting.Property.Dynamic); + public static final Setting DATASOURCE_CONFIG = + SecureSetting.secureFile( + "plugins.query.federation.datasources.config", null, Setting.Property.Deprecated); - /** - * Construct OpenSearchSetting. - * The OpenSearchSetting must be singleton. - */ + public static final Setting DATASOURCE_MASTER_SECRET_KEY = + Setting.simpleString( + ENCYRPTION_MASTER_KEY.getKeyValue(), + Setting.Property.NodeScope, + Setting.Property.Final, + Setting.Property.Filtered); + + public static final Setting DATASOURCE_URI_ALLOW_HOSTS = + Setting.simpleString( + Key.DATASOURCES_URI_ALLOWHOSTS.getKeyValue(), + ".*", + Setting.Property.NodeScope, + Setting.Property.Dynamic); + + /** Construct OpenSearchSetting. The OpenSearchSetting must be singleton. 
*/ @SuppressWarnings("unchecked") public OpenSearchSettings(ClusterSettings clusterSettings) { ImmutableMap.Builder> settingBuilder = new ImmutableMap.Builder<>(); - register(settingBuilder, clusterSettings, Key.SQL_ENABLED, - SQL_ENABLED_SETTING, new Updater(Key.SQL_ENABLED)); - register(settingBuilder, clusterSettings, Key.SQL_SLOWLOG, - SQL_SLOWLOG_SETTING, new Updater(Key.SQL_SLOWLOG)); - register(settingBuilder, clusterSettings, Key.SQL_CURSOR_KEEP_ALIVE, - SQL_CURSOR_KEEP_ALIVE_SETTING, new Updater(Key.SQL_CURSOR_KEEP_ALIVE)); - register(settingBuilder, clusterSettings, Key.SQL_DELETE_ENABLED, - SQL_DELETE_ENABLED_SETTING, new Updater(Key.SQL_DELETE_ENABLED)); - register(settingBuilder, clusterSettings, Key.PPL_ENABLED, - PPL_ENABLED_SETTING, new Updater(Key.PPL_ENABLED)); - register(settingBuilder, clusterSettings, Key.QUERY_MEMORY_LIMIT, - QUERY_MEMORY_LIMIT_SETTING, new Updater(Key.QUERY_MEMORY_LIMIT)); - register(settingBuilder, clusterSettings, Key.QUERY_SIZE_LIMIT, - QUERY_SIZE_LIMIT_SETTING, new Updater(Key.QUERY_SIZE_LIMIT)); - register(settingBuilder, clusterSettings, Key.METRICS_ROLLING_WINDOW, - METRICS_ROLLING_WINDOW_SETTING, new Updater(Key.METRICS_ROLLING_WINDOW)); - register(settingBuilder, clusterSettings, Key.METRICS_ROLLING_INTERVAL, - METRICS_ROLLING_INTERVAL_SETTING, new Updater(Key.METRICS_ROLLING_INTERVAL)); - register(settingBuilder, clusterSettings, Key.DATASOURCES_URI_ALLOWHOSTS, - DATASOURCE_URI_ALLOW_HOSTS, new Updater(Key.DATASOURCES_URI_ALLOWHOSTS)); - registerNonDynamicSettings(settingBuilder, clusterSettings, Key.CLUSTER_NAME, - ClusterName.CLUSTER_NAME_SETTING); + register( + settingBuilder, + clusterSettings, + Key.SQL_ENABLED, + SQL_ENABLED_SETTING, + new Updater(Key.SQL_ENABLED)); + register( + settingBuilder, + clusterSettings, + Key.SQL_SLOWLOG, + SQL_SLOWLOG_SETTING, + new Updater(Key.SQL_SLOWLOG)); + register( + settingBuilder, + clusterSettings, + Key.SQL_CURSOR_KEEP_ALIVE, + SQL_CURSOR_KEEP_ALIVE_SETTING, + new 
Updater(Key.SQL_CURSOR_KEEP_ALIVE)); + register( + settingBuilder, + clusterSettings, + Key.SQL_DELETE_ENABLED, + SQL_DELETE_ENABLED_SETTING, + new Updater(Key.SQL_DELETE_ENABLED)); + register( + settingBuilder, + clusterSettings, + Key.PPL_ENABLED, + PPL_ENABLED_SETTING, + new Updater(Key.PPL_ENABLED)); + register( + settingBuilder, + clusterSettings, + Key.QUERY_MEMORY_LIMIT, + QUERY_MEMORY_LIMIT_SETTING, + new Updater(Key.QUERY_MEMORY_LIMIT)); + register( + settingBuilder, + clusterSettings, + Key.QUERY_SIZE_LIMIT, + QUERY_SIZE_LIMIT_SETTING, + new Updater(Key.QUERY_SIZE_LIMIT)); + register( + settingBuilder, + clusterSettings, + Key.METRICS_ROLLING_WINDOW, + METRICS_ROLLING_WINDOW_SETTING, + new Updater(Key.METRICS_ROLLING_WINDOW)); + register( + settingBuilder, + clusterSettings, + Key.METRICS_ROLLING_INTERVAL, + METRICS_ROLLING_INTERVAL_SETTING, + new Updater(Key.METRICS_ROLLING_INTERVAL)); + register( + settingBuilder, + clusterSettings, + Key.DATASOURCES_URI_ALLOWHOSTS, + DATASOURCE_URI_ALLOW_HOSTS, + new Updater(Key.DATASOURCES_URI_ALLOWHOSTS)); + registerNonDynamicSettings( + settingBuilder, clusterSettings, Key.CLUSTER_NAME, ClusterName.CLUSTER_NAME_SETTING); defaultSettings = settingBuilder.build(); } @@ -159,36 +201,33 @@ public T getSettingValue(Settings.Key key) { return (T) latestSettings.getOrDefault(key, defaultSettings.get(key).getDefault(EMPTY)); } - /** - * Register the pair of {key, setting}. - */ - private void register(ImmutableMap.Builder> settingBuilder, - ClusterSettings clusterSettings, Settings.Key key, - Setting setting, - Consumer updater) { + /** Register the pair of {key, setting}. 
*/ + private void register( + ImmutableMap.Builder> settingBuilder, + ClusterSettings clusterSettings, + Settings.Key key, + Setting setting, + Consumer updater) { if (clusterSettings.get(setting) != null) { latestSettings.put(key, clusterSettings.get(setting)); } settingBuilder.put(key, setting); - clusterSettings - .addSettingsUpdateConsumer(setting, updater); + clusterSettings.addSettingsUpdateConsumer(setting, updater); } - /** - * Register Non Dynamic Settings without consumer. - */ + /** Register Non Dynamic Settings without consumer. */ private void registerNonDynamicSettings( ImmutableMap.Builder> settingBuilder, - ClusterSettings clusterSettings, Settings.Key key, + ClusterSettings clusterSettings, + Settings.Key key, Setting setting) { settingBuilder.put(key, setting); latestSettings.put(key, clusterSettings.get(setting)); } - /** - * Add the inner class only for UT coverage purpose. - * Lambda could be much elegant solution. But which is hard to test. + * Add the inner class only for UT coverage purpose. Lambda could be much elegant solution. But + * which is hard to test. */ @VisibleForTesting @RequiredArgsConstructor @@ -202,9 +241,7 @@ public void accept(Object newValue) { } } - /** - * Used by Plugin to init Setting. - */ + /** Used by Plugin to init Setting. */ public static List> pluginSettings() { return new ImmutableList.Builder>() .add(SQL_ENABLED_SETTING) @@ -220,9 +257,7 @@ public static List> pluginSettings() { .build(); } - /** - * Init Non Dynamic Plugin Settings. - */ + /** Init Non Dynamic Plugin Settings. */ public static List> pluginNonDynamicSettings() { return new ImmutableList.Builder>() .add(DATASOURCE_MASTER_SECRET_KEY) @@ -230,9 +265,7 @@ public static List> pluginNonDynamicSettings() { .build(); } - /** - * Used by local cluster to get settings from a setting instance. - */ + /** Used by local cluster to get settings from a setting instance. 
*/ public List> getSettings() { return pluginSettings(); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchDataSourceFactory.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchDataSourceFactory.java index 011f6236fb..b30d460c00 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchDataSourceFactory.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchDataSourceFactory.java @@ -28,7 +28,9 @@ public DataSourceType getDataSourceType() { @Override public DataSource createDataSource(DataSourceMetadata metadata) { - return new DataSource(metadata.getName(), DataSourceType.OPENSEARCH, + return new DataSource( + metadata.getName(), + DataSourceType.OPENSEARCH, new OpenSearchStorageEngine(client, settings)); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndex.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndex.java index 62617f744e..c6afdb8511 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndex.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndex.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage; import com.google.common.annotations.VisibleForTesting; @@ -47,43 +46,33 @@ public class OpenSearchIndex implements Table { public static final String METADATA_FIELD_ROUTING = "_routing"; - public static final java.util.Map METADATAFIELD_TYPE_MAP = Map.of( - METADATA_FIELD_ID, ExprCoreType.STRING, - METADATA_FIELD_INDEX, ExprCoreType.STRING, - METADATA_FIELD_SCORE, ExprCoreType.FLOAT, - METADATA_FIELD_MAXSCORE, ExprCoreType.FLOAT, - METADATA_FIELD_SORT, ExprCoreType.LONG, - METADATA_FIELD_ROUTING, ExprCoreType.STRING - ); + public static final java.util.Map METADATAFIELD_TYPE_MAP = + Map.of( + METADATA_FIELD_ID, ExprCoreType.STRING, + 
METADATA_FIELD_INDEX, ExprCoreType.STRING, + METADATA_FIELD_SCORE, ExprCoreType.FLOAT, + METADATA_FIELD_MAXSCORE, ExprCoreType.FLOAT, + METADATA_FIELD_SORT, ExprCoreType.LONG, + METADATA_FIELD_ROUTING, ExprCoreType.STRING); /** OpenSearch client connection. */ private final OpenSearchClient client; private final Settings settings; - /** - * {@link OpenSearchRequest.IndexName}. - */ + /** {@link OpenSearchRequest.IndexName}. */ private final OpenSearchRequest.IndexName indexName; - /** - * The cached mapping of field and type in index. - */ + /** The cached mapping of field and type in index. */ private Map cachedFieldOpenSearchTypes = null; - /** - * The cached ExprType of fields. - */ + /** The cached ExprType of fields. */ private Map cachedFieldTypes = null; - /** - * The cached max result window setting of index. - */ + /** The cached max result window setting of index. */ private Integer cachedMaxResultWindow = null; - /** - * Constructor. - */ + /** Constructor. */ public OpenSearchIndex(OpenSearchClient client, Settings settings, String indexName) { this.client = client; this.settings = settings; @@ -113,22 +102,24 @@ public void create(Map schema) { * or lazy evaluate when query engine pulls field type. */ /** - * Get simplified parsed mapping info. Unlike {@link #getFieldOpenSearchTypes()} - * it returns a flattened map. + * Get simplified parsed mapping info. Unlike {@link #getFieldOpenSearchTypes()} it returns a + * flattened map. + * * @return A map between field names and matching `ExprCoreType`s. 
*/ @Override public Map getFieldTypes() { if (cachedFieldOpenSearchTypes == null) { - cachedFieldOpenSearchTypes = new OpenSearchDescribeIndexRequest(client, indexName) - .getFieldTypes(); + cachedFieldOpenSearchTypes = + new OpenSearchDescribeIndexRequest(client, indexName).getFieldTypes(); } if (cachedFieldTypes == null) { - cachedFieldTypes = OpenSearchDataType.traverseAndFlatten(cachedFieldOpenSearchTypes) - .entrySet().stream().collect( - LinkedHashMap::new, - (map, item) -> map.put(item.getKey(), item.getValue().getExprType()), - Map::putAll); + cachedFieldTypes = + OpenSearchDataType.traverseAndFlatten(cachedFieldOpenSearchTypes).entrySet().stream() + .collect( + LinkedHashMap::new, + (map, item) -> map.put(item.getKey(), item.getValue().getExprType()), + Map::putAll); } return cachedFieldTypes; } @@ -140,19 +131,18 @@ public Map getReservedFieldTypes() { /** * Get parsed mapping info. + * * @return A complete map between field names and their types. */ public Map getFieldOpenSearchTypes() { if (cachedFieldOpenSearchTypes == null) { - cachedFieldOpenSearchTypes = new OpenSearchDescribeIndexRequest(client, indexName) - .getFieldTypes(); + cachedFieldOpenSearchTypes = + new OpenSearchDescribeIndexRequest(client, indexName).getFieldTypes(); } return cachedFieldOpenSearchTypes; } - /** - * Get the max result window setting of the table. - */ + /** Get the max result window setting of the table. */ public Integer getMaxResultWindow() { if (cachedMaxResultWindow == null) { cachedMaxResultWindow = @@ -161,9 +151,7 @@ public Integer getMaxResultWindow() { return cachedMaxResultWindow; } - /** - * TODO: Push down operations to index scan operator as much as possible in future. - */ + /** TODO: Push down operations to index scan operator as much as possible in future. */ @Override public PhysicalPlan implement(LogicalPlan plan) { // TODO: Leave it here to avoid impact Prometheus and AD operators. Need to move to Planner. 
@@ -175,12 +163,13 @@ public TableScanBuilder createScanBuilder() { final int querySizeLimit = settings.getSettingValue(Settings.Key.QUERY_SIZE_LIMIT); final TimeValue cursorKeepAlive = settings.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE); - var builder = new OpenSearchRequestBuilder( - querySizeLimit, - createExprValueFactory()); + var builder = new OpenSearchRequestBuilder(querySizeLimit, createExprValueFactory()); Function createScanOperator = - requestBuilder -> new OpenSearchIndexScan(client, requestBuilder.getMaxResponseSize(), - requestBuilder.build(indexName, getMaxResultWindow(), cursorKeepAlive)); + requestBuilder -> + new OpenSearchIndexScan( + client, + requestBuilder.getMaxResponseSize(), + requestBuilder.build(indexName, getMaxResultWindow(), cursorKeepAlive)); return new OpenSearchIndexScanBuilder(builder, createScanOperator); } @@ -193,27 +182,27 @@ private OpenSearchExprValueFactory createExprValueFactory() { @VisibleForTesting @RequiredArgsConstructor - public static class OpenSearchDefaultImplementor - extends DefaultImplementor { + public static class OpenSearchDefaultImplementor extends DefaultImplementor { private final OpenSearchClient client; @Override public PhysicalPlan visitMLCommons(LogicalMLCommons node, OpenSearchIndexScan context) { - return new MLCommonsOperator(visitChild(node, context), node.getAlgorithm(), - node.getArguments(), client.getNodeClient()); + return new MLCommonsOperator( + visitChild(node, context), + node.getAlgorithm(), + node.getArguments(), + client.getNodeClient()); } @Override public PhysicalPlan visitAD(LogicalAD node, OpenSearchIndexScan context) { - return new ADOperator(visitChild(node, context), - node.getArguments(), client.getNodeClient()); + return new ADOperator(visitChild(node, context), node.getArguments(), client.getNodeClient()); } @Override public PhysicalPlan visitML(LogicalML node, OpenSearchIndexScan context) { - return new MLOperator(visitChild(node, context), - node.getArguments(), 
client.getNodeClient()); + return new MLOperator(visitChild(node, context), node.getArguments(), client.getNodeClient()); } } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngine.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngine.java index c915fa549b..7c022e2190 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngine.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngine.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage; import static org.opensearch.sql.utils.SystemIndexUtils.isSystemIndex; @@ -22,10 +21,9 @@ public class OpenSearchStorageEngine implements StorageEngine { /** OpenSearch client connection. */ - @Getter - private final OpenSearchClient client; - @Getter - private final Settings settings; + @Getter private final OpenSearchClient client; + + @Getter private final Settings settings; @Override public Table getTable(DataSourceSchemaName dataSourceSchemaName, String name) { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScan.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScan.java index 2ee863b6db..b2e9319bb1 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScan.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScan.java @@ -3,10 +3,8 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.scan; -import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; @@ -27,9 +25,7 @@ import org.opensearch.sql.planner.SerializablePlan; import org.opensearch.sql.storage.TableScanOperator; -/** - * OpenSearch index scan operator. 
- */ +/** OpenSearch index scan operator. */ @EqualsAndHashCode(onlyExplicitlyIncluded = true, callSuper = false) @ToString(onlyExplicitlyIncluded = true) public class OpenSearchIndexScan extends TableScanOperator implements SerializablePlan { @@ -38,14 +34,10 @@ public class OpenSearchIndexScan extends TableScanOperator implements Serializab private OpenSearchClient client; /** Search request. */ - @EqualsAndHashCode.Include - @ToString.Include - private OpenSearchRequest request; + @EqualsAndHashCode.Include @ToString.Include private OpenSearchRequest request; /** Largest number of rows allowed in the response. */ - @EqualsAndHashCode.Include - @ToString.Include - private int maxResponseSize; + @EqualsAndHashCode.Include @ToString.Include private int maxResponseSize; /** Number of rows returned. */ private Integer queryCount; @@ -53,12 +45,9 @@ public class OpenSearchIndexScan extends TableScanOperator implements Serializab /** Search response for current batch. */ private Iterator iterator; - /** - * Creates index scan based on a provided OpenSearchRequestBuilder. - */ - public OpenSearchIndexScan(OpenSearchClient client, - int maxResponseSize, - OpenSearchRequest request) { + /** Creates index scan based on a provided OpenSearchRequestBuilder. */ + public OpenSearchIndexScan( + OpenSearchClient client, int maxResponseSize, OpenSearchRequest request) { this.client = client; this.maxResponseSize = maxResponseSize; this.request = request; @@ -107,12 +96,13 @@ public String explain() { return request.toString(); } - /** No-args constructor. + /** + * No-args constructor. + * * @deprecated Exists only to satisfy Java serialization API. 
*/ @Deprecated(since = "introduction") - public OpenSearchIndexScan() { - } + public OpenSearchIndexScan() {} @Override public void readExternal(ObjectInput in) throws IOException { @@ -120,8 +110,9 @@ public void readExternal(ObjectInput in) throws IOException { byte[] requestStream = new byte[reqSize]; in.read(requestStream); - var engine = (OpenSearchStorageEngine) ((PlanSerializer.CursorDeserializationStream) in) - .resolveObject("engine"); + var engine = + (OpenSearchStorageEngine) + ((PlanSerializer.CursorDeserializationStream) in).resolveObject("engine"); try (BytesStreamInput bsi = new BytesStreamInput(requestStream)) { request = new OpenSearchScrollRequest(bsi, engine); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilder.java index 84883b5209..02ac21a39d 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilder.java @@ -22,16 +22,9 @@ import org.opensearch.sql.opensearch.storage.serialization.DefaultExpressionSerializer; import org.opensearch.sql.planner.logical.LogicalAggregation; import org.opensearch.sql.planner.logical.LogicalFilter; -import org.opensearch.sql.planner.logical.LogicalHighlight; -import org.opensearch.sql.planner.logical.LogicalLimit; -import org.opensearch.sql.planner.logical.LogicalNested; -import org.opensearch.sql.planner.logical.LogicalPaginate; -import org.opensearch.sql.planner.logical.LogicalProject; import org.opensearch.sql.planner.logical.LogicalSort; -/** - * Index scan builder for aggregate query used by {@link OpenSearchIndexScanBuilder} internally. - */ +/** Index scan builder for aggregate query used by {@link OpenSearchIndexScanBuilder} internally. 
*/ @EqualsAndHashCode class OpenSearchIndexScanAggregationBuilder implements PushDownQueryBuilder { @@ -47,9 +40,8 @@ class OpenSearchIndexScanAggregationBuilder implements PushDownQueryBuilder { /** Sorting items pushed down. */ private List> sortList; - - OpenSearchIndexScanAggregationBuilder(OpenSearchRequestBuilder requestBuilder, - LogicalAggregation aggregation) { + OpenSearchIndexScanAggregationBuilder( + OpenSearchRequestBuilder requestBuilder, LogicalAggregation aggregation) { this.requestBuilder = requestBuilder; aggregatorList = aggregation.getAggregatorList(); groupByList = aggregation.getGroupByList(); @@ -62,8 +54,7 @@ public OpenSearchRequestBuilder build() { Pair, OpenSearchAggregationResponseParser> aggregationBuilder = builder.buildAggregationBuilder(aggregatorList, groupByList, sortList); requestBuilder.pushDownAggregation(aggregationBuilder); - requestBuilder.pushTypeMapping( - builder.buildTypeMapping(aggregatorList, groupByList)); + requestBuilder.pushTypeMapping(builder.buildTypeMapping(aggregatorList, groupByList)); return requestBuilder; } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanBuilder.java index edcbedc7a7..8a2f3e98f4 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanBuilder.java @@ -30,29 +30,24 @@ public class OpenSearchIndexScanBuilder extends TableScanBuilder { private final Function scanFactory; - /** - * Delegated index scan builder for non-aggregate or aggregate query. - */ - @EqualsAndHashCode.Include - private PushDownQueryBuilder delegate; + + /** Delegated index scan builder for non-aggregate or aggregate query. */ + @EqualsAndHashCode.Include private PushDownQueryBuilder delegate; /** Is limit operator pushed down. 
*/ private boolean isLimitPushedDown = false; - /** - * Constructor used during query execution. - */ - public OpenSearchIndexScanBuilder(OpenSearchRequestBuilder requestBuilder, + /** Constructor used during query execution. */ + public OpenSearchIndexScanBuilder( + OpenSearchRequestBuilder requestBuilder, Function scanFactory) { this.delegate = new OpenSearchIndexScanQueryBuilder(requestBuilder); this.scanFactory = scanFactory; - } - /** - * Constructor used for unit tests. - */ - protected OpenSearchIndexScanBuilder(PushDownQueryBuilder translator, + /** Constructor used for unit tests. */ + protected OpenSearchIndexScanBuilder( + PushDownQueryBuilder translator, Function scanFactory) { this.delegate = translator; this.scanFactory = scanFactory; @@ -117,13 +112,16 @@ public boolean pushDownNested(LogicalNested nested) { /** * Valid if sorting is only by fields. + * * @param sort Logical sort * @return True if sorting by fields only */ private boolean sortByFieldsOnly(LogicalSort sort) { return sort.getSortList().stream() - .map(sortItem -> sortItem.getRight() instanceof ReferenceExpression - || isNestedFunction(sortItem.getRight())) + .map( + sortItem -> + sortItem.getRight() instanceof ReferenceExpression + || isNestedFunction(sortItem.getRight())) .reduce(true, Boolean::logicalAnd); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanQueryBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanQueryBuilder.java index 590272a9f1..f4b0b05256 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanQueryBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanQueryBuilder.java @@ -35,8 +35,8 @@ import org.opensearch.sql.planner.logical.LogicalSort; /** - * Index scan builder for simple non-aggregate query used by - * {@link OpenSearchIndexScanBuilder} internally. 
+ * Index scan builder for simple non-aggregate query used by {@link OpenSearchIndexScanBuilder} + * internally. */ @VisibleForTesting @EqualsAndHashCode @@ -50,13 +50,11 @@ public OpenSearchIndexScanQueryBuilder(OpenSearchRequestBuilder requestBuilder) @Override public boolean pushDownFilter(LogicalFilter filter) { - FilterQueryBuilder queryBuilder = new FilterQueryBuilder( - new DefaultExpressionSerializer()); + FilterQueryBuilder queryBuilder = new FilterQueryBuilder(new DefaultExpressionSerializer()); Expression queryCondition = filter.getCondition(); QueryBuilder query = queryBuilder.build(queryCondition); requestBuilder.pushDownFilter(query); - requestBuilder.pushDownTrackedScore( - trackScoresFromOpenSearchFunction(queryCondition)); + requestBuilder.pushDownTrackedScore(trackScoresFromOpenSearchFunction(queryCondition)); return true; } @@ -64,9 +62,10 @@ public boolean pushDownFilter(LogicalFilter filter) { public boolean pushDownSort(LogicalSort sort) { List> sortList = sort.getSortList(); final SortQueryBuilder builder = new SortQueryBuilder(); - requestBuilder.pushDownSort(sortList.stream() - .map(sortItem -> builder.build(sortItem.getValue(), sortItem.getKey())) - .collect(Collectors.toList())); + requestBuilder.pushDownSort( + sortList.stream() + .map(sortItem -> builder.build(sortItem.getValue(), sortItem.getKey())) + .collect(Collectors.toList())); return true; } @@ -78,8 +77,7 @@ public boolean pushDownLimit(LogicalLimit limit) { @Override public boolean pushDownProject(LogicalProject project) { - requestBuilder.pushDownProjects( - findReferenceExpressions(project.getProjectList())); + requestBuilder.pushDownProjects(findReferenceExpressions(project.getProjectList())); // Return false intentionally to keep the original project operator return false; @@ -105,8 +103,8 @@ private boolean trackScoresFromOpenSearchFunction(Expression condition) { return true; } if (condition instanceof FunctionExpression) { - return ((FunctionExpression) 
condition).getArguments().stream() - .anyMatch(this::trackScoresFromOpenSearchFunction); + return ((FunctionExpression) condition) + .getArguments().stream().anyMatch(this::trackScoresFromOpenSearchFunction); } return false; } @@ -114,8 +112,7 @@ private boolean trackScoresFromOpenSearchFunction(Expression condition) { @Override public boolean pushDownNested(LogicalNested nested) { requestBuilder.pushDownNested(nested.getFields()); - requestBuilder.pushDownProjects( - findReferenceExpressions(nested.getProjectList())); + requestBuilder.pushDownProjects(findReferenceExpressions(nested.getProjectList())); // Return false intentionally to keep the original nested operator // Since we return false we need to pushDownProject here as it won't be // pushed down due to no matching push down rule. @@ -130,8 +127,8 @@ public OpenSearchRequestBuilder build() { /** * Find reference expression from expression. - * @param expressions a list of expression. * + * @param expressions a list of expression. * @return a set of ReferenceExpression */ public static Set findReferenceExpressions( @@ -145,18 +142,20 @@ public static Set findReferenceExpressions( /** * Find reference expression from expression. - * @param expression expression. * + * @param expression expression. 
* @return a list of ReferenceExpression */ public static List findReferenceExpression(NamedExpression expression) { List results = new ArrayList<>(); - expression.accept(new ExpressionNodeVisitor<>() { - @Override - public Object visitReference(ReferenceExpression node, Object context) { - return results.add(node); - } - }, null); + expression.accept( + new ExpressionNodeVisitor<>() { + @Override + public Object visitReference(ReferenceExpression node, Object context) { + return results.add(node); + } + }, + null); return results; } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilder.java index 274bc4647d..b855b9a8b5 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilder.java @@ -14,9 +14,7 @@ import org.opensearch.sql.planner.logical.LogicalProject; import org.opensearch.sql.planner.logical.LogicalSort; -/** - * Translates a logical query plan into OpenSearch DSL and an appropriate request. - */ +/** Translates a logical query plan into OpenSearch DSL and an appropriate request. 
*/ public interface PushDownQueryBuilder { default boolean pushDownFilter(LogicalFilter filter) { return false; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/ExpressionScriptEngine.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/ExpressionScriptEngine.java index 855aae645d..167bf88f30 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/ExpressionScriptEngine.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/ExpressionScriptEngine.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script; import com.google.common.collect.ImmutableMap; @@ -21,29 +20,23 @@ import org.opensearch.sql.opensearch.storage.serialization.ExpressionSerializer; /** - * Custom expression script engine that supports using core engine expression code in DSL - * as a new script language just like built-in Painless language. + * Custom expression script engine that supports using core engine expression code in DSL as a new + * script language just like built-in Painless language. */ @RequiredArgsConstructor public class ExpressionScriptEngine implements ScriptEngine { - /** - * Expression script language name. - */ + /** Expression script language name. */ public static final String EXPRESSION_LANG_NAME = "opensearch_query_expression"; - /** - * All supported script contexts and function to create factory from expression. - */ + /** All supported script contexts and function to create factory from expression. */ private static final Map, Function> CONTEXTS = new ImmutableMap.Builder, Function>() .put(FilterScript.CONTEXT, ExpressionFilterScriptFactory::new) .put(AggregationScript.CONTEXT, ExpressionAggregationScriptFactory::new) .build(); - /** - * Expression serializer that (de-)serializes expression. - */ + /** Expression serializer that (de-)serializes expression. 
*/ private final ExpressionSerializer serializer; @Override @@ -52,10 +45,8 @@ public String getType() { } @Override - public T compile(String scriptName, - String scriptCode, - ScriptContext context, - Map params) { + public T compile( + String scriptName, String scriptCode, ScriptContext context, Map params) { /* * Note that in fact the expression source is already compiled in query engine. * The "code" is actually a serialized expression tree by our serializer. @@ -66,13 +57,15 @@ public T compile(String scriptName, if (CONTEXTS.containsKey(context)) { return context.factoryClazz.cast(CONTEXTS.get(context).apply(expression)); } - throw new IllegalStateException(String.format("Script context is currently not supported: " - + "all supported contexts [%s], given context [%s] ", CONTEXTS, context)); + throw new IllegalStateException( + String.format( + "Script context is currently not supported: " + + "all supported contexts [%s], given context [%s] ", + CONTEXTS, context)); } @Override public Set> getSupportedContexts() { return CONTEXTS.keySet(); } - } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/StringUtils.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/StringUtils.java index 7b68bd5c92..a485296b52 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/StringUtils.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/StringUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script; import lombok.experimental.UtilityClass; @@ -12,6 +11,7 @@ public class StringUtils { /** * Converts sql wildcard character % and _ to * and ?. 
+ * * @param text string to be converted * @return converted string */ diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilder.java index 8b1cb08cfa..a218151b2e 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.aggregation; import com.google.common.annotations.VisibleForTesting; @@ -39,32 +38,23 @@ import org.opensearch.sql.opensearch.storage.serialization.ExpressionSerializer; /** - * Build the AggregationBuilder from the list of {@link NamedAggregator} - * and list of {@link NamedExpression}. + * Build the AggregationBuilder from the list of {@link NamedAggregator} and list of {@link + * NamedExpression}. */ @RequiredArgsConstructor public class AggregationQueryBuilder extends ExpressionNodeVisitor { - /** - * How many composite buckets should be returned. - */ + /** How many composite buckets should be returned. */ public static final int AGGREGATION_BUCKET_SIZE = 1000; - /** - * Bucket Aggregation builder. - */ + /** Bucket Aggregation builder. */ private final BucketAggregationBuilder bucketBuilder; - /** - * Metric Aggregation builder. - */ + /** Metric Aggregation builder. */ private final MetricAggregationBuilder metricBuilder; - /** - * Aggregation Query Builder Constructor. - */ - public AggregationQueryBuilder( - ExpressionSerializer serializer) { + /** Aggregation Query Builder Constructor. 
*/ + public AggregationQueryBuilder(ExpressionSerializer serializer) { this.bucketBuilder = new BucketAggregationBuilder(serializer); this.metricBuilder = new MetricAggregationBuilder(serializer); } @@ -93,7 +83,10 @@ public AggregationQueryBuilder( bucketBuilder.build( groupByList.stream() .sorted(groupSortOrder) - .map(expr -> Triple.of(expr, + .map( + expr -> + Triple.of( + expr, groupSortOrder.sortOrder(expr), groupSortOrder.missingOrder(expr))) .collect(Collectors.toList()))) @@ -103,72 +96,62 @@ public AggregationQueryBuilder( } } - /** - * Build mapping for OpenSearchExprValueFactory. - */ + /** Build mapping for OpenSearchExprValueFactory. */ public Map buildTypeMapping( - List namedAggregatorList, - List groupByList) { + List namedAggregatorList, List groupByList) { ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); - namedAggregatorList.forEach(agg -> builder.put(agg.getName(), - OpenSearchDataType.of(agg.type()))); - groupByList.forEach(group -> builder.put(group.getNameOrAlias(), - OpenSearchDataType.of(group.type()))); + namedAggregatorList.forEach( + agg -> builder.put(agg.getName(), OpenSearchDataType.of(agg.type()))); + groupByList.forEach( + group -> builder.put(group.getNameOrAlias(), OpenSearchDataType.of(group.type()))); return builder.build(); } - /** - * Group By field sort order. - */ + /** Group By field sort order. */ @VisibleForTesting public static class GroupSortOrder implements Comparator { /** - * The default order of group field. - * The order is ASC NULL_FIRST. - * The field should be the last one in the group list. + * The default order of group field. The order is ASC NULL_FIRST. The field should be the last + * one in the group list. */ private static final Pair DEFAULT_ORDER = Pair.of(Sort.SortOption.DEFAULT_ASC, Integer.MAX_VALUE); - /** - * The mapping between {@link Sort.SortOrder} and {@link SortOrder}. - */ + /** The mapping between {@link Sort.SortOrder} and {@link SortOrder}. 
*/ private static final Map SORT_MAP = new ImmutableMap.Builder() .put(Sort.SortOrder.ASC, SortOrder.ASC) - .put(Sort.SortOrder.DESC, SortOrder.DESC).build(); + .put(Sort.SortOrder.DESC, SortOrder.DESC) + .build(); - /** - * The mapping between {@link Sort.NullOrder} and {@link MissingOrder}. - */ + /** The mapping between {@link Sort.NullOrder} and {@link MissingOrder}. */ private static final Map NULL_MAP = new ImmutableMap.Builder() .put(Sort.NullOrder.NULL_FIRST, MissingOrder.FIRST) - .put(Sort.NullOrder.NULL_LAST, MissingOrder.LAST).build(); + .put(Sort.NullOrder.NULL_LAST, MissingOrder.LAST) + .build(); private final Map> map = new HashMap<>(); - /** - * Constructor of GroupSortOrder. - */ + /** Constructor of GroupSortOrder. */ public GroupSortOrder(List> sortList) { if (null == sortList) { return; } int pos = 0; for (Pair sortPair : sortList) { - map.put(((ReferenceExpression) sortPair.getRight()).getAttr(), + map.put( + ((ReferenceExpression) sortPair.getRight()).getAttr(), Pair.of(sortPair.getLeft(), pos++)); } } /** - * Compare the two expressions. The comparison is based on the pos in the sort list. - * If the expression is defined in the sort list. then the order of the expression is the pos - * in sort list. - * If the expression isn't defined in the sort list. the the order of the expression is the - * Integer.MAX_VALUE. you can think it is at the end of the sort list. + * Compare the two expressions. The comparison is based on the pos in the sort list. If the + * expression is defined in the sort list. then the order of the expression is the pos in sort + * list. If the expression isn't defined in the sort list. the the order of the expression is + * the Integer.MAX_VALUE. you can think it is at the end of the sort list. 
* * @param o1 NamedExpression * @param o2 NamedExpression @@ -176,24 +159,19 @@ public GroupSortOrder(List> sortList) { */ @Override public int compare(NamedExpression o1, NamedExpression o2) { - final Pair o1Value = - map.getOrDefault(o1.getName(), DEFAULT_ORDER); - final Pair o2Value = - map.getOrDefault(o2.getName(), DEFAULT_ORDER); + final Pair o1Value = map.getOrDefault(o1.getName(), DEFAULT_ORDER); + final Pair o2Value = map.getOrDefault(o2.getName(), DEFAULT_ORDER); return o1Value.getRight().compareTo(o2Value.getRight()); } - /** - * Get the {@link SortOrder} for expression. - * By default, the {@link SortOrder} is ASC. - */ + /** Get the {@link SortOrder} for expression. By default, the {@link SortOrder} is ASC. */ public SortOrder sortOrder(NamedExpression expression) { return SORT_MAP.get(sortOption(expression).getSortOrder()); } /** - * Get the {@link MissingOrder} for expression. - * By default, the {@link MissingOrder} is ASC missing first / DESC missing last. + * Get the {@link MissingOrder} for expression. By default, the {@link MissingOrder} is ASC + * missing first / DESC missing last. 
*/ public MissingOrder missingOrder(NamedExpression expression) { return NULL_MAP.get(sortOption(expression).getNullOrder()); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScript.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScript.java index 2a371afaa3..7e7b2e959a 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScript.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScript.java @@ -3,14 +3,11 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.aggregation; import static java.time.temporal.ChronoUnit.MILLIS; import java.time.LocalTime; -import java.time.ZoneId; -import java.time.ZonedDateTime; import java.util.Map; import lombok.EqualsAndHashCode; import org.apache.lucene.index.LeafReaderContext; @@ -24,20 +21,14 @@ import org.opensearch.sql.opensearch.data.type.OpenSearchDataType; import org.opensearch.sql.opensearch.storage.script.core.ExpressionScript; -/** - * Aggregation expression script that executed on each document. - */ +/** Aggregation expression script that executed on each document. */ @EqualsAndHashCode(callSuper = false) public class ExpressionAggregationScript extends AggregationScript { - /** - * Expression Script. - */ + /** Expression Script. */ private final ExpressionScript expressionScript; - /** - * Constructor of ExpressionAggregationScript. - */ + /** Constructor of ExpressionAggregationScript. 
*/ public ExpressionAggregationScript( Expression expression, SearchLookup lookup, @@ -53,7 +44,7 @@ public Object execute() { if (expr.type() instanceof OpenSearchDataType) { return expr.value(); } - switch ((ExprCoreType)expr.type()) { + switch ((ExprCoreType) expr.type()) { case TIME: // Can't get timestamp from `ExprTimeValue` return MILLIS.between(LocalTime.MIN, expr.timeValue()); @@ -66,8 +57,8 @@ public Object execute() { } } - private ExprValue evaluateExpression(Expression expression, Environment valueEnv) { + private ExprValue evaluateExpression( + Expression expression, Environment valueEnv) { ExprValue result = expression.valueOf(valueEnv); // The missing value is treated as null value in doc_value, so we can't distinguish with them. diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptFactory.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptFactory.java index 3138ee90fc..c0b92e5438 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptFactory.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.aggregation; import java.util.Map; @@ -12,9 +11,7 @@ import org.opensearch.search.lookup.SearchLookup; import org.opensearch.sql.expression.Expression; -/** - * Aggregation Expression script factory that generates leaf factory. - */ +/** Aggregation Expression script factory that generates leaf factory. 
*/ @EqualsAndHashCode public class ExpressionAggregationScriptFactory implements AggregationScript.Factory { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptLeafFactory.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptLeafFactory.java index 7d22f724e3..13f9c95c8f 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptLeafFactory.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptLeafFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.aggregation; import java.util.Map; @@ -12,29 +11,19 @@ import org.opensearch.search.lookup.SearchLookup; import org.opensearch.sql.expression.Expression; -/** - * Expression script leaf factory that produces script executor for each leaf. - */ +/** Expression script leaf factory that produces script executor for each leaf. */ public class ExpressionAggregationScriptLeafFactory implements AggregationScript.LeafFactory { - /** - * Expression to execute. - */ + /** Expression to execute. */ private final Expression expression; - /** - * Expression to execute. - */ + /** Expression to execute. */ private final Map params; - /** - * Expression to execute. - */ + /** Expression to execute. */ private final SearchLookup lookup; - /** - * Constructor of ExpressionAggregationScriptLeafFactory. - */ + /** Constructor of ExpressionAggregationScriptLeafFactory. 
*/ public ExpressionAggregationScriptLeafFactory( Expression expression, Map params, SearchLookup lookup) { this.expression = expression; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/AggregationBuilderHelper.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/AggregationBuilderHelper.java index 156b565976..7dd02d82d0 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/AggregationBuilderHelper.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/AggregationBuilderHelper.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.aggregation.dsl; import static java.util.Collections.emptyMap; @@ -20,9 +19,7 @@ import org.opensearch.sql.opensearch.data.type.OpenSearchTextType; import org.opensearch.sql.opensearch.storage.serialization.ExpressionSerializer; -/** - * Abstract Aggregation Builder. - */ +/** Abstract Aggregation Builder. 
*/ @RequiredArgsConstructor public class AggregationBuilderHelper { @@ -34,20 +31,23 @@ public class AggregationBuilderHelper { * @param expression Expression * @return AggregationBuilder */ - public T build(Expression expression, Function fieldBuilder, - Function scriptBuilder) { + public T build( + Expression expression, Function fieldBuilder, Function scriptBuilder) { if (expression instanceof ReferenceExpression) { String fieldName = ((ReferenceExpression) expression).getAttr(); return fieldBuilder.apply( - OpenSearchTextType.convertTextToKeyword(fieldName, expression.type())); + OpenSearchTextType.convertTextToKeyword(fieldName, expression.type())); } else if (expression instanceof FunctionExpression || expression instanceof LiteralExpression) { - return scriptBuilder.apply(new Script( - DEFAULT_SCRIPT_TYPE, EXPRESSION_LANG_NAME, serializer.serialize(expression), - emptyMap())); + return scriptBuilder.apply( + new Script( + DEFAULT_SCRIPT_TYPE, + EXPRESSION_LANG_NAME, + serializer.serialize(expression), + emptyMap())); } else { - throw new IllegalStateException(String.format("metric aggregation doesn't support " - + "expression %s", expression)); + throw new IllegalStateException( + String.format("metric aggregation doesn't support " + "expression %s", expression)); } } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilder.java index 1a6a82be96..4485626742 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilder.java @@ -26,29 +26,24 @@ import org.opensearch.sql.expression.span.SpanExpression; import org.opensearch.sql.opensearch.storage.serialization.ExpressionSerializer; -/** - * Bucket 
Aggregation Builder. - */ +/** Bucket Aggregation Builder. */ public class BucketAggregationBuilder { private final AggregationBuilderHelper helper; - public BucketAggregationBuilder( - ExpressionSerializer serializer) { + public BucketAggregationBuilder(ExpressionSerializer serializer) { this.helper = new AggregationBuilderHelper(serializer); } - /** - * Build the list of CompositeValuesSourceBuilder. - */ + /** Build the list of CompositeValuesSourceBuilder. */ public List> build( List> groupList) { ImmutableList.Builder> resultBuilder = new ImmutableList.Builder<>(); for (Triple groupPair : groupList) { resultBuilder.add( - buildCompositeValuesSourceBuilder(groupPair.getLeft(), - groupPair.getMiddle(), groupPair.getRight())); + buildCompositeValuesSourceBuilder( + groupPair.getLeft(), groupPair.getMiddle(), groupPair.getRight())); } return resultBuilder.build(); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilder.java index 5e7d34abce..c99fbfdc49 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.aggregation.dsl; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -33,18 +32,14 @@ import org.opensearch.sql.opensearch.storage.script.filter.FilterQueryBuilder; import org.opensearch.sql.opensearch.storage.serialization.ExpressionSerializer; -/** - * Build the Metric Aggregation and List of {@link MetricParser} from {@link NamedAggregator}. - */ +/** Build the Metric Aggregation and List of {@link MetricParser} from {@link NamedAggregator}. 
*/ public class MetricAggregationBuilder extends ExpressionNodeVisitor, Object> { private final AggregationBuilderHelper helper; private final FilterQueryBuilder filterBuilder; - /** - * Constructor. - */ + /** Constructor. */ public MetricAggregationBuilder(ExpressionSerializer serializer) { this.helper = new AggregationBuilderHelper(serializer); this.filterBuilder = new FilterQueryBuilder(serializer); @@ -87,8 +82,9 @@ public Pair visitNamedAggregator( name, new SingleValueParser(name)); default: - throw new IllegalStateException(String.format( - "unsupported distinct aggregator %s", node.getFunctionName().getFunctionName())); + throw new IllegalStateException( + String.format( + "unsupported distinct aggregator %s", node.getFunctionName().getFunctionName())); } } @@ -186,14 +182,13 @@ private Pair make( return Pair.of(aggregationBuilder, parser); } - /** - * Make {@link CardinalityAggregationBuilder} for distinct count aggregations. - */ - private Pair make(CardinalityAggregationBuilder builder, - Expression expression, - Expression condition, - String name, - MetricParser parser) { + /** Make {@link CardinalityAggregationBuilder} for distinct count aggregations. */ + private Pair make( + CardinalityAggregationBuilder builder, + Expression expression, + Expression condition, + String name, + MetricParser parser) { CardinalityAggregationBuilder aggregationBuilder = helper.build(expression, builder::field, builder::script); if (condition != null) { @@ -204,15 +199,14 @@ private Pair make(CardinalityAggregationBuilde return Pair.of(aggregationBuilder, parser); } - /** - * Make {@link TopHitsAggregationBuilder} for take aggregations. - */ - private Pair make(TopHitsAggregationBuilder builder, - Expression expression, - Expression size, - Expression condition, - String name, - MetricParser parser) { + /** Make {@link TopHitsAggregationBuilder} for take aggregations. 
*/ + private Pair make( + TopHitsAggregationBuilder builder, + Expression expression, + Expression size, + Expression condition, + String name, + MetricParser parser) { String fieldName = ((ReferenceExpression) expression).getAttr(); builder.fetchSource(fieldName, null); builder.size(size.valueOf().integerValue()); @@ -245,8 +239,8 @@ private Expression replaceStarOrLiteral(Expression countArg) { * Make builder to build FilterAggregation for aggregations with filter in the bucket. * * @param subAggBuilder AggregationBuilder instance which the filter is applied to. - * @param condition Condition expression in the filter. - * @param name Name of the FilterAggregation instance to build. + * @param condition Condition expression in the filter. + * @param name Name of the FilterAggregation instance to build. * @return {@link FilterAggregationBuilder}. */ private FilterAggregationBuilder makeFilterAggregation( diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/core/ExpressionScript.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/core/ExpressionScript.java index 9bdb15d63a..3a9ff02ba0 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/core/ExpressionScript.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/core/ExpressionScript.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.core; import static java.util.stream.Collectors.toMap; @@ -33,36 +32,27 @@ import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; /** - * Expression script executor that executes the expression on each document - * and determine if the document is supposed to be filtered out or not. + * Expression script executor that executes the expression on each document and determine if the + * document is supposed to be filtered out or not. 
*/ @EqualsAndHashCode(callSuper = false) public class ExpressionScript { - /** - * Expression to execute. - */ + /** Expression to execute. */ private final Expression expression; - /** - * ElasticsearchExprValueFactory. - */ - @EqualsAndHashCode.Exclude - private final OpenSearchExprValueFactory valueFactory; + /** ElasticsearchExprValueFactory. */ + @EqualsAndHashCode.Exclude private final OpenSearchExprValueFactory valueFactory; - /** - * Reference Fields. - */ - @EqualsAndHashCode.Exclude - private final Set fields; + /** Reference Fields. */ + @EqualsAndHashCode.Exclude private final Set fields; - /** - * Expression constructor. - */ + /** Expression constructor. */ public ExpressionScript(Expression expression) { this.expression = expression; - this.fields = AccessController.doPrivileged((PrivilegedAction>) () -> - extractFields(expression)); + this.fields = + AccessController.doPrivileged( + (PrivilegedAction>) () -> extractFields(expression)); this.valueFactory = AccessController.doPrivileged( (PrivilegedAction) () -> buildValueFactory(fields)); @@ -72,65 +62,67 @@ public ExpressionScript(Expression expression) { * Evaluate on the doc generate by the doc provider. * * @param docProvider doc provider. 
- * @param evaluator evaluator + * @param evaluator evaluator * @return expr value */ - public ExprValue execute(Supplier>> docProvider, - BiFunction, ExprValue> evaluator) { - return AccessController.doPrivileged((PrivilegedAction) () -> { - Environment valueEnv = - buildValueEnv(fields, valueFactory, docProvider); - ExprValue result = evaluator.apply(expression, valueEnv); - return result; - }); + public ExprValue execute( + Supplier>> docProvider, + BiFunction, ExprValue> evaluator) { + return AccessController.doPrivileged( + (PrivilegedAction) + () -> { + Environment valueEnv = + buildValueEnv(fields, valueFactory, docProvider); + ExprValue result = evaluator.apply(expression, valueEnv); + return result; + }); } private Set extractFields(Expression expr) { Set fields = new HashSet<>(); - expr.accept(new ExpressionNodeVisitor>() { - @Override - public Object visitReference(ReferenceExpression node, Set context) { - context.add(node); - return null; - } - - @Override - public Object visitParse(ParseExpression node, Set context) { - node.getSourceField().accept(this, context); - return null; - } - }, fields); + expr.accept( + new ExpressionNodeVisitor>() { + @Override + public Object visitReference(ReferenceExpression node, Set context) { + context.add(node); + return null; + } + + @Override + public Object visitParse(ParseExpression node, Set context) { + node.getSourceField().accept(this, context); + return null; + } + }, + fields); return fields; } private OpenSearchExprValueFactory buildValueFactory(Set fields) { - Map typeEnv = fields.stream().collect(toMap( - ReferenceExpression::getAttr, e -> OpenSearchDataType.of(e.type()))); + Map typeEnv = + fields.stream() + .collect(toMap(ReferenceExpression::getAttr, e -> OpenSearchDataType.of(e.type()))); return new OpenSearchExprValueFactory(typeEnv); } private Environment buildValueEnv( - Set fields, OpenSearchExprValueFactory valueFactory, + Set fields, + OpenSearchExprValueFactory valueFactory, Supplier>> 
docProvider) { Map valueEnv = new HashMap<>(); for (ReferenceExpression field : fields) { String fieldName = field.getAttr(); - ExprValue exprValue = valueFactory.construct( - fieldName, - getDocValue(field, docProvider), - false - ); + ExprValue exprValue = + valueFactory.construct(fieldName, getDocValue(field, docProvider), false); valueEnv.put(field, exprValue); } // Encapsulate map data structure into anonymous Environment class return valueEnv::get; } - private Object getDocValue(ReferenceExpression field, - Supplier>> docProvider) { + private Object getDocValue( + ReferenceExpression field, Supplier>> docProvider) { String fieldName = OpenSearchTextType.convertTextToKeyword(field.getAttr(), field.type()); ScriptDocValues docValue = docProvider.get().get(fieldName); if (docValue == null || docValue.isEmpty()) { @@ -145,9 +137,9 @@ private Object getDocValue(ReferenceExpression field, } /** - * DocValue only support long and double so cast to integer and float if needed. - * The doc value must be Long and Double for expr type Long/Integer and Double/Float respectively. - * Otherwise there must be bugs in our engine that causes the mismatch. + * DocValue only support long and double so cast to integer and float if needed. The doc value + * must be Long and Double for expr type Long/Integer and Double/Float respectively. Otherwise + * there must be bugs in our engine that causes the mismatch. 
*/ private Object castNumberToFieldType(Object value, ExprType type) { if (value == null) { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScript.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScript.java index adce89d0df..557cbbe4c9 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScript.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScript.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter; import java.util.Map; @@ -19,21 +18,20 @@ import org.opensearch.sql.opensearch.storage.script.core.ExpressionScript; /** - * Expression script executor that executes the expression on each document - * and determine if the document is supposed to be filtered out or not. + * Expression script executor that executes the expression on each document and determine if the + * document is supposed to be filtered out or not. */ @EqualsAndHashCode(callSuper = false) class ExpressionFilterScript extends FilterScript { - /** - * Expression Script. - */ + /** Expression Script. 
*/ private final ExpressionScript expressionScript; - public ExpressionFilterScript(Expression expression, - SearchLookup lookup, - LeafReaderContext context, - Map params) { + public ExpressionFilterScript( + Expression expression, + SearchLookup lookup, + LeafReaderContext context, + Map params) { super(params, lookup, context); this.expressionScript = new ExpressionScript(expression); } @@ -43,19 +41,20 @@ public boolean execute() { return expressionScript.execute(this::getDoc, this::evaluateExpression).booleanValue(); } - private ExprValue evaluateExpression(Expression expression, - Environment valueEnv) { + private ExprValue evaluateExpression( + Expression expression, Environment valueEnv) { ExprValue result = expression.valueOf(valueEnv); if (result.isNull()) { return ExprBooleanValue.of(false); } if (result.type() != ExprCoreType.BOOLEAN) { - throw new IllegalStateException(String.format( - "Expression has wrong result type instead of boolean: " - + "expression [%s], result [%s]", expression, result)); + throw new IllegalStateException( + String.format( + "Expression has wrong result type instead of boolean: " + + "expression [%s], result [%s]", + expression, result)); } return result; } - } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptFactory.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptFactory.java index e35482d618..5db10733a7 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptFactory.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter; import java.util.Map; @@ -12,15 +11,11 @@ import org.opensearch.search.lookup.SearchLookup; import org.opensearch.sql.expression.Expression; -/** - * 
Expression script factory that generates leaf factory. - */ +/** Expression script factory that generates leaf factory. */ @EqualsAndHashCode public class ExpressionFilterScriptFactory implements FilterScript.Factory { - /** - * Expression to execute. - */ + /** Expression to execute. */ private final Expression expression; public ExpressionFilterScriptFactory(Expression expression) { @@ -37,5 +32,4 @@ public boolean isResultDeterministic() { public FilterScript.LeafFactory newFactory(Map params, SearchLookup lookup) { return new ExpressionFilterScriptLeafFactory(expression, params, lookup); } - } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptLeafFactory.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptLeafFactory.java index 22b4be1b69..6c04ca7233 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptLeafFactory.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptLeafFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter; import java.util.Map; @@ -12,29 +11,20 @@ import org.opensearch.search.lookup.SearchLookup; import org.opensearch.sql.expression.Expression; -/** - * Expression script leaf factory that produces script executor for each leaf. - */ +/** Expression script leaf factory that produces script executor for each leaf. */ class ExpressionFilterScriptLeafFactory implements FilterScript.LeafFactory { - /** - * Expression to execute. - */ + /** Expression to execute. */ private final Expression expression; - /** - * Parameters for the expression. - */ + /** Parameters for the expression. */ private final Map params; - /** - * Document lookup that returns doc values. - */ + /** Document lookup that returns doc values. 
*/ private final SearchLookup lookup; - public ExpressionFilterScriptLeafFactory(Expression expression, - Map params, - SearchLookup lookup) { + public ExpressionFilterScriptLeafFactory( + Expression expression, Map params, SearchLookup lookup) { this.expression = expression; this.params = params; this.lookup = lookup; @@ -44,5 +34,4 @@ public ExpressionFilterScriptLeafFactory(Expression expression, public FilterScript newInstance(LeafReaderContext ctx) { return new ExpressionFilterScript(expression, lookup, ctx, params); } - } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilder.java index 51b10d2c41..fa0fe19105 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter; import static java.util.Collections.emptyMap; @@ -45,14 +44,10 @@ @RequiredArgsConstructor public class FilterQueryBuilder extends ExpressionNodeVisitor { - /** - * Serializer that serializes expression for build DSL query. - */ + /** Serializer that serializes expression for build DSL query. */ private final ExpressionSerializer serializer; - /** - * Mapping from function name to lucene query builder. - */ + /** Mapping from function name to lucene query builder. 
*/ private final Map luceneQueries = ImmutableMap.builder() .put(BuiltinFunctionName.EQUAL.getName(), new TermQuery()) @@ -82,8 +77,9 @@ public class FilterQueryBuilder extends ExpressionNodeVisitor accumulator) { + private BoolQueryBuilder buildBoolQuery( + FunctionExpression node, + Object context, + BiFunction accumulator) { BoolQueryBuilder boolQuery = QueryBuilders.boolQuery(); for (Expression arg : node.getArguments()) { accumulator.apply(boolQuery, arg.accept(this, context)); @@ -131,8 +129,8 @@ private BoolQueryBuilder buildBoolQuery(FunctionExpression node, } private ScriptQueryBuilder buildScriptQuery(FunctionExpression node) { - return new ScriptQueryBuilder(new Script( - DEFAULT_SCRIPT_TYPE, EXPRESSION_LANG_NAME, serializer.serialize(node), emptyMap())); + return new ScriptQueryBuilder( + new Script( + DEFAULT_SCRIPT_TYPE, EXPRESSION_LANG_NAME, serializer.serialize(node), emptyMap())); } - } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LikeQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LikeQuery.java index 699af4f3fd..44c1c30200 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LikeQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LikeQuery.java @@ -21,10 +21,9 @@ public QueryBuilder doBuild(String fieldName, ExprType fieldType, ExprValue lite } /** - * Though WildcardQueryBuilder is required, LikeQuery needed its own class as - * it is not a relevance function which wildcard_query is. The arguments in - * LIKE are of type ReferenceExpression while wildcard_query are of type - * NamedArgumentExpression + * Though WildcardQueryBuilder is required, LikeQuery needed its own class as it is not a + * relevance function which wildcard_query is. 
The arguments in LIKE are of type + * ReferenceExpression while wildcard_query are of type NamedArgumentExpression */ protected WildcardQueryBuilder createBuilder(String field, String query) { String matchText = StringUtils.convertSqlWildcardToLucene(query); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQuery.java index a45c535383..c8e12876d6 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQuery.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter.lucene; import static org.opensearch.sql.analysis.NestedAnalyzer.isNestedFunction; @@ -35,31 +34,28 @@ import org.opensearch.sql.expression.function.BuiltinFunctionName; import org.opensearch.sql.expression.function.FunctionName; -/** - * Lucene query abstraction that builds Lucene query from function expression. - */ +/** Lucene query abstraction that builds Lucene query from function expression. */ public abstract class LuceneQuery { /** - * Check if function expression supported by current Lucene query. - * Default behavior is that report supported if: - * 1. Left is a reference - * 2. Right side is a literal + * Check if function expression supported by current Lucene query. Default behavior is that report + * supported if: 1. Left is a reference 2. Right side is a literal * - * @param func function - * @return return true if supported, otherwise false. + * @param func function + * @return return true if supported, otherwise false. 
*/ public boolean canSupport(FunctionExpression func) { return (func.getArguments().size() == 2) - && (func.getArguments().get(0) instanceof ReferenceExpression) - && (func.getArguments().get(1) instanceof LiteralExpression - || literalExpressionWrappedByCast(func)) + && (func.getArguments().get(0) instanceof ReferenceExpression) + && (func.getArguments().get(1) instanceof LiteralExpression + || literalExpressionWrappedByCast(func)) || isMultiParameterQuery(func); } /** * Check if predicate expression has nested function on left side of predicate expression. * Validation for right side being a `LiteralExpression` is done in NestedQuery. + * * @param func function. * @return return true if function has supported nested function expression. */ @@ -70,8 +66,8 @@ public boolean isNestedPredicate(FunctionExpression func) { /** * Check if the function expression has multiple named argument expressions as the parameters. * - * @param func function - * @return return true if the expression is a multi-parameter function. + * @param func function + * @return return true if the expression is a multi-parameter function. */ private boolean isMultiParameterQuery(FunctionExpression func) { for (Expression expr : func.getArguments()) { @@ -95,139 +91,163 @@ private boolean literalExpressionWrappedByCast(FunctionExpression func) { } /** - * Build Lucene query from function expression. - * The cast function is converted to literal expressions before generating DSL. + * Build Lucene query from function expression. The cast function is converted to literal + * expressions before generating DSL. * - * @param func function - * @return query + * @param func function + * @return query */ public QueryBuilder build(FunctionExpression func) { ReferenceExpression ref = (ReferenceExpression) func.getArguments().get(0); Expression expr = func.getArguments().get(1); - ExprValue literalValue = expr instanceof LiteralExpression ? 
expr - .valueOf() : cast((FunctionExpression) expr); + ExprValue literalValue = + expr instanceof LiteralExpression ? expr.valueOf() : cast((FunctionExpression) expr); return doBuild(ref.getAttr(), ref.type(), literalValue); } private ExprValue cast(FunctionExpression castFunction) { - return castMap.get(castFunction.getFunctionName()).apply( - (LiteralExpression) castFunction.getArguments().get(0)); + return castMap + .get(castFunction.getFunctionName()) + .apply((LiteralExpression) castFunction.getArguments().get(0)); } - /** - * Type converting map. - */ - private final Map> castMap = ImmutableMap - .>builder() - .put(BuiltinFunctionName.CAST_TO_STRING.getName(), expr -> { - if (!expr.type().equals(ExprCoreType.STRING)) { - return new ExprStringValue(String.valueOf(expr.valueOf().value())); - } else { - return expr.valueOf(); - } - }) - .put(BuiltinFunctionName.CAST_TO_BYTE.getName(), expr -> { - if (ExprCoreType.numberTypes().contains(expr.type())) { - return new ExprByteValue(expr.valueOf().byteValue()); - } else if (expr.type().equals(ExprCoreType.BOOLEAN)) { - return new ExprByteValue(expr.valueOf().booleanValue() ? 1 : 0); - } else { - return new ExprByteValue(Byte.valueOf(expr.valueOf().stringValue())); - } - }) - .put(BuiltinFunctionName.CAST_TO_SHORT.getName(), expr -> { - if (ExprCoreType.numberTypes().contains(expr.type())) { - return new ExprShortValue(expr.valueOf().shortValue()); - } else if (expr.type().equals(ExprCoreType.BOOLEAN)) { - return new ExprShortValue(expr.valueOf().booleanValue() ? 1 : 0); - } else { - return new ExprShortValue(Short.valueOf(expr.valueOf().stringValue())); - } - }) - .put(BuiltinFunctionName.CAST_TO_INT.getName(), expr -> { - if (ExprCoreType.numberTypes().contains(expr.type())) { - return new ExprIntegerValue(expr.valueOf().integerValue()); - } else if (expr.type().equals(ExprCoreType.BOOLEAN)) { - return new ExprIntegerValue(expr.valueOf().booleanValue() ? 
1 : 0); - } else { - return new ExprIntegerValue(Integer.valueOf(expr.valueOf().stringValue())); - } - }) - .put(BuiltinFunctionName.CAST_TO_LONG.getName(), expr -> { - if (ExprCoreType.numberTypes().contains(expr.type())) { - return new ExprLongValue(expr.valueOf().longValue()); - } else if (expr.type().equals(ExprCoreType.BOOLEAN)) { - return new ExprLongValue(expr.valueOf().booleanValue() ? 1 : 0); - } else { - return new ExprLongValue(Long.valueOf(expr.valueOf().stringValue())); - } - }) - .put(BuiltinFunctionName.CAST_TO_FLOAT.getName(), expr -> { - if (ExprCoreType.numberTypes().contains(expr.type())) { - return new ExprFloatValue(expr.valueOf().floatValue()); - } else if (expr.type().equals(ExprCoreType.BOOLEAN)) { - return new ExprFloatValue(expr.valueOf().booleanValue() ? 1 : 0); - } else { - return new ExprFloatValue(Float.valueOf(expr.valueOf().stringValue())); - } - }) - .put(BuiltinFunctionName.CAST_TO_DOUBLE.getName(), expr -> { - if (ExprCoreType.numberTypes().contains(expr.type())) { - return new ExprDoubleValue(expr.valueOf().doubleValue()); - } else if (expr.type().equals(ExprCoreType.BOOLEAN)) { - return new ExprDoubleValue(expr.valueOf().booleanValue() ? 1 : 0); - } else { - return new ExprDoubleValue(Double.valueOf(expr.valueOf().stringValue())); - } - }) - .put(BuiltinFunctionName.CAST_TO_BOOLEAN.getName(), expr -> { - if (ExprCoreType.numberTypes().contains(expr.type())) { - return expr.valueOf().doubleValue() != 0 - ? 
ExprBooleanValue.of(true) : ExprBooleanValue.of(false); - } else if (expr.type().equals(ExprCoreType.STRING)) { - return ExprBooleanValue.of(Boolean.valueOf(expr.valueOf().stringValue())); - } else { - return expr.valueOf(); - } - }) - .put(BuiltinFunctionName.CAST_TO_DATE.getName(), expr -> { - if (expr.type().equals(ExprCoreType.STRING)) { - return new ExprDateValue(expr.valueOf().stringValue()); - } else { - return new ExprDateValue(expr.valueOf().dateValue()); - } - }) - .put(BuiltinFunctionName.CAST_TO_TIME.getName(), expr -> { - if (expr.type().equals(ExprCoreType.STRING)) { - return new ExprTimeValue(expr.valueOf().stringValue()); - } else { - return new ExprTimeValue(expr.valueOf().timeValue()); - } - }) - .put(BuiltinFunctionName.CAST_TO_DATETIME.getName(), expr -> { - if (expr.type().equals(ExprCoreType.STRING)) { - return new ExprDatetimeValue(expr.valueOf().stringValue()); - } else { - return new ExprDatetimeValue(expr.valueOf().datetimeValue()); - } - }) - .put(BuiltinFunctionName.CAST_TO_TIMESTAMP.getName(), expr -> { - if (expr.type().equals(ExprCoreType.STRING)) { - return new ExprTimestampValue(expr.valueOf().stringValue()); - } else { - return new ExprTimestampValue(expr.valueOf().timestampValue()); - } - }) - .build(); + /** Type converting map. */ + private final Map> castMap = + ImmutableMap.>builder() + .put( + BuiltinFunctionName.CAST_TO_STRING.getName(), + expr -> { + if (!expr.type().equals(ExprCoreType.STRING)) { + return new ExprStringValue(String.valueOf(expr.valueOf().value())); + } else { + return expr.valueOf(); + } + }) + .put( + BuiltinFunctionName.CAST_TO_BYTE.getName(), + expr -> { + if (ExprCoreType.numberTypes().contains(expr.type())) { + return new ExprByteValue(expr.valueOf().byteValue()); + } else if (expr.type().equals(ExprCoreType.BOOLEAN)) { + return new ExprByteValue(expr.valueOf().booleanValue() ? 
1 : 0); + } else { + return new ExprByteValue(Byte.valueOf(expr.valueOf().stringValue())); + } + }) + .put( + BuiltinFunctionName.CAST_TO_SHORT.getName(), + expr -> { + if (ExprCoreType.numberTypes().contains(expr.type())) { + return new ExprShortValue(expr.valueOf().shortValue()); + } else if (expr.type().equals(ExprCoreType.BOOLEAN)) { + return new ExprShortValue(expr.valueOf().booleanValue() ? 1 : 0); + } else { + return new ExprShortValue(Short.valueOf(expr.valueOf().stringValue())); + } + }) + .put( + BuiltinFunctionName.CAST_TO_INT.getName(), + expr -> { + if (ExprCoreType.numberTypes().contains(expr.type())) { + return new ExprIntegerValue(expr.valueOf().integerValue()); + } else if (expr.type().equals(ExprCoreType.BOOLEAN)) { + return new ExprIntegerValue(expr.valueOf().booleanValue() ? 1 : 0); + } else { + return new ExprIntegerValue(Integer.valueOf(expr.valueOf().stringValue())); + } + }) + .put( + BuiltinFunctionName.CAST_TO_LONG.getName(), + expr -> { + if (ExprCoreType.numberTypes().contains(expr.type())) { + return new ExprLongValue(expr.valueOf().longValue()); + } else if (expr.type().equals(ExprCoreType.BOOLEAN)) { + return new ExprLongValue(expr.valueOf().booleanValue() ? 1 : 0); + } else { + return new ExprLongValue(Long.valueOf(expr.valueOf().stringValue())); + } + }) + .put( + BuiltinFunctionName.CAST_TO_FLOAT.getName(), + expr -> { + if (ExprCoreType.numberTypes().contains(expr.type())) { + return new ExprFloatValue(expr.valueOf().floatValue()); + } else if (expr.type().equals(ExprCoreType.BOOLEAN)) { + return new ExprFloatValue(expr.valueOf().booleanValue() ? 
1 : 0); + } else { + return new ExprFloatValue(Float.valueOf(expr.valueOf().stringValue())); + } + }) + .put( + BuiltinFunctionName.CAST_TO_DOUBLE.getName(), + expr -> { + if (ExprCoreType.numberTypes().contains(expr.type())) { + return new ExprDoubleValue(expr.valueOf().doubleValue()); + } else if (expr.type().equals(ExprCoreType.BOOLEAN)) { + return new ExprDoubleValue(expr.valueOf().booleanValue() ? 1 : 0); + } else { + return new ExprDoubleValue(Double.valueOf(expr.valueOf().stringValue())); + } + }) + .put( + BuiltinFunctionName.CAST_TO_BOOLEAN.getName(), + expr -> { + if (ExprCoreType.numberTypes().contains(expr.type())) { + return expr.valueOf().doubleValue() != 0 + ? ExprBooleanValue.of(true) + : ExprBooleanValue.of(false); + } else if (expr.type().equals(ExprCoreType.STRING)) { + return ExprBooleanValue.of(Boolean.valueOf(expr.valueOf().stringValue())); + } else { + return expr.valueOf(); + } + }) + .put( + BuiltinFunctionName.CAST_TO_DATE.getName(), + expr -> { + if (expr.type().equals(ExprCoreType.STRING)) { + return new ExprDateValue(expr.valueOf().stringValue()); + } else { + return new ExprDateValue(expr.valueOf().dateValue()); + } + }) + .put( + BuiltinFunctionName.CAST_TO_TIME.getName(), + expr -> { + if (expr.type().equals(ExprCoreType.STRING)) { + return new ExprTimeValue(expr.valueOf().stringValue()); + } else { + return new ExprTimeValue(expr.valueOf().timeValue()); + } + }) + .put( + BuiltinFunctionName.CAST_TO_DATETIME.getName(), + expr -> { + if (expr.type().equals(ExprCoreType.STRING)) { + return new ExprDatetimeValue(expr.valueOf().stringValue()); + } else { + return new ExprDatetimeValue(expr.valueOf().datetimeValue()); + } + }) + .put( + BuiltinFunctionName.CAST_TO_TIMESTAMP.getName(), + expr -> { + if (expr.type().equals(ExprCoreType.STRING)) { + return new ExprTimestampValue(expr.valueOf().stringValue()); + } else { + return new ExprTimestampValue(expr.valueOf().timestampValue()); + } + }) + .build(); /** - * Build method that subclass 
implements by default which is to build query - * from reference and literal in function arguments. + * Build method that subclass implements by default which is to build query from reference and + * literal in function arguments. * - * @param fieldName field name - * @param fieldType field type - * @param literal field value literal - * @return query + * @param fieldName field name + * @param fieldType field type + * @param literal field value literal + * @return query */ protected QueryBuilder doBuild(String fieldName, ExprType fieldType, ExprValue literal) { throw new UnsupportedOperationException( diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/NestedQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/NestedQuery.java index 358637791c..f098d5df5a 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/NestedQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/NestedQuery.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter.lucene; import org.apache.lucene.search.join.ScoreMode; @@ -15,21 +14,20 @@ import org.opensearch.sql.expression.LiteralExpression; import org.opensearch.sql.expression.ReferenceExpression; -/** - * Lucene query that build nested query. - */ +/** Lucene query that build nested query. */ public class NestedQuery extends LuceneQuery { /** * Build query for 'nested' function used in predicate expression. Supports 'nested' function on * left and literal on right. + * * @param func Function expression. * @param innerQuery Comparison query to be place inside nested query. * @return Nested query. 
*/ public QueryBuilder buildNested(FunctionExpression func, LuceneQuery innerQuery) { // Generate inner query for placement inside nested query - FunctionExpression nestedFunc = (FunctionExpression)func.getArguments().get(0); + FunctionExpression nestedFunc = (FunctionExpression) func.getArguments().get(0); validateArgs(nestedFunc, func.getArguments().get(1)); ExprValue literalValue = func.getArguments().get(1).valueOf(); ReferenceExpression ref = (ReferenceExpression) nestedFunc.getArguments().get(0); @@ -38,14 +36,17 @@ public QueryBuilder buildNested(FunctionExpression func, LuceneQuery innerQuery) // Generate nested query boolean hasPathParam = nestedFunc.getArguments().size() == 2; - String pathStr = hasPathParam ? nestedFunc.getArguments().get(1).toString() : - getNestedPathString((ReferenceExpression) nestedFunc.getArguments().get(0)); + String pathStr = + hasPathParam + ? nestedFunc.getArguments().get(1).toString() + : getNestedPathString((ReferenceExpression) nestedFunc.getArguments().get(0)); return QueryBuilders.nestedQuery(pathStr, innerQueryResult, ScoreMode.None); } /** - * Dynamically generate path for nested field. An example field of 'office.section.cubicle' - * would dynamically generate the path 'office.section'. + * Dynamically generate path for nested field. An example field of 'office.section.cubicle' would + * dynamically generate the path 'office.section'. + * * @param field nested field to generate path for. * @return path for nested field. */ @@ -59,31 +60,27 @@ private String getNestedPathString(ReferenceExpression field) { /** * Validate arguments in nested function and predicate expression. + * * @param nestedFunc Nested function expression. 
*/ private void validateArgs(FunctionExpression nestedFunc, Expression rightExpression) { if (nestedFunc.getArguments().size() > 2) { throw new IllegalArgumentException( - "nested function supports 2 parameters (field, path) or 1 parameter (field)" - ); + "nested function supports 2 parameters (field, path) or 1 parameter (field)"); } for (var arg : nestedFunc.getArguments()) { if (!(arg instanceof ReferenceExpression)) { throw new IllegalArgumentException( - String.format("Illegal nested field name: %s", - arg.toString() - ) - ); + String.format("Illegal nested field name: %s", arg.toString())); } } if (!(rightExpression instanceof LiteralExpression)) { throw new IllegalArgumentException( - String.format("Illegal argument on right side of predicate expression: %s", - rightExpression.toString() - ) - ); + String.format( + "Illegal argument on right side of predicate expression: %s", + rightExpression.toString())); } } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQuery.java index 7e13cad592..2e33e3cc7c 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQuery.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter.lucene; import lombok.RequiredArgsConstructor; @@ -14,19 +13,19 @@ import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.data.type.ExprType; -/** - * Lucene query that builds range query for non-quality comparison. - */ +/** Lucene query that builds range query for non-quality comparison. 
*/ @RequiredArgsConstructor public class RangeQuery extends LuceneQuery { public enum Comparison { - LT, GT, LTE, GTE, BETWEEN + LT, + GT, + LTE, + GTE, + BETWEEN } - /** - * Comparison that range query build for. - */ + /** Comparison that range query build for. */ private final Comparison comparison; @Override @@ -55,5 +54,4 @@ private Object value(ExprValue literal) { return literal.value(); } } - } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/TermQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/TermQuery.java index c98de1cd84..cd506898d7 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/TermQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/TermQuery.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter.lucene; import org.opensearch.index.query.QueryBuilder; @@ -13,9 +12,7 @@ import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.opensearch.data.type.OpenSearchTextType; -/** - * Lucene query that build term query for equality comparison. - */ +/** Lucene query that build term query for equality comparison. 
*/ public class TermQuery extends LuceneQuery { @Override diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/FunctionParameterRepository.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/FunctionParameterRepository.java index 1adddff95d..f86fc08f04 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/FunctionParameterRepository.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/FunctionParameterRepository.java @@ -31,192 +31,254 @@ public class FunctionParameterRepository { public static final Map> - MatchBoolPrefixQueryBuildActions = ImmutableMap.>builder() - .put("analyzer", (b, v) -> b.analyzer(v.stringValue())) - .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) - .put("fuzziness", (b, v) -> b.fuzziness(convertFuzziness(v))) - .put("fuzzy_rewrite", (b, v) -> b.fuzzyRewrite(checkRewrite(v, "fuzzy_rewrite"))) - .put("fuzzy_transpositions", (b, v) -> b.fuzzyTranspositions( - convertBoolValue(v, "fuzzy_transpositions"))) - .put("max_expansions", (b, v) -> b.maxExpansions(convertIntValue(v, "max_expansions"))) - .put("minimum_should_match", (b, v) -> b.minimumShouldMatch(v.stringValue())) - .put("operator", (b, v) -> b.operator(convertOperator(v, "operator"))) - .put("prefix_length", (b, v) -> b.prefixLength(convertIntValue(v, "prefix_length"))) - .build(); + MatchBoolPrefixQueryBuildActions = + ImmutableMap + .>builder() + .put("analyzer", (b, v) -> b.analyzer(v.stringValue())) + .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) + .put("fuzziness", (b, v) -> b.fuzziness(convertFuzziness(v))) + .put("fuzzy_rewrite", (b, v) -> b.fuzzyRewrite(checkRewrite(v, "fuzzy_rewrite"))) + .put( + "fuzzy_transpositions", + (b, v) -> b.fuzzyTranspositions(convertBoolValue(v, "fuzzy_transpositions"))) + .put( + "max_expansions", (b, v) -> 
b.maxExpansions(convertIntValue(v, "max_expansions"))) + .put("minimum_should_match", (b, v) -> b.minimumShouldMatch(v.stringValue())) + .put("operator", (b, v) -> b.operator(convertOperator(v, "operator"))) + .put("prefix_length", (b, v) -> b.prefixLength(convertIntValue(v, "prefix_length"))) + .build(); public static final Map> - MatchPhrasePrefixQueryBuildActions = ImmutableMap.>builder() - .put("analyzer", (b, v) -> b.analyzer(v.stringValue())) - .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) - .put("max_expansions", (b, v) -> b.maxExpansions(convertIntValue(v, "max_expansions"))) - .put("slop", (b, v) -> b.slop(convertIntValue(v, "slop"))) - .put("zero_terms_query", (b, v) -> b.zeroTermsQuery(convertZeroTermsQuery(v))) - .build(); + MatchPhrasePrefixQueryBuildActions = + ImmutableMap + .>builder() + .put("analyzer", (b, v) -> b.analyzer(v.stringValue())) + .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) + .put( + "max_expansions", (b, v) -> b.maxExpansions(convertIntValue(v, "max_expansions"))) + .put("slop", (b, v) -> b.slop(convertIntValue(v, "slop"))) + .put("zero_terms_query", (b, v) -> b.zeroTermsQuery(convertZeroTermsQuery(v))) + .build(); public static final Map> - MatchPhraseQueryBuildActions = ImmutableMap.>builder() - .put("analyzer", (b, v) -> b.analyzer(v.stringValue())) - .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) - .put("slop", (b, v) -> b.slop(convertIntValue(v, "slop"))) - .put("zero_terms_query", (b, v) -> b.zeroTermsQuery(convertZeroTermsQuery(v))) - .build(); + MatchPhraseQueryBuildActions = + ImmutableMap.>builder() + .put("analyzer", (b, v) -> b.analyzer(v.stringValue())) + .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) + .put("slop", (b, v) -> b.slop(convertIntValue(v, "slop"))) + .put("zero_terms_query", (b, v) -> b.zeroTermsQuery(convertZeroTermsQuery(v))) + .build(); public static final Map> - MatchQueryBuildActions = ImmutableMap.>builder() - .put("analyzer", (b, 
v) -> b.analyzer(v.stringValue())) - .put("auto_generate_synonyms_phrase_query", (b, v) -> b.autoGenerateSynonymsPhraseQuery( - convertBoolValue(v, "auto_generate_synonyms_phrase_query"))) - .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) - .put("fuzziness", (b, v) -> b.fuzziness(convertFuzziness(v))) - .put("fuzzy_rewrite", (b, v) -> b.fuzzyRewrite(checkRewrite(v, "fuzzy_rewrite"))) - .put("fuzzy_transpositions", (b, v) -> b.fuzzyTranspositions( - convertBoolValue(v, "fuzzy_transpositions"))) - .put("lenient", (b, v) -> b.lenient(convertBoolValue(v, "lenient"))) - .put("minimum_should_match", (b, v) -> b.minimumShouldMatch(v.stringValue())) - .put("max_expansions", (b, v) -> b.maxExpansions(convertIntValue(v, "max_expansions"))) - .put("operator", (b, v) -> b.operator(convertOperator(v, "operator"))) - .put("prefix_length", (b, v) -> b.prefixLength(convertIntValue(v, "prefix_length"))) - .put("zero_terms_query", (b, v) -> b.zeroTermsQuery(convertZeroTermsQuery(v))) - .build(); + MatchQueryBuildActions = + ImmutableMap.>builder() + .put("analyzer", (b, v) -> b.analyzer(v.stringValue())) + .put( + "auto_generate_synonyms_phrase_query", + (b, v) -> + b.autoGenerateSynonymsPhraseQuery( + convertBoolValue(v, "auto_generate_synonyms_phrase_query"))) + .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) + .put("fuzziness", (b, v) -> b.fuzziness(convertFuzziness(v))) + .put("fuzzy_rewrite", (b, v) -> b.fuzzyRewrite(checkRewrite(v, "fuzzy_rewrite"))) + .put( + "fuzzy_transpositions", + (b, v) -> b.fuzzyTranspositions(convertBoolValue(v, "fuzzy_transpositions"))) + .put("lenient", (b, v) -> b.lenient(convertBoolValue(v, "lenient"))) + .put("minimum_should_match", (b, v) -> b.minimumShouldMatch(v.stringValue())) + .put( + "max_expansions", (b, v) -> b.maxExpansions(convertIntValue(v, "max_expansions"))) + .put("operator", (b, v) -> b.operator(convertOperator(v, "operator"))) + .put("prefix_length", (b, v) -> b.prefixLength(convertIntValue(v, 
"prefix_length"))) + .put("zero_terms_query", (b, v) -> b.zeroTermsQuery(convertZeroTermsQuery(v))) + .build(); @SuppressWarnings("deprecation") // cutoffFrequency is deprecated public static final Map> - MultiMatchQueryBuildActions = ImmutableMap.>builder() - .put("analyzer", (b, v) -> b.analyzer(v.stringValue())) - .put("auto_generate_synonyms_phrase_query", (b, v) -> b.autoGenerateSynonymsPhraseQuery( - convertBoolValue(v, "auto_generate_synonyms_phrase_query"))) - .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) - .put("cutoff_frequency", (b, v) -> b.cutoffFrequency( - convertFloatValue(v, "cutoff_frequency"))) - .put("fuzziness", (b, v) -> b.fuzziness(convertFuzziness(v))) - .put("fuzzy_transpositions", (b, v) -> b.fuzzyTranspositions( - convertBoolValue(v, "fuzzy_transpositions"))) - .put("lenient", (b, v) -> b.lenient(convertBoolValue(v, "lenient"))) - .put("max_expansions", (b, v) -> b.maxExpansions(convertIntValue(v, "max_expansions"))) - .put("minimum_should_match", (b, v) -> b.minimumShouldMatch(v.stringValue())) - .put("operator", (b, v) -> b.operator(convertOperator(v, "operator"))) - .put("prefix_length", (b, v) -> b.prefixLength(convertIntValue(v, "prefix_length"))) - .put("slop", (b, v) -> b.slop(convertIntValue(v, "slop"))) - .put("tie_breaker", (b, v) -> b.tieBreaker(convertFloatValue(v, "tie_breaker"))) - .put("type", (b, v) -> b.type(convertType(v))) - .put("zero_terms_query", (b, v) -> b.zeroTermsQuery(convertZeroTermsQuery(v))) - .build(); + MultiMatchQueryBuildActions = + ImmutableMap.>builder() + .put("analyzer", (b, v) -> b.analyzer(v.stringValue())) + .put( + "auto_generate_synonyms_phrase_query", + (b, v) -> + b.autoGenerateSynonymsPhraseQuery( + convertBoolValue(v, "auto_generate_synonyms_phrase_query"))) + .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) + .put( + "cutoff_frequency", + (b, v) -> b.cutoffFrequency(convertFloatValue(v, "cutoff_frequency"))) + .put("fuzziness", (b, v) -> 
b.fuzziness(convertFuzziness(v))) + .put( + "fuzzy_transpositions", + (b, v) -> b.fuzzyTranspositions(convertBoolValue(v, "fuzzy_transpositions"))) + .put("lenient", (b, v) -> b.lenient(convertBoolValue(v, "lenient"))) + .put( + "max_expansions", (b, v) -> b.maxExpansions(convertIntValue(v, "max_expansions"))) + .put("minimum_should_match", (b, v) -> b.minimumShouldMatch(v.stringValue())) + .put("operator", (b, v) -> b.operator(convertOperator(v, "operator"))) + .put("prefix_length", (b, v) -> b.prefixLength(convertIntValue(v, "prefix_length"))) + .put("slop", (b, v) -> b.slop(convertIntValue(v, "slop"))) + .put("tie_breaker", (b, v) -> b.tieBreaker(convertFloatValue(v, "tie_breaker"))) + .put("type", (b, v) -> b.type(convertType(v))) + .put("zero_terms_query", (b, v) -> b.zeroTermsQuery(convertZeroTermsQuery(v))) + .build(); public static final Map> - QueryStringQueryBuildActions = ImmutableMap.>builder() - .put("allow_leading_wildcard", (b, v) -> b.allowLeadingWildcard( - convertBoolValue(v, "allow_leading_wildcard"))) - .put("analyzer", (b, v) -> b.analyzer(v.stringValue())) - .put("analyze_wildcard", (b, v) -> b.analyzeWildcard( - convertBoolValue(v, "analyze_wildcard"))) - .put("auto_generate_synonyms_phrase_query", (b, v) -> b.autoGenerateSynonymsPhraseQuery( - convertBoolValue(v, "auto_generate_synonyms_phrase_query"))) - .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) - .put("default_operator", (b, v) -> b.defaultOperator( - convertOperator(v, "default_operator"))) - .put("enable_position_increments", (b, v) -> b.enablePositionIncrements( - convertBoolValue(v, "enable_position_increments"))) - .put("escape", (b, v) -> b.escape(convertBoolValue(v, "escape"))) - .put("fuzziness", (b, v) -> b.fuzziness(convertFuzziness(v))) - .put("fuzzy_max_expansions", (b, v) -> b.fuzzyMaxExpansions( - convertIntValue(v, "fuzzy_max_expansions"))) - .put("fuzzy_prefix_length", (b, v) -> b.fuzzyPrefixLength( - convertIntValue(v, "fuzzy_prefix_length"))) - 
.put("fuzzy_rewrite", (b, v) -> b.fuzzyRewrite(checkRewrite(v, "fuzzy_rewrite"))) - .put("fuzzy_transpositions", (b, v) -> b.fuzzyTranspositions( - convertBoolValue(v, "fuzzy_transpositions"))) - .put("lenient", (b, v) -> b.lenient(convertBoolValue(v, "lenient"))) - .put("max_determinized_states", (b, v) -> b.maxDeterminizedStates( - convertIntValue(v, "max_determinized_states"))) - .put("minimum_should_match", (b, v) -> b.minimumShouldMatch(v.stringValue())) - .put("phrase_slop", (b, v) -> b.phraseSlop(convertIntValue(v, "phrase_slop"))) - .put("quote_analyzer", (b, v) -> b.quoteAnalyzer(v.stringValue())) - .put("quote_field_suffix", (b, v) -> b.quoteFieldSuffix(v.stringValue())) - .put("rewrite", (b, v) -> b.rewrite(checkRewrite(v, "rewrite"))) - .put("tie_breaker", (b, v) -> b.tieBreaker(convertFloatValue(v, "tie_breaker"))) - .put("time_zone", (b, v) -> b.timeZone(checkTimeZone(v))) - .put("type", (b, v) -> b.type(convertType(v))) - .build(); + QueryStringQueryBuildActions = + ImmutableMap.>builder() + .put( + "allow_leading_wildcard", + (b, v) -> b.allowLeadingWildcard(convertBoolValue(v, "allow_leading_wildcard"))) + .put("analyzer", (b, v) -> b.analyzer(v.stringValue())) + .put( + "analyze_wildcard", + (b, v) -> b.analyzeWildcard(convertBoolValue(v, "analyze_wildcard"))) + .put( + "auto_generate_synonyms_phrase_query", + (b, v) -> + b.autoGenerateSynonymsPhraseQuery( + convertBoolValue(v, "auto_generate_synonyms_phrase_query"))) + .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) + .put( + "default_operator", + (b, v) -> b.defaultOperator(convertOperator(v, "default_operator"))) + .put( + "enable_position_increments", + (b, v) -> + b.enablePositionIncrements(convertBoolValue(v, "enable_position_increments"))) + .put("escape", (b, v) -> b.escape(convertBoolValue(v, "escape"))) + .put("fuzziness", (b, v) -> b.fuzziness(convertFuzziness(v))) + .put( + "fuzzy_max_expansions", + (b, v) -> b.fuzzyMaxExpansions(convertIntValue(v, 
"fuzzy_max_expansions"))) + .put( + "fuzzy_prefix_length", + (b, v) -> b.fuzzyPrefixLength(convertIntValue(v, "fuzzy_prefix_length"))) + .put("fuzzy_rewrite", (b, v) -> b.fuzzyRewrite(checkRewrite(v, "fuzzy_rewrite"))) + .put( + "fuzzy_transpositions", + (b, v) -> b.fuzzyTranspositions(convertBoolValue(v, "fuzzy_transpositions"))) + .put("lenient", (b, v) -> b.lenient(convertBoolValue(v, "lenient"))) + .put( + "max_determinized_states", + (b, v) -> b.maxDeterminizedStates(convertIntValue(v, "max_determinized_states"))) + .put("minimum_should_match", (b, v) -> b.minimumShouldMatch(v.stringValue())) + .put("phrase_slop", (b, v) -> b.phraseSlop(convertIntValue(v, "phrase_slop"))) + .put("quote_analyzer", (b, v) -> b.quoteAnalyzer(v.stringValue())) + .put("quote_field_suffix", (b, v) -> b.quoteFieldSuffix(v.stringValue())) + .put("rewrite", (b, v) -> b.rewrite(checkRewrite(v, "rewrite"))) + .put("tie_breaker", (b, v) -> b.tieBreaker(convertFloatValue(v, "tie_breaker"))) + .put("time_zone", (b, v) -> b.timeZone(checkTimeZone(v))) + .put("type", (b, v) -> b.type(convertType(v))) + .build(); public static final Map> - SimpleQueryStringQueryBuildActions = ImmutableMap.>builder() - .put("analyzer", (b, v) -> b.analyzer(v.stringValue())) - .put("analyze_wildcard", (b, v) -> b.analyzeWildcard( - convertBoolValue(v, "analyze_wildcard"))) - .put("auto_generate_synonyms_phrase_query", (b, v) -> b.autoGenerateSynonymsPhraseQuery( - convertBoolValue(v, "auto_generate_synonyms_phrase_query"))) - .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) - .put("default_operator", (b, v) -> b.defaultOperator( - convertOperator(v, "default_operator"))) - .put("flags", (b, v) -> b.flags(convertFlags(v))) - .put("fuzzy_max_expansions", (b, v) -> b.fuzzyMaxExpansions( - convertIntValue(v, "fuzzy_max_expansions"))) - .put("fuzzy_prefix_length", (b, v) -> b.fuzzyPrefixLength( - convertIntValue(v, "fuzzy_prefix_length"))) - .put("fuzzy_transpositions", (b, v) -> b.fuzzyTranspositions( 
- convertBoolValue(v, "fuzzy_transpositions"))) - .put("lenient", (b, v) -> b.lenient(convertBoolValue(v, "lenient"))) - .put("minimum_should_match", (b, v) -> b.minimumShouldMatch(v.stringValue())) - .put("quote_field_suffix", (b, v) -> b.quoteFieldSuffix(v.stringValue())) - .build(); + SimpleQueryStringQueryBuildActions = + ImmutableMap.>builder() + .put("analyzer", (b, v) -> b.analyzer(v.stringValue())) + .put( + "analyze_wildcard", + (b, v) -> b.analyzeWildcard(convertBoolValue(v, "analyze_wildcard"))) + .put( + "auto_generate_synonyms_phrase_query", + (b, v) -> + b.autoGenerateSynonymsPhraseQuery( + convertBoolValue(v, "auto_generate_synonyms_phrase_query"))) + .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) + .put( + "default_operator", + (b, v) -> b.defaultOperator(convertOperator(v, "default_operator"))) + .put("flags", (b, v) -> b.flags(convertFlags(v))) + .put( + "fuzzy_max_expansions", + (b, v) -> b.fuzzyMaxExpansions(convertIntValue(v, "fuzzy_max_expansions"))) + .put( + "fuzzy_prefix_length", + (b, v) -> b.fuzzyPrefixLength(convertIntValue(v, "fuzzy_prefix_length"))) + .put( + "fuzzy_transpositions", + (b, v) -> b.fuzzyTranspositions(convertBoolValue(v, "fuzzy_transpositions"))) + .put("lenient", (b, v) -> b.lenient(convertBoolValue(v, "lenient"))) + .put("minimum_should_match", (b, v) -> b.minimumShouldMatch(v.stringValue())) + .put("quote_field_suffix", (b, v) -> b.quoteFieldSuffix(v.stringValue())) + .build(); public static final Map> - WildcardQueryBuildActions = ImmutableMap.>builder() - .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) - .put("case_insensitive", (b, v) -> b.caseInsensitive(convertBoolValue(v, "case_insensitive"))) - .put("rewrite", (b, v) -> b.rewrite(checkRewrite(v, "rewrite"))) - .build(); + WildcardQueryBuildActions = + ImmutableMap.>builder() + .put("boost", (b, v) -> b.boost(convertFloatValue(v, "boost"))) + .put( + "case_insensitive", + (b, v) -> b.caseInsensitive(convertBoolValue(v, 
"case_insensitive"))) + .put("rewrite", (b, v) -> b.rewrite(checkRewrite(v, "rewrite"))) + .build(); public static final Map ArgumentLimitations = ImmutableMap.builder() - .put("boost", "Accepts only floating point values greater than 0.") - .put("tie_breaker", "Accepts only floating point values in range 0 to 1.") - .put("rewrite", "Available values are: constant_score, " - + "scoring_boolean, constant_score_boolean, top_terms_X, top_terms_boost_X, " - + "top_terms_blended_freqs_X, where X is an integer value.") - .put("flags", String.format( - "Available values are: %s and any combinations of these separated by '|'.", - Arrays.stream(SimpleQueryStringFlag.class.getEnumConstants()) - .map(Enum::toString).collect(Collectors.joining(", ")))) - .put("time_zone", "For more information, follow this link: " - + "https://docs.oracle.com/javase/8/docs/api/java/time/ZoneId.html#of-java.lang.String-") - .put("fuzziness", "Available values are: " - + "'AUTO', 'AUTO:x,y' or z, where x, y, z - integer values.") - .put("operator", String.format("Available values are: %s.", - Arrays.stream(Operator.class.getEnumConstants()) - .map(Enum::toString).collect(Collectors.joining(", ")))) - .put("type", String.format("Available values are: %s.", - Arrays.stream(MultiMatchQueryBuilder.Type.class.getEnumConstants()) - .map(Enum::toString).collect(Collectors.joining(", ")))) - .put("zero_terms_query", String.format("Available values are: %s.", - Arrays.stream(MatchQuery.ZeroTermsQuery.class.getEnumConstants()) - .map(Enum::toString).collect(Collectors.joining(", ")))) - .put("int", "Accepts only integer values.") - .put("float", "Accepts only floating point values.") - .put("bool", "Accepts only boolean values: 'true' or 'false'.") - .build(); - + .put("boost", "Accepts only floating point values greater than 0.") + .put("tie_breaker", "Accepts only floating point values in range 0 to 1.") + .put( + "rewrite", + "Available values are: constant_score, " + + "scoring_boolean, 
constant_score_boolean, top_terms_X, top_terms_boost_X, " + + "top_terms_blended_freqs_X, where X is an integer value.") + .put( + "flags", + String.format( + "Available values are: %s and any combinations of these separated by '|'.", + Arrays.stream(SimpleQueryStringFlag.class.getEnumConstants()) + .map(Enum::toString) + .collect(Collectors.joining(", ")))) + .put( + "time_zone", + "For more information, follow this link: " + + "https://docs.oracle.com/javase/8/docs/api/java/time/ZoneId.html#of-java.lang.String-") + .put( + "fuzziness", + "Available values are: " + "'AUTO', 'AUTO:x,y' or z, where x, y, z - integer values.") + .put( + "operator", + String.format( + "Available values are: %s.", + Arrays.stream(Operator.class.getEnumConstants()) + .map(Enum::toString) + .collect(Collectors.joining(", ")))) + .put( + "type", + String.format( + "Available values are: %s.", + Arrays.stream(MultiMatchQueryBuilder.Type.class.getEnumConstants()) + .map(Enum::toString) + .collect(Collectors.joining(", ")))) + .put( + "zero_terms_query", + String.format( + "Available values are: %s.", + Arrays.stream(MatchQuery.ZeroTermsQuery.class.getEnumConstants()) + .map(Enum::toString) + .collect(Collectors.joining(", ")))) + .put("int", "Accepts only integer values.") + .put("float", "Accepts only floating point values.") + .put("bool", "Accepts only boolean values: 'true' or 'false'.") + .build(); private static String formatErrorMessage(String name, String value) { return formatErrorMessage(name, value, name); } private static String formatErrorMessage(String name, String value, String limitationName) { - return String.format("Invalid %s value: '%s'. %s", - name, value, ArgumentLimitations.containsKey(name) ? ArgumentLimitations.get(name) + return String.format( + "Invalid %s value: '%s'. %s", + name, + value, + ArgumentLimitations.containsKey(name) + ? 
ArgumentLimitations.get(name) : ArgumentLimitations.getOrDefault(limitationName, "")); } /** * Check whether value is valid for 'rewrite' or 'fuzzy_rewrite'. + * * @param value Value * @param name Value name * @return Converted @@ -233,6 +295,7 @@ public static String checkRewrite(ExprValue value, String name) { /** * Convert ExprValue to Flags. + * * @param value Value * @return Array of flags */ @@ -248,6 +311,7 @@ public static SimpleQueryStringFlag[] convertFlags(ExprValue value) { /** * Check whether ExprValue could be converted to timezone object. + * * @param value Value * @return Converted to string */ @@ -262,6 +326,7 @@ public static String checkTimeZone(ExprValue value) { /** * Convert ExprValue to Fuzziness object. + * * @param value Value * @return Fuzziness */ @@ -275,6 +340,7 @@ public static Fuzziness convertFuzziness(ExprValue value) { /** * Convert ExprValue to Operator object, could be used for 'operator' and 'default_operator'. + * * @param value Value * @param name Value name * @return Operator @@ -289,13 +355,14 @@ public static Operator convertOperator(ExprValue value, String name) { /** * Convert ExprValue to Type object. + * * @param value Value * @return Type */ public static MultiMatchQueryBuilder.Type convertType(ExprValue value) { try { - return MultiMatchQueryBuilder.Type.parse(value.stringValue().toLowerCase(), - LoggingDeprecationHandler.INSTANCE); + return MultiMatchQueryBuilder.Type.parse( + value.stringValue().toLowerCase(), LoggingDeprecationHandler.INSTANCE); } catch (Exception e) { throw new RuntimeException(formatErrorMessage("type", value.stringValue()), e); } @@ -303,6 +370,7 @@ public static MultiMatchQueryBuilder.Type convertType(ExprValue value) { /** * Convert ExprValue to ZeroTermsQuery object. + * * @param value Value * @return ZeroTermsQuery */ @@ -316,6 +384,7 @@ public static MatchQuery.ZeroTermsQuery convertZeroTermsQuery(ExprValue value) { /** * Convert ExprValue to int. 
+ * * @param value Value * @param name Value name * @return int @@ -330,6 +399,7 @@ public static int convertIntValue(ExprValue value, String name) { /** * Convert ExprValue to float. + * * @param value Value * @param name Value name * @return float @@ -344,6 +414,7 @@ public static float convertFloatValue(ExprValue value, String name) { /** * Convert ExprValue to bool. + * * @param value Value * @param name Value name * @return bool diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchBoolPrefixQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchBoolPrefixQuery.java index 7044a56035..5443d7154d 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchBoolPrefixQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchBoolPrefixQuery.java @@ -8,14 +8,11 @@ import org.opensearch.index.query.MatchBoolPrefixQueryBuilder; import org.opensearch.index.query.QueryBuilders; -/** - * Initializes MatchBoolPrefixQueryBuilder from a FunctionExpression. - */ -public class MatchBoolPrefixQuery - extends SingleFieldQuery { +/** Initializes MatchBoolPrefixQueryBuilder from a FunctionExpression. */ +public class MatchBoolPrefixQuery extends SingleFieldQuery { /** - * Constructor for MatchBoolPrefixQuery to configure RelevanceQuery - * with support of optional parameters. + * Constructor for MatchBoolPrefixQuery to configure RelevanceQuery with support of optional + * parameters. */ public MatchBoolPrefixQuery() { super(FunctionParameterRepository.MatchBoolPrefixQueryBuildActions); @@ -23,9 +20,10 @@ public MatchBoolPrefixQuery() { /** * Maps correct query builder function to class. 
- * @param field Field to execute query in - * @param query Text used to search field - * @return Object of executed query + * + * @param field Field to execute query in + * @param query Text used to search field + * @return Object of executed query */ @Override protected MatchBoolPrefixQueryBuilder createBuilder(String field, String query) { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchPhrasePrefixQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchPhrasePrefixQuery.java index 8ee9ae299e..5a9b5e0d1c 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchPhrasePrefixQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchPhrasePrefixQuery.java @@ -8,12 +8,10 @@ import org.opensearch.index.query.MatchPhrasePrefixQueryBuilder; import org.opensearch.index.query.QueryBuilders; -/** - * Lucene query that builds a match_phrase_prefix query. - */ +/** Lucene query that builds a match_phrase_prefix query. */ public class MatchPhrasePrefixQuery extends SingleFieldQuery { /** - * Default constructor for MatchPhrasePrefixQuery configures how RelevanceQuery.build() handles + * Default constructor for MatchPhrasePrefixQuery configures how RelevanceQuery.build() handles * named arguments. 
*/ public MatchPhrasePrefixQuery() { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchPhraseQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchPhraseQuery.java index 2afaca1a7a..3c823b7cae 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchPhraseQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchPhraseQuery.java @@ -8,13 +8,11 @@ import org.opensearch.index.query.MatchPhraseQueryBuilder; import org.opensearch.index.query.QueryBuilders; -/** - * Lucene query that builds a match_phrase query. - */ +/** Lucene query that builds a match_phrase query. */ public class MatchPhraseQuery extends SingleFieldQuery { /** - * Default constructor for MatchPhraseQuery configures how RelevanceQuery.build() handles - * named arguments. + * Default constructor for MatchPhraseQuery configures how RelevanceQuery.build() handles named + * arguments. */ public MatchPhraseQuery() { super(FunctionParameterRepository.MatchPhraseQueryBuildActions); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchQuery.java index a4de1c0831..b40d4fb85b 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MatchQuery.java @@ -8,13 +8,11 @@ import org.opensearch.index.query.MatchQueryBuilder; import org.opensearch.index.query.QueryBuilders; -/** - * Initializes MatchQueryBuilder from a FunctionExpression. - */ +/** Initializes MatchQueryBuilder from a FunctionExpression. 
*/ public class MatchQuery extends SingleFieldQuery { /** - * Default constructor for MatchQuery configures how RelevanceQuery.build() handles - * named arguments. + * Default constructor for MatchQuery configures how RelevanceQuery.build() handles named + * arguments. */ public MatchQuery() { super(FunctionParameterRepository.MatchQueryBuildActions); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MultiFieldQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MultiFieldQuery.java index 9f37951072..b6e854a3f8 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MultiFieldQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MultiFieldQuery.java @@ -13,7 +13,8 @@ import org.opensearch.sql.expression.NamedArgumentExpression; /** - * Base class to represent relevance queries that search multiple fields. + * Base class to represent relevance queries that search multiple fields. + * * @param The builder class for the OpenSearch query. 
*/ abstract class MultiFieldQuery extends RelevanceQuery { @@ -25,26 +26,24 @@ public MultiFieldQuery(Map> queryBuildActions) { @Override public T createQueryBuilder(List arguments) { // Extract 'fields' and 'query' - var fields = arguments.stream() - .filter(a -> a.getArgName().equalsIgnoreCase("fields")) - .findFirst() - .orElseThrow(() -> new SemanticCheckException("'fields' parameter is missing.")); - - var query = arguments.stream() - .filter(a -> a.getArgName().equalsIgnoreCase("query")) - .findFirst() - .orElseThrow(() -> new SemanticCheckException("'query' parameter is missing")); - - var fieldsAndWeights = fields - .getValue() - .valueOf() - .tupleValue() - .entrySet() - .stream() - .collect(ImmutableMap.toImmutableMap(e -> e.getKey(), e -> e.getValue().floatValue())); + var fields = + arguments.stream() + .filter(a -> a.getArgName().equalsIgnoreCase("fields")) + .findFirst() + .orElseThrow(() -> new SemanticCheckException("'fields' parameter is missing.")); + + var query = + arguments.stream() + .filter(a -> a.getArgName().equalsIgnoreCase("query")) + .findFirst() + .orElseThrow(() -> new SemanticCheckException("'query' parameter is missing")); + + var fieldsAndWeights = + fields.getValue().valueOf().tupleValue().entrySet().stream() + .collect(ImmutableMap.toImmutableMap(e -> e.getKey(), e -> e.getValue().floatValue())); return createBuilder(fieldsAndWeights, query.getValue().valueOf().stringValue()); } - protected abstract T createBuilder(ImmutableMap fields, String query); + protected abstract T createBuilder(ImmutableMap fields, String query); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MultiMatchQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MultiMatchQuery.java index a791bf756b..826e6d7dde 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MultiMatchQuery.java +++ 
b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MultiMatchQuery.java @@ -11,8 +11,8 @@ public class MultiMatchQuery extends MultiFieldQuery { /** - * Default constructor for MultiMatch configures how RelevanceQuery.build() handles - * named arguments. + * Default constructor for MultiMatch configures how RelevanceQuery.build() handles named + * arguments. */ public MultiMatchQuery() { super(FunctionParameterRepository.MultiMatchQueryBuildActions); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/NoFieldQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/NoFieldQuery.java index 1467cf8e4b..ba79147c8c 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/NoFieldQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/NoFieldQuery.java @@ -7,7 +7,6 @@ import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.stream.Collectors; import org.opensearch.index.query.QueryBuilder; import org.opensearch.sql.common.antlr.SyntaxCheckException; @@ -34,36 +33,39 @@ protected void ignoreArguments(List arguments) { protected void checkValidArguments(String argNormalized, T queryBuilder) { if (!getQueryBuildActions().containsKey(argNormalized)) { throw new SemanticCheckException( - String.format("Parameter %s is invalid for %s function.", - argNormalized, getQueryName())); + String.format("Parameter %s is invalid for %s function.", argNormalized, getQueryName())); } } + /** - * Override build function because RelevanceQuery requires 2 fields, - * but NoFieldQuery must have no fields. + * Override build function because RelevanceQuery requires 2 fields, but NoFieldQuery must have no + * fields. * * @param func : Contains function name and passed in arguments. 
* @return : QueryBuilder object */ - @Override public QueryBuilder build(FunctionExpression func) { - var arguments = func.getArguments().stream().map( - a -> (NamedArgumentExpression) a).collect(Collectors.toList()); + var arguments = + func.getArguments().stream() + .map(a -> (NamedArgumentExpression) a) + .collect(Collectors.toList()); if (arguments.size() < 1) { - throw new SyntaxCheckException(String.format( - "%s requires at least one parameter", func.getFunctionName())); + throw new SyntaxCheckException( + String.format("%s requires at least one parameter", func.getFunctionName())); } return loadArguments(arguments); } - @Override public T createQueryBuilder(List arguments) { // Extract 'query' - var query = arguments.stream().filter(a -> a.getArgName().equalsIgnoreCase("query")).findFirst() - .orElseThrow(() -> new SemanticCheckException("'query' parameter is missing")); + var query = + arguments.stream() + .filter(a -> a.getArgName().equalsIgnoreCase("query")) + .findFirst() + .orElseThrow(() -> new SemanticCheckException("'query' parameter is missing")); return createBuilder(query.getValue().valueOf().stringValue()); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryQuery.java index 35d5a43a41..0346b7712e 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryQuery.java @@ -8,16 +8,14 @@ import org.opensearch.index.query.QueryBuilders; import org.opensearch.index.query.QueryStringQueryBuilder; -/** - * Class for Lucene query that builds the 'query' query. - */ +/** Class for Lucene query that builds the 'query' query. 
*/ public class QueryQuery extends NoFieldQuery { private final String queryQueryName = "query"; /** - * Default constructor for QueryQuery configures how RelevanceQuery.build() handles - * named arguments by calling the constructor of QueryStringQuery. + * Default constructor for QueryQuery configures how RelevanceQuery.build() handles named + * arguments by calling the constructor of QueryStringQuery. */ public QueryQuery() { super(FunctionParameterRepository.QueryStringQueryBuildActions); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryStringQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryStringQuery.java index 43131baa3e..410c55cea6 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryStringQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/QueryStringQuery.java @@ -9,13 +9,11 @@ import org.opensearch.index.query.QueryBuilders; import org.opensearch.index.query.QueryStringQueryBuilder; -/** - * Class for Lucene query that builds the query_string query. - */ +/** Class for Lucene query that builds the query_string query. */ public class QueryStringQuery extends MultiFieldQuery { /** - * Default constructor for QueryString configures how RelevanceQuery.build() handles - * named arguments. + * Default constructor for QueryString configures how RelevanceQuery.build() handles named + * arguments. 
*/ public QueryStringQuery() { super(FunctionParameterRepository.QueryStringQueryBuildActions); @@ -29,8 +27,8 @@ public QueryStringQuery() { * @return : Builder for query_string query */ @Override - protected QueryStringQueryBuilder createBuilder(ImmutableMap fields, - String query) { + protected QueryStringQueryBuilder createBuilder( + ImmutableMap fields, String query) { return QueryBuilders.queryStringQuery(query).fields(fields); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQuery.java index b8641a5c0b..87faf320ec 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQuery.java @@ -20,37 +20,39 @@ import org.opensearch.sql.expression.NamedArgumentExpression; import org.opensearch.sql.opensearch.storage.script.filter.lucene.LuceneQuery; -/** - * Base class for query abstraction that builds a relevance query from function expression. - */ +/** Base class for query abstraction that builds a relevance query from function expression. 
*/ @RequiredArgsConstructor public abstract class RelevanceQuery extends LuceneQuery { - @Getter - private final Map> queryBuildActions; + @Getter private final Map> queryBuildActions; protected void ignoreArguments(List arguments) { - arguments.removeIf(a -> a.getArgName().equalsIgnoreCase("field") - || a.getArgName().equalsIgnoreCase("fields") - || a.getArgName().equalsIgnoreCase("query")); + arguments.removeIf( + a -> + a.getArgName().equalsIgnoreCase("field") + || a.getArgName().equalsIgnoreCase("fields") + || a.getArgName().equalsIgnoreCase("query")); } protected void checkValidArguments(String argNormalized, T queryBuilder) { if (!queryBuildActions.containsKey(argNormalized)) { throw new SemanticCheckException( - String.format("Parameter %s is invalid for %s function.", - argNormalized, queryBuilder.getWriteableName())); + String.format( + "Parameter %s is invalid for %s function.", + argNormalized, queryBuilder.getWriteableName())); } } protected T loadArguments(List arguments) throws SemanticCheckException { // Aggregate parameters by name, so getting a Map - arguments.stream().collect(Collectors.groupingBy(a -> a.getArgName().toLowerCase())) - .forEach((k, v) -> { - if (v.size() > 1) { - throw new SemanticCheckException( - String.format("Parameter '%s' can only be specified once.", k)); - } - }); + arguments.stream() + .collect(Collectors.groupingBy(a -> a.getArgName().toLowerCase())) + .forEach( + (k, v) -> { + if (v.size() > 1) { + throw new SemanticCheckException( + String.format("Parameter '%s' can only be specified once.", k)); + } + }); T queryBuilder = createQueryBuilder(arguments); @@ -63,9 +65,7 @@ protected T loadArguments(List arguments) throws Semant checkValidArguments(argNormalized, queryBuilder); - (Objects.requireNonNull( - queryBuildActions - .get(argNormalized))) + (Objects.requireNonNull(queryBuildActions.get(argNormalized))) .apply(queryBuilder, arg.getValue().valueOf()); } @@ -74,15 +74,16 @@ protected T loadArguments(List arguments) 
throws Semant @Override public QueryBuilder build(FunctionExpression func) { - var arguments = func.getArguments().stream() - .map(a -> (NamedArgumentExpression)a).collect(Collectors.toList()); + var arguments = + func.getArguments().stream() + .map(a -> (NamedArgumentExpression) a) + .collect(Collectors.toList()); if (arguments.size() < 2) { throw new SyntaxCheckException( String.format("%s requires at least two parameters", getQueryName())); } return loadArguments(arguments); - } protected abstract T createQueryBuilder(List arguments); @@ -90,12 +91,10 @@ public QueryBuilder build(FunctionExpression func) { protected abstract String getQueryName(); /** - * Convenience interface for a function that updates a QueryBuilder - * based on ExprValue. + * Convenience interface for a function that updates a QueryBuilder based on ExprValue. * * @param Concrete query builder */ - protected interface QueryBuilderStep extends - BiFunction { - } + protected interface QueryBuilderStep + extends BiFunction {} } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SimpleQueryStringQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SimpleQueryStringQuery.java index 157921572a..86dd44c118 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SimpleQueryStringQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SimpleQueryStringQuery.java @@ -11,16 +11,16 @@ public class SimpleQueryStringQuery extends MultiFieldQuery { /** - * Default constructor for SimpleQueryString configures how RelevanceQuery.build() handles - * named arguments. + * Default constructor for SimpleQueryString configures how RelevanceQuery.build() handles named + * arguments. 
*/ public SimpleQueryStringQuery() { super(FunctionParameterRepository.SimpleQueryStringQueryBuildActions); } @Override - protected SimpleQueryStringBuilder createBuilder(ImmutableMap fields, - String query) { + protected SimpleQueryStringBuilder createBuilder( + ImmutableMap fields, String query) { return QueryBuilders.simpleQueryStringQuery(query).fields(fields); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQuery.java index ec110dfd8b..086aaddc5e 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQuery.java @@ -26,18 +26,20 @@ public SingleFieldQuery(Map> queryBuildActions) { @Override protected T createQueryBuilder(List arguments) { // Extract 'field' and 'query' - var field = arguments.stream() - .filter(a -> a.getArgName().equalsIgnoreCase("field")) - .findFirst() - .orElseThrow(() -> new SemanticCheckException("'field' parameter is missing.")); + var field = + arguments.stream() + .filter(a -> a.getArgName().equalsIgnoreCase("field")) + .findFirst() + .orElseThrow(() -> new SemanticCheckException("'field' parameter is missing.")); - var query = arguments.stream() - .filter(a -> a.getArgName().equalsIgnoreCase("query")) - .findFirst() - .orElseThrow(() -> new SemanticCheckException("'query' parameter is missing")); + var query = + arguments.stream() + .filter(a -> a.getArgName().equalsIgnoreCase("query")) + .findFirst() + .orElseThrow(() -> new SemanticCheckException("'query' parameter is missing")); return createBuilder( - ((ReferenceExpression)field.getValue()).getAttr(), + ((ReferenceExpression) field.getValue()).getAttr(), query.getValue().valueOf().stringValue()); } diff --git 
a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/WildcardQuery.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/WildcardQuery.java index 9fd37e3de7..7b9887e516 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/WildcardQuery.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/WildcardQuery.java @@ -3,20 +3,17 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter.lucene.relevance; import org.opensearch.index.query.QueryBuilders; import org.opensearch.index.query.WildcardQueryBuilder; import org.opensearch.sql.opensearch.storage.script.StringUtils; -/** - * Lucene query that builds wildcard query. - */ +/** Lucene query that builds wildcard query. */ public class WildcardQuery extends SingleFieldQuery { /** - * Default constructor for WildcardQuery configures how RelevanceQuery.build() handles - * named arguments. + * Default constructor for WildcardQuery configures how RelevanceQuery.build() handles named + * arguments. 
*/ public WildcardQuery() { super(FunctionParameterRepository.WildcardQueryBuildActions); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilder.java index 62c923832c..7669b569d4 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.sort; import static org.opensearch.sql.analysis.NestedAnalyzer.generatePath; @@ -20,26 +19,19 @@ import org.opensearch.sql.expression.Expression; import org.opensearch.sql.expression.FunctionExpression; import org.opensearch.sql.expression.ReferenceExpression; -import org.opensearch.sql.expression.function.BuiltinFunctionName; import org.opensearch.sql.opensearch.data.type.OpenSearchTextType; -/** - * Builder of {@link SortBuilder}. - */ +/** Builder of {@link SortBuilder}. */ public class SortQueryBuilder { - /** - * The mapping between Core Engine sort order and OpenSearch sort order. - */ + /** The mapping between Core Engine sort order and OpenSearch sort order. */ private Map sortOrderMap = new ImmutableMap.Builder() .put(Sort.SortOrder.ASC, SortOrder.ASC) .put(Sort.SortOrder.DESC, SortOrder.DESC) .build(); - /** - * The mapping between Core Engine null order and OpenSearch null order. - */ + /** The mapping between Core Engine null order and OpenSearch null order. 
*/ private Map missingMap = new ImmutableMap.Builder() .put(Sort.NullOrder.NULL_FIRST, "_first") @@ -62,14 +54,15 @@ public SortBuilder build(Expression expression, Sort.SortOption option) { } else if (isNestedFunction(expression)) { validateNestedArgs((FunctionExpression) expression); - String orderByName = ((FunctionExpression)expression).getArguments().get(0).toString(); + String orderByName = ((FunctionExpression) expression).getArguments().get(0).toString(); // Generate path if argument not supplied in function. - ReferenceExpression path = ((FunctionExpression)expression).getArguments().size() == 2 - ? (ReferenceExpression) ((FunctionExpression)expression).getArguments().get(1) - : generatePath(orderByName); + ReferenceExpression path = + ((FunctionExpression) expression).getArguments().size() == 2 + ? (ReferenceExpression) ((FunctionExpression) expression).getArguments().get(1) + : generatePath(orderByName); return SortBuilders.fieldSort(orderByName) - .order(sortOrderMap.get(option.getSortOrder())) - .setNestedSort(new NestedSortBuilder(path.toString())); + .order(sortOrderMap.get(option.getSortOrder())) + .setNestedSort(new NestedSortBuilder(path.toString())); } else { throw new IllegalStateException("unsupported expression " + expression.getClass()); } @@ -77,29 +70,26 @@ public SortBuilder build(Expression expression, Sort.SortOption option) { /** * Validate semantics for arguments in nested function. + * * @param nestedFunc Nested function expression. 
*/ private void validateNestedArgs(FunctionExpression nestedFunc) { if (nestedFunc.getArguments().size() < 1 || nestedFunc.getArguments().size() > 2) { throw new IllegalArgumentException( - "nested function supports 2 parameters (field, path) or 1 parameter (field)" - ); + "nested function supports 2 parameters (field, path) or 1 parameter (field)"); } for (Expression arg : nestedFunc.getArguments()) { if (!(arg instanceof ReferenceExpression)) { throw new IllegalArgumentException( - String.format("Illegal nested field name: %s", - arg.toString() - ) - ); + String.format("Illegal nested field name: %s", arg.toString())); } } } private FieldSortBuilder fieldBuild(ReferenceExpression ref, Sort.SortOption option) { return SortBuilders.fieldSort( - OpenSearchTextType.convertTextToKeyword(ref.getAttr(), ref.type())) + OpenSearchTextType.convertTextToKeyword(ref.getAttr(), ref.type())) .order(sortOrderMap.get(option.getSortOrder())) .missing(missingMap.get(option.getNullOrder())); } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/serialization/DefaultExpressionSerializer.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/serialization/DefaultExpressionSerializer.java index dc67da9de5..aa78d60a6e 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/serialization/DefaultExpressionSerializer.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/serialization/DefaultExpressionSerializer.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.serialization; import java.io.ByteArrayInputStream; @@ -14,9 +13,7 @@ import java.util.Base64; import org.opensearch.sql.expression.Expression; -/** - * Default serializer that (de-)serialize expressions by JDK serialization. - */ +/** Default serializer that (de-)serialize expressions by JDK serialization. 
*/ public class DefaultExpressionSerializer implements ExpressionSerializer { @Override @@ -42,5 +39,4 @@ public Expression deserialize(String code) { throw new IllegalStateException("Failed to deserialize expression code: " + code, e); } } - } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/serialization/ExpressionSerializer.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/serialization/ExpressionSerializer.java index b7caeb30f8..9c9779696c 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/serialization/ExpressionSerializer.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/serialization/ExpressionSerializer.java @@ -3,28 +3,26 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.serialization; import org.opensearch.sql.expression.Expression; -/** - * Expression serializer that (de-)serializes expression object. - */ +/** Expression serializer that (de-)serializes expression object. */ public interface ExpressionSerializer { /** * Serialize an expression. - * @param expr expression - * @return serialized string + * + * @param expr expression + * @return serialized string */ String serialize(Expression expr); /** * Deserialize an expression. 
- * @param code serialized code - * @return original expression object + * + * @param code serialized code + * @return original expression object */ Expression deserialize(String code); - } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndex.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndex.java index 7b6efeeba4..b1b2081f94 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndex.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndex.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.system; import static org.opensearch.sql.utils.SystemIndexUtils.systemTable; @@ -24,17 +23,12 @@ import org.opensearch.sql.storage.Table; import org.opensearch.sql.utils.SystemIndexUtils; -/** - * OpenSearch System Index Table Implementation. - */ +/** OpenSearch System Index Table Implementation. */ public class OpenSearchSystemIndex implements Table { - /** - * System Index Name. - */ + /** System Index Name. 
*/ private final Pair systemIndexBundle; - public OpenSearchSystemIndex( - OpenSearchClient client, String indexName) { + public OpenSearchSystemIndex(OpenSearchClient client, String indexName) { this.systemIndexBundle = buildIndexBundle(client, indexName); } @@ -61,8 +55,7 @@ public PhysicalPlan implement(LogicalPlan plan) { @VisibleForTesting @RequiredArgsConstructor - public class OpenSearchSystemIndexDefaultImplementor - extends DefaultImplementor { + public class OpenSearchSystemIndexDefaultImplementor extends DefaultImplementor { @Override public PhysicalPlan visitRelation(LogicalRelation node, Object context) { @@ -79,10 +72,11 @@ private Pair buildIndexBun OpenSearchClient client, String indexName) { SystemIndexUtils.SystemTable systemTable = systemTable(indexName); if (systemTable.isSystemInfoTable()) { - return Pair.of(OpenSearchSystemIndexSchema.SYS_TABLE_TABLES, - new OpenSearchCatIndicesRequest(client)); + return Pair.of( + OpenSearchSystemIndexSchema.SYS_TABLE_TABLES, new OpenSearchCatIndicesRequest(client)); } else { - return Pair.of(OpenSearchSystemIndexSchema.SYS_TABLE_MAPPINGS, + return Pair.of( + OpenSearchSystemIndexSchema.SYS_TABLE_MAPPINGS, new OpenSearchDescribeIndexRequest(client, systemTable.getTableName())); } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScan.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScan.java index ee377263c1..57cdd52985 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScan.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScan.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.system; import java.util.Iterator; @@ -14,21 +13,15 @@ import org.opensearch.sql.opensearch.request.system.OpenSearchSystemRequest; import 
org.opensearch.sql.storage.TableScanOperator; -/** - * OpenSearch index scan operator. - */ +/** OpenSearch index scan operator. */ @RequiredArgsConstructor @EqualsAndHashCode(onlyExplicitlyIncluded = true, callSuper = false) @ToString(onlyExplicitlyIncluded = true) public class OpenSearchSystemIndexScan extends TableScanOperator { - /** - * OpenSearch request. - */ + /** OpenSearch request. */ private final OpenSearchSystemRequest request; - /** - * Search response for current batch. - */ + /** Search response for current batch. */ private Iterator iterator; @Override diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexSchema.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexSchema.java index aa09ff4660..781431ea67 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexSchema.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexSchema.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.system; import static org.opensearch.sql.data.type.ExprCoreType.STRING; @@ -15,53 +14,52 @@ import lombok.RequiredArgsConstructor; import org.opensearch.sql.data.type.ExprType; -/** - * Definition of the system table schema. - */ +/** Definition of the system table schema. 
*/ @Getter @RequiredArgsConstructor public enum OpenSearchSystemIndexSchema { - - SYS_TABLE_TABLES(new LinkedHashMap() {{ - put("TABLE_CAT", STRING); - put("TABLE_SCHEM", STRING); - put("TABLE_NAME", STRING); - put("TABLE_TYPE", STRING); - put("REMARKS", STRING); - put("TYPE_CAT", STRING); - put("TYPE_SCHEM", STRING); - put("TYPE_NAME", STRING); - put("SELF_REFERENCING_COL_NAME", STRING); - put("REF_GENERATION", STRING); - } - } - ), - SYS_TABLE_MAPPINGS(new ImmutableMap.Builder() - .put("TABLE_CAT", STRING) - .put("TABLE_SCHEM", STRING) - .put("TABLE_NAME", STRING) - .put("COLUMN_NAME", STRING) - .put("DATA_TYPE", STRING) - .put("TYPE_NAME", STRING) - .put("COLUMN_SIZE", STRING) - .put("BUFFER_LENGTH", STRING) - .put("DECIMAL_DIGITS", STRING) - .put("NUM_PREC_RADIX", STRING) - .put("NULLABLE", STRING) - .put("REMARKS", STRING) - .put("COLUMN_DEF", STRING) - .put("SQL_DATA_TYPE", STRING) - .put("SQL_DATETIME_SUB", STRING) - .put("CHAR_OCTET_LENGTH", STRING) - .put("ORDINAL_POSITION", STRING) - .put("IS_NULLABLE", STRING) - .put("SCOPE_CATALOG", STRING) - .put("SCOPE_SCHEMA", STRING) - .put("SCOPE_TABLE", STRING) - .put("SOURCE_DATA_TYPE", STRING) - .put("IS_AUTOINCREMENT", STRING) - .put("IS_GENERATEDCOLUMN", STRING) - .build()); + SYS_TABLE_TABLES( + new LinkedHashMap() { + { + put("TABLE_CAT", STRING); + put("TABLE_SCHEM", STRING); + put("TABLE_NAME", STRING); + put("TABLE_TYPE", STRING); + put("REMARKS", STRING); + put("TYPE_CAT", STRING); + put("TYPE_SCHEM", STRING); + put("TYPE_NAME", STRING); + put("SELF_REFERENCING_COL_NAME", STRING); + put("REF_GENERATION", STRING); + } + }), + SYS_TABLE_MAPPINGS( + new ImmutableMap.Builder() + .put("TABLE_CAT", STRING) + .put("TABLE_SCHEM", STRING) + .put("TABLE_NAME", STRING) + .put("COLUMN_NAME", STRING) + .put("DATA_TYPE", STRING) + .put("TYPE_NAME", STRING) + .put("COLUMN_SIZE", STRING) + .put("BUFFER_LENGTH", STRING) + .put("DECIMAL_DIGITS", STRING) + .put("NUM_PREC_RADIX", STRING) + .put("NULLABLE", STRING) + 
.put("REMARKS", STRING) + .put("COLUMN_DEF", STRING) + .put("SQL_DATA_TYPE", STRING) + .put("SQL_DATETIME_SUB", STRING) + .put("CHAR_OCTET_LENGTH", STRING) + .put("ORDINAL_POSITION", STRING) + .put("IS_NULLABLE", STRING) + .put("SCOPE_CATALOG", STRING) + .put("SCOPE_SCHEMA", STRING) + .put("SCOPE_TABLE", STRING) + .put("SOURCE_DATA_TYPE", STRING) + .put("IS_AUTOINCREMENT", STRING) + .put("IS_GENERATEDCOLUMN", STRING) + .build()); private final Map mapping; } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClientTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClientTest.java index ddf5f27f2a..040b7d2759 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClientTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClientTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.client; import static org.junit.jupiter.api.Assertions.assertAll; @@ -29,7 +28,6 @@ import com.google.common.io.Resources; import java.io.IOException; import java.net.URL; -import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -61,7 +59,6 @@ import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.xcontent.DeprecationHandler; import org.opensearch.core.xcontent.NamedXContentRegistry; @@ -91,17 +88,14 @@ class OpenSearchNodeClientTest { @Mock(answer = RETURNS_DEEP_STUBS) private NodeClient nodeClient; - @Mock - private OpenSearchExprValueFactory factory; + @Mock private OpenSearchExprValueFactory factory; - @Mock - private SearchHit searchHit; + @Mock private SearchHit searchHit; - @Mock - private GetIndexResponse 
indexResponse; + @Mock private GetIndexResponse indexResponse; - private final ExprTupleValue exprTupleValue = ExprTupleValue.fromExprValueMap( - Map.of("id", new ExprIntegerValue(1))); + private final ExprTupleValue exprTupleValue = + ExprTupleValue.fromExprValueMap(Map.of("id", new ExprIntegerValue(1))); private OpenSearchClient client; @@ -112,8 +106,7 @@ void setUp() { @Test void is_index_exist() { - when(nodeClient.admin().indices() - .exists(any(IndicesExistsRequest.class)).actionGet()) + when(nodeClient.admin().indices().exists(any(IndicesExistsRequest.class)).actionGet()) .thenReturn(new IndicesExistsResponse(true)); assertTrue(client.exists("test")); @@ -122,8 +115,7 @@ void is_index_exist() { @Test void is_index_not_exist() { String indexName = "test"; - when(nodeClient.admin().indices() - .exists(any(IndicesExistsRequest.class)).actionGet()) + when(nodeClient.admin().indices().exists(any(IndicesExistsRequest.class)).actionGet()) .thenReturn(new IndicesExistsResponse(false)); assertFalse(client.exists(indexName)); @@ -139,11 +131,8 @@ void is_index_exist_with_exception() { @Test void create_index() { String indexName = "test"; - Map mappings = ImmutableMap.of( - "properties", - ImmutableMap.of("name", "text")); - when(nodeClient.admin().indices() - .create(any(CreateIndexRequest.class)).actionGet()) + Map mappings = ImmutableMap.of("properties", ImmutableMap.of("name", "text")); + when(nodeClient.admin().indices().create(any(CreateIndexRequest.class)).actionGet()) .thenReturn(new CreateIndexResponse(true, true, indexName)); client.createIndex(indexName, mappings); @@ -153,8 +142,7 @@ void create_index() { void create_index_with_exception() { when(nodeClient.admin().indices().create(any())).thenThrow(RuntimeException.class); - assertThrows(IllegalStateException.class, - () -> client.createIndex("test", ImmutableMap.of())); + assertThrows(IllegalStateException.class, () -> client.createIndex("test", ImmutableMap.of())); } @Test @@ -174,58 +162,57 @@ void 
get_index_mappings() throws IOException { () -> assertEquals(10, mapping.size()), () -> assertEquals(17, parsedTypes.size()), () -> assertEquals("TEXT", mapping.get("address").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("address")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Text), parsedTypes.get("address")), () -> assertEquals("INTEGER", mapping.get("age").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Integer), - parsedTypes.get("age")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Integer), parsedTypes.get("age")), () -> assertEquals("DOUBLE", mapping.get("balance").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Double), - parsedTypes.get("balance")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Double), parsedTypes.get("balance")), () -> assertEquals("KEYWORD", mapping.get("city").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Keyword), - parsedTypes.get("city")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Keyword), parsedTypes.get("city")), () -> assertEquals("DATE", mapping.get("birthday").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Date), - parsedTypes.get("birthday")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Date), parsedTypes.get("birthday")), () -> assertEquals("GEO_POINT", mapping.get("location").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.GeoPoint), - parsedTypes.get("location")), + () -> + assertEquals(OpenSearchTextType.of(MappingType.GeoPoint), parsedTypes.get("location")), // unknown type isn't parsed and ignored () -> assertFalse(mapping.containsKey("new_field")), () -> assertNull(parsedTypes.get("new_field")), () -> assertEquals("TEXT", mapping.get("field with spaces").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("field with spaces")), + () -> 
+ assertEquals( + OpenSearchTextType.of(MappingType.Text), parsedTypes.get("field with spaces")), () -> assertEquals("TEXT", mapping.get("employer").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("employer")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Text), parsedTypes.get("employer")), // `employer` is a `text` with `fields` - () -> assertTrue(((OpenSearchTextType)parsedTypes.get("employer")).getFields().size() > 0), + () -> assertTrue(((OpenSearchTextType) parsedTypes.get("employer")).getFields().size() > 0), () -> assertEquals("NESTED", mapping.get("projects").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Nested), - parsedTypes.get("projects")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Boolean), - parsedTypes.get("projects.active")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Date), - parsedTypes.get("projects.release")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Nested), - parsedTypes.get("projects.members")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("projects.members.name")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Nested), parsedTypes.get("projects")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Boolean), parsedTypes.get("projects.active")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Date), parsedTypes.get("projects.release")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Nested), parsedTypes.get("projects.members")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Text), parsedTypes.get("projects.members.name")), () -> assertEquals("OBJECT", mapping.get("manager").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Object), - parsedTypes.get("manager")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("manager.name")), + () -> 
assertEquals(OpenSearchTextType.of(MappingType.Object), parsedTypes.get("manager")), + () -> + assertEquals(OpenSearchTextType.of(MappingType.Text), parsedTypes.get("manager.name")), // `manager.name` is a `text` with `fields` - () -> assertTrue(((OpenSearchTextType)parsedTypes.get("manager.name")) - .getFields().size() > 0), - () -> assertEquals(OpenSearchTextType.of(MappingType.Keyword), - parsedTypes.get("manager.address")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Long), - parsedTypes.get("manager.salary")) - ); + () -> + assertTrue( + ((OpenSearchTextType) parsedTypes.get("manager.name")).getFields().size() > 0), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Keyword), parsedTypes.get("manager.address")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Long), parsedTypes.get("manager.salary"))); } @Test @@ -249,11 +236,8 @@ void get_index_mappings_with_IOException() { @Test void get_index_mappings_with_non_exist_index() { - when(nodeClient.admin().indices() - .prepareGetMappings(any()) - .setLocal(anyBoolean()) - .get() - ).thenThrow(IndexNotFoundException.class); + when(nodeClient.admin().indices().prepareGetMappings(any()).setLocal(anyBoolean()).get()) + .thenThrow(IndexNotFoundException.class); assertThrows(IndexNotFoundException.class, () -> client.getIndexMappings("non_exist_index")); } @@ -309,9 +293,7 @@ void search() { when(searchResponse.getHits()) .thenReturn( new SearchHits( - new SearchHit[] {searchHit}, - new TotalHits(1L, TotalHits.Relation.EQUAL_TO), - 1.0F)); + new SearchHit[] {searchHit}, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0F)); when(searchHit.getSourceAsString()).thenReturn("{\"id\", 1}"); when(searchHit.getInnerHits()).thenReturn(null); when(factory.construct(any(), anyBoolean())).thenReturn(exprTupleValue); @@ -322,9 +304,13 @@ void search() { when(scrollResponse.getHits()).thenReturn(SearchHits.empty()); // Verify response for first scroll request - OpenSearchScrollRequest request = new 
OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of("id")); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of("id")); OpenSearchResponse response1 = client.search(request); assertFalse(response1.isEmpty()); @@ -357,9 +343,13 @@ void cleanup() { when(requestBuilder.addScrollId(any())).thenReturn(requestBuilder); when(requestBuilder.get()).thenReturn(null); - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()); request.setScrollId("scroll123"); // Enforce cleaning by setting a private field. 
FieldUtils.writeField(request, "needClean", true, true); @@ -374,9 +364,13 @@ void cleanup() { @Test void cleanup_without_scrollId() { - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()); client.cleanup(request); verify(nodeClient, never()).prepareClearScroll(); } @@ -386,9 +380,13 @@ void cleanup_without_scrollId() { void cleanup_rethrows_exception() { when(nodeClient.prepareClearScroll()).thenThrow(new RuntimeException()); - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()); request.setScrollId("scroll123"); // Enforce cleaning by setting a private field. 
FieldUtils.writeField(request, "needClean", true, true); @@ -400,10 +398,8 @@ void get_indices() { AliasMetadata aliasMetadata = mock(AliasMetadata.class); final var openMap = Map.of("index", List.of(aliasMetadata)); when(aliasMetadata.alias()).thenReturn("index_alias"); - when(nodeClient.admin().indices() - .prepareGetIndex() - .setLocal(true) - .get()).thenReturn(indexResponse); + when(nodeClient.admin().indices().prepareGetIndex().setLocal(true).get()) + .thenReturn(indexResponse); when(indexResponse.getIndices()).thenReturn(new String[] {"index"}); when(indexResponse.aliases()).thenReturn(openMap); @@ -429,10 +425,8 @@ void ml() { public void mockNodeClientIndicesMappings(String indexName, String mappings) { GetMappingsResponse mockResponse = mock(GetMappingsResponse.class); MappingMetadata emptyMapping = mock(MappingMetadata.class); - when(nodeClient.admin().indices() - .prepareGetMappings(any()) - .setLocal(anyBoolean()) - .get()).thenReturn(mockResponse); + when(nodeClient.admin().indices().prepareGetMappings(any()).setLocal(anyBoolean()).get()) + .thenReturn(mockResponse); try { Map metadata; if (mappings.isEmpty()) { @@ -447,13 +441,12 @@ public void mockNodeClientIndicesMappings(String indexName, String mappings) { } } - private void mockNodeClientSettings(String indexName, String indexMetadata) - throws IOException { + private void mockNodeClientSettings(String indexName, String indexMetadata) throws IOException { GetSettingsResponse mockResponse = mock(GetSettingsResponse.class); when(nodeClient.admin().indices().prepareGetSettings(any()).setLocal(anyBoolean()).get()) .thenReturn(mockResponse); - Map metadata = Map.of(indexName, - IndexMetadata.fromXContent(createParser(indexMetadata)).getSettings()); + Map metadata = + Map.of(indexName, IndexMetadata.fromXContent(createParser(indexMetadata)).getSettings()); when(mockResponse.getIndexToSettings()).thenReturn(metadata); } diff --git 
a/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchRestClientTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchRestClientTest.java index 409596910e..99201aae4f 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchRestClientTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchRestClientTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.client; import static org.junit.jupiter.api.Assertions.assertAll; @@ -79,22 +78,20 @@ class OpenSearchRestClientTest { private static final String TEST_MAPPING_FILE = "mappings/accounts.json"; + @Mock(answer = RETURNS_DEEP_STUBS) private RestHighLevelClient restClient; private OpenSearchClient client; - @Mock - private OpenSearchExprValueFactory factory; + @Mock private OpenSearchExprValueFactory factory; - @Mock - private SearchHit searchHit; + @Mock private SearchHit searchHit; - @Mock - private GetIndexResponse getIndexResponse; + @Mock private GetIndexResponse getIndexResponse; - private final ExprTupleValue exprTupleValue = ExprTupleValue.fromExprValueMap( - Map.of("id", new ExprIntegerValue(1))); + private final ExprTupleValue exprTupleValue = + ExprTupleValue.fromExprValueMap(Map.of("id", new ExprIntegerValue(1))); @BeforeEach void setUp() { @@ -103,8 +100,9 @@ void setUp() { @Test void is_index_exist() throws IOException { - when(restClient.indices() - .exists(any(), any())) // use any() because missing equals() in GetIndexRequest + when(restClient + .indices() + .exists(any(), any())) // use any() because missing equals() in GetIndexRequest .thenReturn(true); assertTrue(client.exists("test")); @@ -112,8 +110,9 @@ void is_index_exist() throws IOException { @Test void is_index_not_exist() throws IOException { - when(restClient.indices() - .exists(any(), any())) // use any() because missing equals() in GetIndexRequest + when(restClient + .indices() + 
.exists(any(), any())) // use any() because missing equals() in GetIndexRequest .thenReturn(false); assertFalse(client.exists("test")); @@ -129,11 +128,8 @@ void is_index_exist_with_exception() throws IOException { @Test void create_index() throws IOException { String indexName = "test"; - Map mappings = ImmutableMap.of( - "properties", - ImmutableMap.of("name", "text")); - when(restClient.indices() - .create(any(), any())) + Map mappings = ImmutableMap.of("properties", ImmutableMap.of("name", "text")); + when(restClient.indices().create(any(), any())) .thenReturn(new CreateIndexResponse(true, true, indexName)); client.createIndex(indexName, mappings); @@ -142,8 +138,7 @@ void create_index() throws IOException { @Test void create_index_with_IOException() throws IOException { when(restClient.indices().create(any(), any())).thenThrow(IOException.class); - assertThrows(IllegalStateException.class, - () -> client.createIndex("test", ImmutableMap.of())); + assertThrows(IllegalStateException.class, () -> client.createIndex("test", ImmutableMap.of())); } @Test @@ -167,58 +162,57 @@ void get_index_mappings() throws IOException { () -> assertEquals(10, mapping.size()), () -> assertEquals(17, parsedTypes.size()), () -> assertEquals("TEXT", mapping.get("address").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("address")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Text), parsedTypes.get("address")), () -> assertEquals("INTEGER", mapping.get("age").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Integer), - parsedTypes.get("age")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Integer), parsedTypes.get("age")), () -> assertEquals("DOUBLE", mapping.get("balance").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Double), - parsedTypes.get("balance")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Double), parsedTypes.get("balance")), () -> 
assertEquals("KEYWORD", mapping.get("city").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Keyword), - parsedTypes.get("city")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Keyword), parsedTypes.get("city")), () -> assertEquals("DATE", mapping.get("birthday").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Date), - parsedTypes.get("birthday")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Date), parsedTypes.get("birthday")), () -> assertEquals("GEO_POINT", mapping.get("location").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.GeoPoint), - parsedTypes.get("location")), + () -> + assertEquals(OpenSearchTextType.of(MappingType.GeoPoint), parsedTypes.get("location")), // unknown type isn't parsed and ignored () -> assertFalse(mapping.containsKey("new_field")), () -> assertNull(parsedTypes.get("new_field")), () -> assertEquals("TEXT", mapping.get("field with spaces").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("field with spaces")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Text), parsedTypes.get("field with spaces")), () -> assertEquals("TEXT", mapping.get("employer").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("employer")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Text), parsedTypes.get("employer")), // `employer` is a `text` with `fields` - () -> assertTrue(((OpenSearchTextType)parsedTypes.get("employer")).getFields().size() > 0), + () -> assertTrue(((OpenSearchTextType) parsedTypes.get("employer")).getFields().size() > 0), () -> assertEquals("NESTED", mapping.get("projects").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Nested), - parsedTypes.get("projects")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Boolean), - parsedTypes.get("projects.active")), - () -> 
assertEquals(OpenSearchTextType.of(MappingType.Date), - parsedTypes.get("projects.release")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Nested), - parsedTypes.get("projects.members")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("projects.members.name")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Nested), parsedTypes.get("projects")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Boolean), parsedTypes.get("projects.active")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Date), parsedTypes.get("projects.release")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Nested), parsedTypes.get("projects.members")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Text), parsedTypes.get("projects.members.name")), () -> assertEquals("OBJECT", mapping.get("manager").legacyTypeName()), - () -> assertEquals(OpenSearchTextType.of(MappingType.Object), - parsedTypes.get("manager")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Text), - parsedTypes.get("manager.name")), + () -> assertEquals(OpenSearchTextType.of(MappingType.Object), parsedTypes.get("manager")), + () -> + assertEquals(OpenSearchTextType.of(MappingType.Text), parsedTypes.get("manager.name")), // `manager.name` is a `text` with `fields` - () -> assertTrue(((OpenSearchTextType)parsedTypes.get("manager.name")) - .getFields().size() > 0), - () -> assertEquals(OpenSearchTextType.of(MappingType.Keyword), - parsedTypes.get("manager.address")), - () -> assertEquals(OpenSearchTextType.of(MappingType.Long), - parsedTypes.get("manager.salary")) - ); + () -> + assertTrue( + ((OpenSearchTextType) parsedTypes.get("manager.name")).getFields().size() > 0), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Keyword), parsedTypes.get("manager.address")), + () -> + assertEquals( + OpenSearchTextType.of(MappingType.Long), parsedTypes.get("manager.salary"))); } @Test @@ -234,14 +228,11 @@ void 
get_index_max_result_windows_settings() throws IOException { Integer maxResultWindow = 1000; GetSettingsResponse response = mock(GetSettingsResponse.class); - Settings maxResultWindowSettings = Settings.builder() - .put("index.max_result_window", maxResultWindow) - .build(); + Settings maxResultWindowSettings = + Settings.builder().put("index.max_result_window", maxResultWindow).build(); Settings emptySettings = Settings.builder().build(); - Map indexToSettings = - mockSettings(indexName, maxResultWindowSettings); - Map indexToDefaultSettings = - mockSettings(indexName, emptySettings); + Map indexToSettings = mockSettings(indexName, maxResultWindowSettings); + Map indexToDefaultSettings = mockSettings(indexName, emptySettings); when(response.getIndexToSettings()).thenReturn(indexToSettings); when(response.getIndexToDefaultSettings()).thenReturn(indexToDefaultSettings); when(restClient.indices().getSettings(any(GetSettingsRequest.class), any())) @@ -258,14 +249,11 @@ void get_index_max_result_windows_default_settings() throws IOException { Integer maxResultWindow = 10000; GetSettingsResponse response = mock(GetSettingsResponse.class); - Settings maxResultWindowSettings = Settings.builder() - .put("index.max_result_window", maxResultWindow) - .build(); + Settings maxResultWindowSettings = + Settings.builder().put("index.max_result_window", maxResultWindow).build(); Settings emptySettings = Settings.builder().build(); - Map indexToSettings = - mockSettings(indexName, emptySettings); - Map indexToDefaultSettings = - mockSettings(indexName, maxResultWindowSettings); + Map indexToSettings = mockSettings(indexName, emptySettings); + Map indexToDefaultSettings = mockSettings(indexName, maxResultWindowSettings); when(response.getIndexToSettings()).thenReturn(indexToSettings); when(response.getIndexToDefaultSettings()).thenReturn(indexToDefaultSettings); when(restClient.indices().getSettings(any(GetSettingsRequest.class), any())) @@ -292,9 +280,7 @@ void search() throws 
IOException { when(searchResponse.getHits()) .thenReturn( new SearchHits( - new SearchHit[] {searchHit}, - new TotalHits(1L, TotalHits.Relation.EQUAL_TO), - 1.0F)); + new SearchHit[] {searchHit}, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0F)); when(searchHit.getSourceAsString()).thenReturn("{\"id\", 1}"); when(searchHit.getInnerHits()).thenReturn(null); when(factory.construct(any(), anyBoolean())).thenReturn(exprTupleValue); @@ -305,9 +291,13 @@ void search() throws IOException { when(scrollResponse.getHits()).thenReturn(SearchHits.empty()); // Verify response for first scroll request - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of("id")); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of("id")); OpenSearchResponse response1 = client.search(request); assertFalse(response1.isEmpty()); @@ -327,9 +317,14 @@ void search_with_IOException() throws IOException { when(restClient.search(any(), any())).thenThrow(new IOException()); assertThrows( IllegalStateException.class, - () -> client.search(new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()))); + () -> + client.search( + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()))); } @Test @@ -349,28 +344,34 @@ void scroll_with_IOException() throws IOException { when(restClient.scroll(any(), any())).thenThrow(new IOException()); // First request run successfully - OpenSearchScrollRequest scrollRequest = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, 
List.of()); + OpenSearchScrollRequest scrollRequest = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()); client.search(scrollRequest); - assertThrows( - IllegalStateException.class, () -> client.search(scrollRequest)); + assertThrows(IllegalStateException.class, () -> client.search(scrollRequest)); } @Test void schedule() { AtomicBoolean isRun = new AtomicBoolean(false); - client.schedule( - () -> isRun.set(true)); + client.schedule(() -> isRun.set(true)); assertTrue(isRun.get()); } @Test @SneakyThrows void cleanup() { - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()); // Enforce cleaning by setting a private field. 
FieldUtils.writeField(request, "needClean", true, true); request.setScrollId("scroll123"); @@ -381,9 +382,13 @@ void cleanup() { @Test void cleanup_without_scrollId() throws IOException { - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()); client.cleanup(request); verify(restClient, never()).clearScroll(any(), any()); } @@ -393,9 +398,13 @@ void cleanup_without_scrollId() throws IOException { void cleanup_with_IOException() { when(restClient.clearScroll(any(), any())).thenThrow(new IOException()); - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), - new SearchSourceBuilder(), factory, List.of()); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), + factory, + List.of()); // Enforce cleaning by setting a private field. 
FieldUtils.writeField(request, "needClean", true, true); request.setScrollId("scroll123"); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataTypeRecognitionTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataTypeRecognitionTest.java index c3a5d13dca..35ad6b7ea6 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataTypeRecognitionTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataTypeRecognitionTest.java @@ -35,8 +35,7 @@ private static Stream types() { Arguments.of("BINARY", new OpenSearchExprBinaryValue("A"), "binary"), Arguments.of("IP", new OpenSearchExprIpValue("A"), "ip"), Arguments.of("TEXT", new TestTextWithFieldValue("Hello World"), "text with fields"), - Arguments.of("GEO_POINT", new OpenSearchExprGeoPointValue(0d, 0d), "geo point") - ); + Arguments.of("GEO_POINT", new OpenSearchExprGeoPointValue(0d, 0d), "geo point")); } private String typeofGetValue(ExprValue input) { @@ -50,8 +49,8 @@ public TestTextWithFieldValue(String value) { @Override public ExprType type() { - return OpenSearchTextType.of(Map.of("words", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))); + return OpenSearchTextType.of( + Map.of("words", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))); } } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataTypeTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataTypeTest.java index 8d69b3d855..b0288dc9a7 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataTypeTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDataTypeTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.type; import static org.junit.jupiter.api.Assertions.assertAll; @@ 
-108,13 +107,9 @@ private static Stream getTestDataWithType() { Arguments.of(MappingType.Date, "date", TIMESTAMP), Arguments.of(MappingType.Object, "object", STRUCT), Arguments.of(MappingType.Nested, "nested", ARRAY), - Arguments.of(MappingType.GeoPoint, "geo_point", - OpenSearchGeoPointType.of()), - Arguments.of(MappingType.Binary, "binary", - OpenSearchBinaryType.of()), - Arguments.of(MappingType.Ip, "ip", - OpenSearchIpType.of()) - ); + Arguments.of(MappingType.GeoPoint, "geo_point", OpenSearchGeoPointType.of()), + Arguments.of(MappingType.Binary, "binary", OpenSearchBinaryType.of()), + Arguments.of(MappingType.Ip, "ip", OpenSearchIpType.of())); } @ParameterizedTest(name = "{1}") @@ -128,8 +123,7 @@ public void of_MappingType(MappingType mappingType, String name, ExprType dataTy assertAll( () -> assertEquals(nameForPPL, type.typeName()), () -> assertEquals(nameForSQL, type.legacyTypeName()), - () -> assertEquals(dataType, type.getExprType()) - ); + () -> assertEquals(dataType, type.getExprType())); } @ParameterizedTest(name = "{0}") @@ -168,15 +162,10 @@ public void of_OpenSearchDataType_from_MappingType(OpenSearchDataType.MappingTyp public void types_but_clones_are_singletons_and_cached() { var type = OpenSearchDataType.of(MappingType.Object); var alsoType = OpenSearchDataType.of(MappingType.Object); - Map properties = Map.of( - "properties", - Map.of("number", Map.of("type", "integer"))); - var typeWithProperties = OpenSearchDataType.of( - MappingType.Object, - properties); - var typeWithFields = OpenSearchDataType.of( - MappingType.Text, - Map.of()); + Map properties = + Map.of("properties", Map.of("number", Map.of("type", "integer"))); + var typeWithProperties = OpenSearchDataType.of(MappingType.Object, properties); + var typeWithFields = OpenSearchDataType.of(MappingType.Text, Map.of()); var cloneType = type.cloneEmpty(); assertAll( @@ -187,22 +176,20 @@ public void types_but_clones_are_singletons_and_cached() { () -> assertNotSame(typeWithProperties, 
typeWithProperties.cloneEmpty()), () -> assertNotSame(typeWithFields, typeWithFields.cloneEmpty()), () -> assertNotSame(dateType, dateType.cloneEmpty()), - () -> assertSame(OpenSearchDataType.of(MappingType.Text), - OpenSearchTextType.of()), - () -> assertSame(OpenSearchDataType.of(MappingType.Binary), - OpenSearchBinaryType.of()), - () -> assertSame(OpenSearchDataType.of(MappingType.GeoPoint), - OpenSearchGeoPointType.of()), - () -> assertSame(OpenSearchDataType.of(MappingType.Ip), - OpenSearchIpType.of()), - () -> assertNotSame(OpenSearchTextType.of(), - OpenSearchTextType.of(Map.of("properties", OpenSearchDataType.of(INTEGER)))), + () -> assertSame(OpenSearchDataType.of(MappingType.Text), OpenSearchTextType.of()), + () -> assertSame(OpenSearchDataType.of(MappingType.Binary), OpenSearchBinaryType.of()), + () -> assertSame(OpenSearchDataType.of(MappingType.GeoPoint), OpenSearchGeoPointType.of()), + () -> assertSame(OpenSearchDataType.of(MappingType.Ip), OpenSearchIpType.of()), + () -> + assertNotSame( + OpenSearchTextType.of(), + OpenSearchTextType.of(Map.of("properties", OpenSearchDataType.of(INTEGER)))), () -> assertSame(OpenSearchDataType.of(INTEGER), OpenSearchDataType.of(INTEGER)), () -> assertSame(OpenSearchDataType.of(STRING), OpenSearchDataType.of(STRING)), () -> assertSame(OpenSearchDataType.of(STRUCT), OpenSearchDataType.of(STRUCT)), - () -> assertNotSame(OpenSearchDataType.of(INTEGER), - OpenSearchDataType.of(INTEGER).cloneEmpty()) - ); + () -> + assertNotSame( + OpenSearchDataType.of(INTEGER), OpenSearchDataType.of(INTEGER).cloneEmpty())); } @Test @@ -211,17 +198,25 @@ public void types_but_clones_are_singletons_and_cached() { public void fields_and_properties_are_readonly() { var objectType = OpenSearchDataType.of(MappingType.Object); var textType = OpenSearchTextType.of(); - var textTypeWithFields = OpenSearchTextType.of( - Map.of("letters", OpenSearchDataType.of(MappingType.Keyword))); + var textTypeWithFields = + 
OpenSearchTextType.of(Map.of("letters", OpenSearchDataType.of(MappingType.Keyword))); assertAll( - () -> assertThrows(UnsupportedOperationException.class, - () -> objectType.getProperties().put("something", OpenSearchDataType.of(INTEGER))), - () -> assertThrows(UnsupportedOperationException.class, - () -> textType.getFields().put("words", OpenSearchDataType.of(MappingType.Keyword))), - () -> assertThrows(UnsupportedOperationException.class, - () -> textTypeWithFields.getFields().put("words", - OpenSearchDataType.of(MappingType.Keyword))) - ); + () -> + assertThrows( + UnsupportedOperationException.class, + () -> objectType.getProperties().put("something", OpenSearchDataType.of(INTEGER))), + () -> + assertThrows( + UnsupportedOperationException.class, + () -> + textType.getFields().put("words", OpenSearchDataType.of(MappingType.Keyword))), + () -> + assertThrows( + UnsupportedOperationException.class, + () -> + textTypeWithFields + .getFields() + .put("words", OpenSearchDataType.of(MappingType.Keyword)))); } @Test @@ -234,10 +229,8 @@ public void of_null_MappingType() { // cloneEmpty doesn't clone properties and fields. // Fields are cloned by OpenSearchTextType::cloneEmpty, because it is used in that type only. 
public void cloneEmpty() { - var type = OpenSearchDataType.of( - MappingType.Object, - Map.of("val", OpenSearchDataType.of(INTEGER)) - ); + var type = + OpenSearchDataType.of(MappingType.Object, Map.of("val", OpenSearchDataType.of(INTEGER))); var clone = type.cloneEmpty(); var textClone = textKeywordType.cloneEmpty(); @@ -246,9 +239,10 @@ public void cloneEmpty() { () -> assertEquals(type, clone), () -> assertTrue(clone.getProperties().isEmpty()), () -> assertEquals(textKeywordType, textClone), - () -> assertEquals(FieldUtils.readField(textKeywordType, "fields", true), - FieldUtils.readField(textClone, "fields", true)) - ); + () -> + assertEquals( + FieldUtils.readField(textKeywordType, "fields", true), + FieldUtils.readField(textClone, "fields", true))); } // Following structure of nested objects should be flattened @@ -294,26 +288,29 @@ public void traverseAndFlatten() { () -> assertEquals(9, flattened.size()), () -> assertTrue(flattened.get("mapping").getProperties().isEmpty()), () -> assertTrue(flattened.get("mapping.submapping").getProperties().isEmpty()), - () -> assertTrue( - flattened.get("mapping.submapping.subsubmapping").getProperties().isEmpty()), - + () -> + assertTrue(flattened.get("mapping.submapping.subsubmapping").getProperties().isEmpty()), () -> assertEquals(objectType, flattened.get("mapping")), () -> assertEquals(objectType, flattened.get("mapping.submapping")), () -> assertEquals(objectType, flattened.get("mapping.submapping.subsubmapping")), - - () -> assertEquals(OpenSearchDataType.of(MappingType.Keyword), - flattened.get("mapping.keyword")), - () -> assertEquals(OpenSearchDataType.of(MappingType.Text), - flattened.get("mapping.text")), - () -> assertEquals(OpenSearchGeoPointType.of(), - flattened.get("mapping.submapping.geo_point")), - () -> assertEquals(OpenSearchTextType.of(), - flattened.get("mapping.submapping.textWithFieldsType")), - () -> assertEquals(OpenSearchTextType.of(), - 
flattened.get("mapping.submapping.subsubmapping.texttype")), - () -> assertEquals(OpenSearchDataType.of(INTEGER), - flattened.get("mapping.submapping.subsubmapping.INTEGER")) - ); + () -> + assertEquals( + OpenSearchDataType.of(MappingType.Keyword), flattened.get("mapping.keyword")), + () -> assertEquals(OpenSearchDataType.of(MappingType.Text), flattened.get("mapping.text")), + () -> + assertEquals( + OpenSearchGeoPointType.of(), flattened.get("mapping.submapping.geo_point")), + () -> + assertEquals( + OpenSearchTextType.of(), flattened.get("mapping.submapping.textWithFieldsType")), + () -> + assertEquals( + OpenSearchTextType.of(), + flattened.get("mapping.submapping.subsubmapping.texttype")), + () -> + assertEquals( + OpenSearchDataType.of(INTEGER), + flattened.get("mapping.submapping.subsubmapping.INTEGER"))); } @Test @@ -322,25 +319,42 @@ public void resolve() { assertAll( () -> assertNull(OpenSearchDataType.resolve(mapping, "incorrect")), - () -> assertEquals(OpenSearchDataType.of(MappingType.Object), - OpenSearchDataType.resolve(mapping, "mapping")), - () -> assertEquals(OpenSearchDataType.of(MappingType.Object), - OpenSearchDataType.resolve(mapping, "submapping")), - () -> assertEquals(OpenSearchDataType.of(MappingType.Object), - OpenSearchDataType.resolve(mapping, "subsubmapping")), - () -> assertEquals(OpenSearchDataType.of(MappingType.Text), - OpenSearchDataType.resolve(mapping, "texttype")), - () -> assertEquals(OpenSearchDataType.of(MappingType.Text), - OpenSearchDataType.resolve(mapping, "textWithFieldsType")), - () -> assertEquals(OpenSearchDataType.of(MappingType.Text), - OpenSearchDataType.resolve(mapping, "text")), - () -> assertEquals(OpenSearchDataType.of(MappingType.Integer), - OpenSearchDataType.resolve(mapping, "INTEGER")), - () -> assertEquals(OpenSearchDataType.of(MappingType.GeoPoint), - OpenSearchDataType.resolve(mapping, "geo_point")), - () -> assertEquals(OpenSearchDataType.of(MappingType.Keyword), - OpenSearchDataType.resolve(mapping, 
"keyword")) - ); + () -> + assertEquals( + OpenSearchDataType.of(MappingType.Object), + OpenSearchDataType.resolve(mapping, "mapping")), + () -> + assertEquals( + OpenSearchDataType.of(MappingType.Object), + OpenSearchDataType.resolve(mapping, "submapping")), + () -> + assertEquals( + OpenSearchDataType.of(MappingType.Object), + OpenSearchDataType.resolve(mapping, "subsubmapping")), + () -> + assertEquals( + OpenSearchDataType.of(MappingType.Text), + OpenSearchDataType.resolve(mapping, "texttype")), + () -> + assertEquals( + OpenSearchDataType.of(MappingType.Text), + OpenSearchDataType.resolve(mapping, "textWithFieldsType")), + () -> + assertEquals( + OpenSearchDataType.of(MappingType.Text), + OpenSearchDataType.resolve(mapping, "text")), + () -> + assertEquals( + OpenSearchDataType.of(MappingType.Integer), + OpenSearchDataType.resolve(mapping, "INTEGER")), + () -> + assertEquals( + OpenSearchDataType.of(MappingType.GeoPoint), + OpenSearchDataType.resolve(mapping, "geo_point")), + () -> + assertEquals( + OpenSearchDataType.of(MappingType.Keyword), + OpenSearchDataType.resolve(mapping, "keyword"))); } // type : Object @@ -357,39 +371,38 @@ public void resolve() { @Test public void text_type_with_fields_ctor() { - var type = OpenSearchTextType.of(Map.of("words", - OpenSearchDataType.of(MappingType.Keyword))); + var type = OpenSearchTextType.of(Map.of("words", OpenSearchDataType.of(MappingType.Keyword))); assertAll( () -> assertEquals(OpenSearchTextType.of(), type), () -> assertEquals(1, type.getFields().size()), - () -> assertEquals(OpenSearchDataType.of(MappingType.Keyword), - type.getFields().get("words")) - ); + () -> + assertEquals( + OpenSearchDataType.of(MappingType.Keyword), type.getFields().get("words"))); } private Map getSampleMapping() { - Map subsubmapping = Map.of( - "properties", Map.of( - "texttype", Map.of("type", "text"), - "INTEGER", Map.of("type", "integer") - ) - ); - - Map submapping = Map.of( - "properties", Map.of( - "subsubmapping", 
subsubmapping, - "textWithFieldsType", Map.of("type", "text", "fieldsType", true), - "geo_point", Map.of("type", "geo_point") - ) - ); - - Map types = Map.of( - "properties", Map.of( - "submapping", submapping, - "keyword", Map.of("type", "keyword"), - "text", Map.of("type", "text") - ) - ); + Map subsubmapping = + Map.of( + "properties", + Map.of( + "texttype", Map.of("type", "text"), + "INTEGER", Map.of("type", "integer"))); + + Map submapping = + Map.of( + "properties", + Map.of( + "subsubmapping", subsubmapping, + "textWithFieldsType", Map.of("type", "text", "fieldsType", true), + "geo_point", Map.of("type", "geo_point"))); + + Map types = + Map.of( + "properties", + Map.of( + "submapping", submapping, + "keyword", Map.of("type", "keyword"), + "text", Map.of("type", "text"))); var mapping = OpenSearchDataType.of(MappingType.Object, types); return Map.of("mapping", mapping); @@ -397,8 +410,7 @@ private Map getSampleMapping() { @Test public void test_getExprType() { - assertEquals(OpenSearchTextType.of(), - OpenSearchDataType.of(MappingType.Text).getExprType()); + assertEquals(OpenSearchTextType.of(), OpenSearchDataType.of(MappingType.Text).getExprType()); assertEquals(FLOAT, OpenSearchDataType.of(MappingType.Float).getExprType()); assertEquals(FLOAT, OpenSearchDataType.of(MappingType.HalfFloat).getExprType()); assertEquals(DOUBLE, OpenSearchDataType.of(MappingType.Double).getExprType()); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateTypeTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateTypeTest.java index 13393da732..a9511f8c0b 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateTypeTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/type/OpenSearchDateTypeTest.java @@ -47,10 +47,8 @@ class OpenSearchDateTypeTest { private static final OpenSearchDateType defaultDateType = 
OpenSearchDateType.of(defaultFormatString); - private static final OpenSearchDateType dateDateType = - OpenSearchDateType.of(dateFormatString); - private static final OpenSearchDateType timeDateType = - OpenSearchDateType.of(timeFormatString); + private static final OpenSearchDateType dateDateType = OpenSearchDateType.of(dateFormatString); + private static final OpenSearchDateType timeDateType = OpenSearchDateType.of(timeFormatString); private static final OpenSearchDateType datetimeDateType = OpenSearchDateType.of(datetimeFormatString); @@ -79,8 +77,7 @@ public void isCompatible() { () -> assertFalse(DATE.isCompatible(defaultDateType)), () -> assertTrue(DATE.isCompatible(dateDateType)), () -> assertFalse(DATE.isCompatible(timeDateType)), - () -> assertFalse(DATE.isCompatible(datetimeDateType)) - ); + () -> assertFalse(DATE.isCompatible(datetimeDateType))); } // `typeName` and `legacyTypeName` return the same thing for date objects: @@ -92,8 +89,7 @@ public void check_typeName() { () -> assertEquals("DATE", defaultDateType.typeName()), () -> assertEquals("DATE", timeDateType.typeName()), () -> assertEquals("DATE", dateDateType.typeName()), - () -> assertEquals("DATE", datetimeDateType.typeName()) - ); + () -> assertEquals("DATE", datetimeDateType.typeName())); } @Test @@ -103,8 +99,7 @@ public void check_legacyTypeName() { () -> assertEquals("DATE", defaultDateType.legacyTypeName()), () -> assertEquals("DATE", timeDateType.legacyTypeName()), () -> assertEquals("DATE", dateDateType.legacyTypeName()), - () -> assertEquals("DATE", datetimeDateType.legacyTypeName()) - ); + () -> assertEquals("DATE", datetimeDateType.legacyTypeName())); } @Test @@ -114,8 +109,7 @@ public void check_exprTypeName() { () -> assertEquals(TIMESTAMP, defaultDateType.getExprType()), () -> assertEquals(TIME, timeDateType.getExprType()), () -> assertEquals(DATE, dateDateType.getExprType()), - () -> assertEquals(TIMESTAMP, datetimeDateType.getExprType()) - ); + () -> assertEquals(TIMESTAMP, 
datetimeDateType.getExprType())); } private static Stream getAllSupportedFormats() { @@ -125,11 +119,12 @@ private static Stream getAllSupportedFormats() { @ParameterizedTest @MethodSource("getAllSupportedFormats") public void check_supported_format_names_coverage(FormatNames formatName) { - assertTrue(SUPPORTED_NAMED_NUMERIC_FORMATS.contains(formatName) - || SUPPORTED_NAMED_DATETIME_FORMATS.contains(formatName) - || SUPPORTED_NAMED_DATE_FORMATS.contains(formatName) - || SUPPORTED_NAMED_TIME_FORMATS.contains(formatName) - || SUPPORTED_NAMED_INCOMPLETE_DATE_FORMATS.contains(formatName), + assertTrue( + SUPPORTED_NAMED_NUMERIC_FORMATS.contains(formatName) + || SUPPORTED_NAMED_DATETIME_FORMATS.contains(formatName) + || SUPPORTED_NAMED_DATE_FORMATS.contains(formatName) + || SUPPORTED_NAMED_TIME_FORMATS.contains(formatName) + || SUPPORTED_NAMED_INCOMPLETE_DATE_FORMATS.contains(formatName), formatName + " not supported"); } @@ -142,17 +137,24 @@ private static Stream getSupportedDatetimeFormats() { public void check_datetime_format_names(FormatNames datetimeFormat) { String camelCaseName = datetimeFormat.getCamelCaseName(); if (camelCaseName != null && !camelCaseName.isEmpty()) { - OpenSearchDateType dateType = - OpenSearchDateType.of(camelCaseName); - assertSame(dateType.getExprType(), TIMESTAMP, camelCaseName - + " does not format to a TIMESTAMP type, instead got " + dateType.getExprType()); + OpenSearchDateType dateType = OpenSearchDateType.of(camelCaseName); + assertSame( + dateType.getExprType(), + TIMESTAMP, + camelCaseName + + " does not format to a TIMESTAMP type, instead got " + + dateType.getExprType()); } String snakeCaseName = datetimeFormat.getSnakeCaseName(); if (snakeCaseName != null && !snakeCaseName.isEmpty()) { OpenSearchDateType dateType = OpenSearchDateType.of(snakeCaseName); - assertSame(dateType.getExprType(), TIMESTAMP, snakeCaseName - + " does not format to a TIMESTAMP type, instead got " + dateType.getExprType()); + assertSame( + 
dateType.getExprType(), + TIMESTAMP, + snakeCaseName + + " does not format to a TIMESTAMP type, instead got " + + dateType.getExprType()); } else { fail(); } @@ -168,15 +170,19 @@ public void check_date_format_names(FormatNames dateFormat) { String camelCaseName = dateFormat.getCamelCaseName(); if (camelCaseName != null && !camelCaseName.isEmpty()) { OpenSearchDateType dateType = OpenSearchDateType.of(camelCaseName); - assertSame(dateType.getExprType(), DATE, camelCaseName - + " does not format to a DATE type, instead got " + dateType.getExprType()); + assertSame( + dateType.getExprType(), + DATE, + camelCaseName + " does not format to a DATE type, instead got " + dateType.getExprType()); } String snakeCaseName = dateFormat.getSnakeCaseName(); if (snakeCaseName != null && !snakeCaseName.isEmpty()) { OpenSearchDateType dateType = OpenSearchDateType.of(snakeCaseName); - assertSame(dateType.getExprType(), DATE, snakeCaseName - + " does not format to a DATE type, instead got " + dateType.getExprType()); + assertSame( + dateType.getExprType(), + DATE, + snakeCaseName + " does not format to a DATE type, instead got " + dateType.getExprType()); } else { fail(); } @@ -192,15 +198,19 @@ public void check_time_format_names(FormatNames timeFormat) { String camelCaseName = timeFormat.getCamelCaseName(); if (camelCaseName != null && !camelCaseName.isEmpty()) { OpenSearchDateType dateType = OpenSearchDateType.of(camelCaseName); - assertSame(dateType.getExprType(), TIME, camelCaseName - + " does not format to a TIME type, instead got " + dateType.getExprType()); + assertSame( + dateType.getExprType(), + TIME, + camelCaseName + " does not format to a TIME type, instead got " + dateType.getExprType()); } String snakeCaseName = timeFormat.getSnakeCaseName(); if (snakeCaseName != null && !snakeCaseName.isEmpty()) { OpenSearchDateType dateType = OpenSearchDateType.of(snakeCaseName); - assertSame(dateType.getExprType(), TIME, snakeCaseName - + " does not format to a TIME type, instead 
got " + dateType.getExprType()); + assertSame( + dateType.getExprType(), + TIME, + snakeCaseName + " does not format to a TIME type, instead got " + dateType.getExprType()); } else { fail(); } @@ -237,8 +247,7 @@ private static Stream get_format_combinations_for_test() { // D - day of year, N - nano of day Arguments.of(TIMESTAMP, List.of("dd.MM.yyyy N", "uuuu:D:HH:mm"), "custom datetime"), Arguments.of(DATE, List.of("dd.MM.yyyy", "uuuu:D"), "custom date"), - Arguments.of(TIME, List.of("HH:mm", "N"), "custom time") - ); + Arguments.of(TIME, List.of("HH:mm", "N"), "custom time")); } @ParameterizedTest(name = "[{index}] {2}") @@ -258,7 +267,6 @@ public void dont_use_incorrect_format_as_custom() { @Test public void check_if_date_type_compatible() { assertTrue(isDateTypeCompatible(DATE)); - assertFalse(isDateTypeCompatible(OpenSearchDataType.of( - OpenSearchDataType.MappingType.Text))); + assertFalse(isDateTypeCompatible(OpenSearchDataType.of(OpenSearchDataType.MappingType.Text))); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprBinaryValueTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprBinaryValueTest.java index 4e7b33f944..fa221bc214 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprBinaryValueTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprBinaryValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -24,22 +23,19 @@ public void compare() { @Test public void equal() { - OpenSearchExprBinaryValue value = - new OpenSearchExprBinaryValue("U29tZSBiaW5hcnkgYmxvYg=="); + OpenSearchExprBinaryValue value = new OpenSearchExprBinaryValue("U29tZSBiaW5hcnkgYmxvYg=="); assertTrue(value.equal(new OpenSearchExprBinaryValue("U29tZSBiaW5hcnkgYmxvYg=="))); } @Test public 
void value() { - OpenSearchExprBinaryValue value = - new OpenSearchExprBinaryValue("U29tZSBiaW5hcnkgYmxvYg=="); + OpenSearchExprBinaryValue value = new OpenSearchExprBinaryValue("U29tZSBiaW5hcnkgYmxvYg=="); assertEquals("U29tZSBiaW5hcnkgYmxvYg==", value.value()); } @Test public void type() { - OpenSearchExprBinaryValue value = - new OpenSearchExprBinaryValue("U29tZSBiaW5hcnkgYmxvYg=="); + OpenSearchExprBinaryValue value = new OpenSearchExprBinaryValue("U29tZSBiaW5hcnkgYmxvYg=="); assertEquals(OpenSearchBinaryType.of(), value.type()); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprGeoPointValueTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprGeoPointValueTest.java index 4edb25aff5..defa97d8c8 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprGeoPointValueTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprGeoPointValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import static org.junit.jupiter.api.Assertions.assertEquals; diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValueTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValueTest.java index cda4377c60..38a4ad3199 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValueTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprIpValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import static org.junit.jupiter.api.Assertions.assertEquals; diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValueTest.java 
b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValueTest.java index b60402e746..9b7e032c57 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValueTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprTextValueTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import static org.junit.jupiter.api.Assertions.assertAll; @@ -27,54 +26,73 @@ public void type_of_ExprTextValue() { @Test public void getFields() { - var fields = Map.of( - "f1", OpenSearchDataType.of(OpenSearchDataType.MappingType.Integer), - "f2", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword), - "f3", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)); + var fields = + Map.of( + "f1", OpenSearchDataType.of(OpenSearchDataType.MappingType.Integer), + "f2", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword), + "f3", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)); assertEquals(fields, OpenSearchTextType.of(fields).getFields()); } @Test void non_text_types_arent_converted() { assertAll( - () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", - OpenSearchDataType.of(INTEGER))), - () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", - OpenSearchDataType.of(STRING))), - () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", - OpenSearchDataType.of(OpenSearchDataType.MappingType.GeoPoint))), - () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))), - () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Integer))), + () -> + assertEquals( + "field", + OpenSearchTextType.convertTextToKeyword("field", OpenSearchDataType.of(INTEGER))), + () -> 
+ assertEquals( + "field", + OpenSearchTextType.convertTextToKeyword("field", OpenSearchDataType.of(STRING))), + () -> + assertEquals( + "field", + OpenSearchTextType.convertTextToKeyword( + "field", OpenSearchDataType.of(OpenSearchDataType.MappingType.GeoPoint))), + () -> + assertEquals( + "field", + OpenSearchTextType.convertTextToKeyword( + "field", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))), + () -> + assertEquals( + "field", + OpenSearchTextType.convertTextToKeyword( + "field", OpenSearchDataType.of(OpenSearchDataType.MappingType.Integer))), () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", STRING)), - () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", INTEGER)) - ); + () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", INTEGER))); } @Test void non_text_types_with_nested_objects_arent_converted() { - var objectType = OpenSearchDataType.of(OpenSearchDataType.MappingType.Object, - Map.of("subfield", OpenSearchDataType.of(STRING))); - var arrayType = OpenSearchDataType.of(OpenSearchDataType.MappingType.Nested, - Map.of("subfield", OpenSearchDataType.of(STRING))); + var objectType = + OpenSearchDataType.of( + OpenSearchDataType.MappingType.Object, + Map.of("subfield", OpenSearchDataType.of(STRING))); + var arrayType = + OpenSearchDataType.of( + OpenSearchDataType.MappingType.Nested, + Map.of("subfield", OpenSearchDataType.of(STRING))); assertAll( () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", objectType)), - () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", arrayType)) - ); + () -> assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", arrayType))); } @Test void text_type_without_fields_isnt_converted() { - assertEquals("field", OpenSearchTextType.convertTextToKeyword("field", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Text))); + assertEquals( + "field", + 
OpenSearchTextType.convertTextToKeyword( + "field", OpenSearchDataType.of(OpenSearchDataType.MappingType.Text))); } @Test void text_type_with_fields_is_converted() { - var textWithKeywordType = OpenSearchTextType.of(Map.of("keyword", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))); - assertEquals("field.keyword", - OpenSearchTextType.convertTextToKeyword("field", textWithKeywordType)); + var textWithKeywordType = + OpenSearchTextType.of( + Map.of("keyword", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))); + assertEquals( + "field.keyword", OpenSearchTextType.convertTextToKeyword("field", textWithKeywordType)); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java index 827606a961..1e913dfde2 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/data/value/OpenSearchExprValueFactoryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.data.value; import static org.junit.jupiter.api.Assertions.assertAll; @@ -88,8 +87,8 @@ class OpenSearchExprValueFactoryTest { .put("timeNoMillisOrTimeV", OpenSearchDateType.of("time_no_millis || time")) .put("dateOrOrdinalDateV", OpenSearchDateType.of("date || ordinal_date")) .put("customFormatV", OpenSearchDateType.of("yyyy-MM-dd-HH-mm-ss")) - .put("customAndEpochMillisV", - OpenSearchDateType.of("yyyy-MM-dd-HH-mm-ss || epoch_millis")) + .put( + "customAndEpochMillisV", OpenSearchDateType.of("yyyy-MM-dd-HH-mm-ss || epoch_millis")) .put("incompleteFormatV", OpenSearchDateType.of("year")) .put("boolV", OpenSearchDataType.of(BOOLEAN)) .put("structV", OpenSearchDataType.of(STRUCT)) @@ -98,20 +97,22 @@ class OpenSearchExprValueFactoryTest { .put("arrayV", 
OpenSearchDataType.of(ARRAY)) .put("arrayV.info", OpenSearchDataType.of(STRING)) .put("arrayV.author", OpenSearchDataType.of(STRING)) - .put("deepNestedV", OpenSearchDataType.of( - OpenSearchDataType.of(OpenSearchDataType.MappingType.Nested)) - ) - .put("deepNestedV.year", OpenSearchDataType.of( - OpenSearchDataType.of(OpenSearchDataType.MappingType.Nested)) - ) + .put( + "deepNestedV", + OpenSearchDataType.of(OpenSearchDataType.of(OpenSearchDataType.MappingType.Nested))) + .put( + "deepNestedV.year", + OpenSearchDataType.of(OpenSearchDataType.of(OpenSearchDataType.MappingType.Nested))) .put("deepNestedV.year.timeV", OpenSearchDateType.of(TIME)) - .put("nestedV", OpenSearchDataType.of( - OpenSearchDataType.of(OpenSearchDataType.MappingType.Nested)) - ) + .put( + "nestedV", + OpenSearchDataType.of(OpenSearchDataType.of(OpenSearchDataType.MappingType.Nested))) .put("nestedV.count", OpenSearchDataType.of(INTEGER)) .put("textV", OpenSearchDataType.of(OpenSearchDataType.MappingType.Text)) - .put("textKeywordV", OpenSearchTextType.of(Map.of("words", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))) + .put( + "textKeywordV", + OpenSearchTextType.of( + Map.of("words", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))) .put("ipV", OpenSearchDataType.of(OpenSearchDataType.MappingType.Ip)) .put("geoV", OpenSearchDataType.of(OpenSearchDataType.MappingType.GeoPoint)) .put("binaryV", OpenSearchDataType.of(OpenSearchDataType.MappingType.Binary)) @@ -124,9 +125,8 @@ class OpenSearchExprValueFactoryTest { public void constructNullValue() { assertAll( () -> assertEquals(nullValue(), tupleValue("{\"intV\":null}").get("intV")), - () -> assertEquals(nullValue(), constructFromObject("intV", null)), - () -> assertTrue(new OpenSearchJsonContent(null).isNull()) - ); + () -> assertEquals(nullValue(), constructFromObject("intV", null)), + () -> assertTrue(new OpenSearchJsonContent(null).isNull())); } @Test @@ -136,8 +136,7 @@ public void iterateArrayValue() 
throws JsonProcessingException { assertAll( () -> assertEquals("zz", arrayIt.next().stringValue()), () -> assertEquals("bb", arrayIt.next().stringValue()), - () -> assertFalse(arrayIt.hasNext()) - ); + () -> assertFalse(arrayIt.hasNext())); } @Test @@ -146,8 +145,7 @@ public void iterateArrayValueWithOneElement() throws JsonProcessingException { var arrayIt = new OpenSearchJsonContent(mapper.readTree("[\"zz\"]")).array(); assertAll( () -> assertEquals("zz", arrayIt.next().stringValue()), - () -> assertFalse(arrayIt.hasNext()) - ); + () -> assertFalse(arrayIt.hasNext())); } @Test @@ -160,8 +158,7 @@ public void constructByte() { assertAll( () -> assertEquals(byteValue((byte) 1), tupleValue("{\"byteV\":1}").get("byteV")), () -> assertEquals(byteValue((byte) 1), constructFromObject("byteV", 1)), - () -> assertEquals(byteValue((byte) 1), constructFromObject("byteV", "1.0")) - ); + () -> assertEquals(byteValue((byte) 1), constructFromObject("byteV", "1.0"))); } @Test @@ -169,8 +166,7 @@ public void constructShort() { assertAll( () -> assertEquals(shortValue((short) 1), tupleValue("{\"shortV\":1}").get("shortV")), () -> assertEquals(shortValue((short) 1), constructFromObject("shortV", 1)), - () -> assertEquals(shortValue((short) 1), constructFromObject("shortV", "1.0")) - ); + () -> assertEquals(shortValue((short) 1), constructFromObject("shortV", "1.0"))); } @Test @@ -178,8 +174,7 @@ public void constructInteger() { assertAll( () -> assertEquals(integerValue(1), tupleValue("{\"intV\":1}").get("intV")), () -> assertEquals(integerValue(1), constructFromObject("intV", 1)), - () -> assertEquals(integerValue(1), constructFromObject("intV", "1.0")) - ); + () -> assertEquals(integerValue(1), constructFromObject("intV", "1.0"))); } @Test @@ -192,33 +187,29 @@ public void constructLong() { assertAll( () -> assertEquals(longValue(1L), tupleValue("{\"longV\":1}").get("longV")), () -> assertEquals(longValue(1L), constructFromObject("longV", 1L)), - () -> assertEquals(longValue(1L), 
constructFromObject("longV", "1.0")) - ); + () -> assertEquals(longValue(1L), constructFromObject("longV", "1.0"))); } @Test public void constructFloat() { assertAll( () -> assertEquals(floatValue(1f), tupleValue("{\"floatV\":1.0}").get("floatV")), - () -> assertEquals(floatValue(1f), constructFromObject("floatV", 1f)) - ); + () -> assertEquals(floatValue(1f), constructFromObject("floatV", 1f))); } @Test public void constructDouble() { assertAll( () -> assertEquals(doubleValue(1d), tupleValue("{\"doubleV\":1.0}").get("doubleV")), - () -> assertEquals(doubleValue(1d), constructFromObject("doubleV", 1d)) - ); + () -> assertEquals(doubleValue(1d), constructFromObject("doubleV", 1d))); } @Test public void constructString() { assertAll( - () -> assertEquals(stringValue("text"), - tupleValue("{\"stringV\":\"text\"}").get("stringV")), - () -> assertEquals(stringValue("text"), constructFromObject("stringV", "text")) - ); + () -> + assertEquals(stringValue("text"), tupleValue("{\"stringV\":\"text\"}").get("stringV")), + () -> assertEquals(stringValue("text"), constructFromObject("stringV", "text"))); } @Test @@ -228,23 +219,25 @@ public void constructBoolean() { () -> assertEquals(booleanValue(true), constructFromObject("boolV", true)), () -> assertEquals(booleanValue(true), constructFromObject("boolV", "true")), () -> assertEquals(booleanValue(true), constructFromObject("boolV", 1)), - () -> assertEquals(booleanValue(false), constructFromObject("boolV", 0)) - ); + () -> assertEquals(booleanValue(false), constructFromObject("boolV", 0))); } @Test public void constructText() { assertAll( - () -> assertEquals(new OpenSearchExprTextValue("text"), - tupleValue("{\"textV\":\"text\"}").get("textV")), - () -> assertEquals(new OpenSearchExprTextValue("text"), - constructFromObject("textV", "text")), - - () -> assertEquals(new OpenSearchExprTextValue("text"), - tupleValue("{\"textKeywordV\":\"text\"}").get("textKeywordV")), - () -> assertEquals(new OpenSearchExprTextValue("text"), - 
constructFromObject("textKeywordV", "text")) - ); + () -> + assertEquals( + new OpenSearchExprTextValue("text"), + tupleValue("{\"textV\":\"text\"}").get("textV")), + () -> + assertEquals(new OpenSearchExprTextValue("text"), constructFromObject("textV", "text")), + () -> + assertEquals( + new OpenSearchExprTextValue("text"), + tupleValue("{\"textKeywordV\":\"text\"}").get("textKeywordV")), + () -> + assertEquals( + new OpenSearchExprTextValue("text"), constructFromObject("textKeywordV", "text"))); } @Test @@ -252,95 +245,122 @@ public void constructDates() { ExprValue dateStringV = constructFromObject("dateStringV", "1984-04-12"); assertAll( () -> assertEquals(new ExprDateValue("1984-04-12"), dateStringV), - () -> assertEquals(new ExprDateValue( - LocalDate.ofInstant(Instant.ofEpochMilli(450576000000L), UTC_ZONE_ID)), - constructFromObject("dateV", 450576000000L)), - () -> assertEquals(new ExprDateValue("1984-04-12"), - constructFromObject("dateOrOrdinalDateV", "1984-103")), - () -> assertEquals(new ExprDateValue("2015-01-01"), - tupleValue("{\"dateV\":\"2015-01-01\"}").get("dateV")) - ); + () -> + assertEquals( + new ExprDateValue( + LocalDate.ofInstant(Instant.ofEpochMilli(450576000000L), UTC_ZONE_ID)), + constructFromObject("dateV", 450576000000L)), + () -> + assertEquals( + new ExprDateValue("1984-04-12"), + constructFromObject("dateOrOrdinalDateV", "1984-103")), + () -> + assertEquals( + new ExprDateValue("2015-01-01"), + tupleValue("{\"dateV\":\"2015-01-01\"}").get("dateV"))); } @Test public void constructTimes() { - ExprValue timeStringV = constructFromObject("timeStringV","12:10:30.000Z"); + ExprValue timeStringV = constructFromObject("timeStringV", "12:10:30.000Z"); assertAll( () -> assertTrue(timeStringV.isDateTime()), () -> assertTrue(timeStringV instanceof ExprTimeValue), () -> assertEquals(new ExprTimeValue("12:10:30"), timeStringV), - () -> assertEquals(new ExprTimeValue(LocalTime.from( - Instant.ofEpochMilli(1420070400001L).atZone(UTC_ZONE_ID))), - 
constructFromObject("timeV", 1420070400001L)), - () -> assertEquals(new ExprTimeValue("09:07:42.000"), - constructFromObject("timeNoMillisOrTimeV", "09:07:42.000Z")), - () -> assertEquals(new ExprTimeValue("09:07:42"), - tupleValue("{\"timeV\":\"09:07:42\"}").get("timeV")) - ); + () -> + assertEquals( + new ExprTimeValue( + LocalTime.from(Instant.ofEpochMilli(1420070400001L).atZone(UTC_ZONE_ID))), + constructFromObject("timeV", 1420070400001L)), + () -> + assertEquals( + new ExprTimeValue("09:07:42.000"), + constructFromObject("timeNoMillisOrTimeV", "09:07:42.000Z")), + () -> + assertEquals( + new ExprTimeValue("09:07:42"), + tupleValue("{\"timeV\":\"09:07:42\"}").get("timeV"))); } @Test public void constructDatetime() { assertAll( - () -> assertEquals( - new ExprTimestampValue("2015-01-01 00:00:00"), - tupleValue("{\"timestampV\":\"2015-01-01\"}").get("timestampV")), - () -> assertEquals( - new ExprTimestampValue("2015-01-01 12:10:30"), - tupleValue("{\"timestampV\":\"2015-01-01T12:10:30Z\"}").get("timestampV")), - () -> assertEquals( - new ExprTimestampValue("2015-01-01 12:10:30"), - tupleValue("{\"timestampV\":\"2015-01-01T12:10:30\"}").get("timestampV")), - () -> assertEquals( - new ExprTimestampValue("2015-01-01 12:10:30"), - tupleValue("{\"timestampV\":\"2015-01-01 12:10:30\"}").get("timestampV")), - () -> assertEquals( - new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), - constructFromObject("timestampV", 1420070400001L)), - () -> assertEquals( - new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), - constructFromObject("timestampV", Instant.ofEpochMilli(1420070400001L))), - () -> assertEquals( - new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), - constructFromObject("epochMillisV", "1420070400001")), - () -> assertEquals( - new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), - constructFromObject("epochMillisV", 1420070400001L)), - () -> assertEquals( - new ExprTimestampValue(Instant.ofEpochSecond(142704001L)), - 
constructFromObject("epochSecondV", 142704001L)), - () -> assertEquals( - new ExprTimeValue("10:20:30"), - tupleValue("{ \"timeCustomV\" : 102030 }").get("timeCustomV")), - () -> assertEquals( - new ExprDateValue("1961-04-12"), - tupleValue("{ \"dateCustomV\" : 19610412 }").get("dateCustomV")), - () -> assertEquals( - new ExprTimestampValue("1984-05-10 20:30:40"), - tupleValue("{ \"dateTimeCustomV\" : 19840510203040 }").get("dateTimeCustomV")), - () -> assertEquals( - new ExprTimestampValue("2015-01-01 12:10:30"), - constructFromObject("timestampV", "2015-01-01 12:10:30")), - () -> assertEquals( - new ExprDatetimeValue("2015-01-01 12:10:30"), - constructFromObject("datetimeV", "2015-01-01 12:10:30")), - () -> assertEquals( - new ExprDatetimeValue("2015-01-01 12:10:30"), - constructFromObject("datetimeDefaultV", "2015-01-01 12:10:30")), - () -> assertEquals( - new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), - constructFromObject("dateOrEpochMillisV", "1420070400001")), + () -> + assertEquals( + new ExprTimestampValue("2015-01-01 00:00:00"), + tupleValue("{\"timestampV\":\"2015-01-01\"}").get("timestampV")), + () -> + assertEquals( + new ExprTimestampValue("2015-01-01 12:10:30"), + tupleValue("{\"timestampV\":\"2015-01-01T12:10:30Z\"}").get("timestampV")), + () -> + assertEquals( + new ExprTimestampValue("2015-01-01 12:10:30"), + tupleValue("{\"timestampV\":\"2015-01-01T12:10:30\"}").get("timestampV")), + () -> + assertEquals( + new ExprTimestampValue("2015-01-01 12:10:30"), + tupleValue("{\"timestampV\":\"2015-01-01 12:10:30\"}").get("timestampV")), + () -> + assertEquals( + new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), + constructFromObject("timestampV", 1420070400001L)), + () -> + assertEquals( + new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), + constructFromObject("timestampV", Instant.ofEpochMilli(1420070400001L))), + () -> + assertEquals( + new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), + 
constructFromObject("epochMillisV", "1420070400001")), + () -> + assertEquals( + new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), + constructFromObject("epochMillisV", 1420070400001L)), + () -> + assertEquals( + new ExprTimestampValue(Instant.ofEpochSecond(142704001L)), + constructFromObject("epochSecondV", 142704001L)), + () -> + assertEquals( + new ExprTimeValue("10:20:30"), + tupleValue("{ \"timeCustomV\" : 102030 }").get("timeCustomV")), + () -> + assertEquals( + new ExprDateValue("1961-04-12"), + tupleValue("{ \"dateCustomV\" : 19610412 }").get("dateCustomV")), + () -> + assertEquals( + new ExprTimestampValue("1984-05-10 20:30:40"), + tupleValue("{ \"dateTimeCustomV\" : 19840510203040 }").get("dateTimeCustomV")), + () -> + assertEquals( + new ExprTimestampValue("2015-01-01 12:10:30"), + constructFromObject("timestampV", "2015-01-01 12:10:30")), + () -> + assertEquals( + new ExprDatetimeValue("2015-01-01 12:10:30"), + constructFromObject("datetimeV", "2015-01-01 12:10:30")), + () -> + assertEquals( + new ExprDatetimeValue("2015-01-01 12:10:30"), + constructFromObject("datetimeDefaultV", "2015-01-01 12:10:30")), + () -> + assertEquals( + new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), + constructFromObject("dateOrEpochMillisV", "1420070400001")), // case: timestamp-formatted field, but it only gets a time: should match a time - () -> assertEquals( - new ExprTimeValue("19:36:22"), - tupleValue("{\"timestampV\":\"19:36:22\"}").get("timestampV")), + () -> + assertEquals( + new ExprTimeValue("19:36:22"), + tupleValue("{\"timestampV\":\"19:36:22\"}").get("timestampV")), // case: timestamp-formatted field, but it only gets a date: should match a date - () -> assertEquals( - new ExprDateValue("2011-03-03"), - tupleValue("{\"timestampV\":\"2011-03-03\"}").get("timestampV")) - ); + () -> + assertEquals( + new ExprDateValue("2011-03-03"), + tupleValue("{\"timestampV\":\"2011-03-03\"}").get("timestampV"))); } @Test @@ -350,11 +370,11 @@ public 
void constructDatetime_fromCustomFormat() { constructFromObject("customFormatV", "2015-01-01-12-10-30")); IllegalArgumentException exception = - assertThrows(IllegalArgumentException.class, + assertThrows( + IllegalArgumentException.class, () -> constructFromObject("customFormatV", "2015-01-01 12-10-30")); assertEquals( - "Construct TIMESTAMP from \"2015-01-01 12-10-30\" failed, " - + "unsupported format.", + "Construct TIMESTAMP from \"2015-01-01 12-10-30\" failed, " + "unsupported format.", exception.getMessage()); assertEquals( @@ -369,91 +389,90 @@ public void constructDatetime_fromCustomFormat() { @Test public void constructDatetimeFromUnsupportedFormat_ThrowIllegalArgumentException() { IllegalArgumentException exception = - assertThrows(IllegalArgumentException.class, + assertThrows( + IllegalArgumentException.class, () -> constructFromObject("timestampV", "2015-01-01 12:10")); assertEquals( - "Construct TIMESTAMP from \"2015-01-01 12:10\" failed, " - + "unsupported format.", + "Construct TIMESTAMP from \"2015-01-01 12:10\" failed, " + "unsupported format.", exception.getMessage()); // fail with missing seconds exception = - assertThrows(IllegalArgumentException.class, + assertThrows( + IllegalArgumentException.class, () -> constructFromObject("dateOrEpochMillisV", "2015-01-01 12:10")); assertEquals( - "Construct TIMESTAMP from \"2015-01-01 12:10\" failed, " - + "unsupported format.", + "Construct TIMESTAMP from \"2015-01-01 12:10\" failed, " + "unsupported format.", exception.getMessage()); } @Test public void constructTimeFromUnsupportedFormat_ThrowIllegalArgumentException() { - IllegalArgumentException exception = assertThrows( - IllegalArgumentException.class, () -> constructFromObject("timeV", "2015-01-01")); + IllegalArgumentException exception = + assertThrows( + IllegalArgumentException.class, () -> constructFromObject("timeV", "2015-01-01")); assertEquals( - "Construct TIME from \"2015-01-01\" failed, " - + "unsupported format.", + "Construct TIME 
from \"2015-01-01\" failed, " + "unsupported format.", exception.getMessage()); - exception = assertThrows( - IllegalArgumentException.class, () -> constructFromObject("timeStringV", "10:10")); + exception = + assertThrows( + IllegalArgumentException.class, () -> constructFromObject("timeStringV", "10:10")); assertEquals( - "Construct TIME from \"10:10\" failed, " - + "unsupported format.", - exception.getMessage()); + "Construct TIME from \"10:10\" failed, " + "unsupported format.", exception.getMessage()); } @Test public void constructDateFromUnsupportedFormat_ThrowIllegalArgumentException() { - IllegalArgumentException exception = assertThrows( - IllegalArgumentException.class, () -> constructFromObject("dateV", "12:10:10")); + IllegalArgumentException exception = + assertThrows( + IllegalArgumentException.class, () -> constructFromObject("dateV", "12:10:10")); assertEquals( - "Construct DATE from \"12:10:10\" failed, " - + "unsupported format.", + "Construct DATE from \"12:10:10\" failed, " + "unsupported format.", exception.getMessage()); - exception = assertThrows( - IllegalArgumentException.class, () -> constructFromObject("dateStringV", "abc")); + exception = + assertThrows( + IllegalArgumentException.class, () -> constructFromObject("dateStringV", "abc")); assertEquals( - "Construct DATE from \"abc\" failed, " - + "unsupported format.", - exception.getMessage()); + "Construct DATE from \"abc\" failed, " + "unsupported format.", exception.getMessage()); } @Test public void constructDateFromIncompleteFormat() { - assertEquals( - new ExprDateValue("1984-01-01"), - constructFromObject("incompleteFormatV", "1984")); + assertEquals(new ExprDateValue("1984-01-01"), constructFromObject("incompleteFormatV", "1984")); } @Test public void constructArray() { assertEquals( - new ExprCollectionValue(List.of(new ExprTupleValue( - new LinkedHashMap() { - { - put("info", stringValue("zz")); - put("author", stringValue("au")); - } - }))), + new ExprCollectionValue( + 
List.of( + new ExprTupleValue( + new LinkedHashMap() { + { + put("info", stringValue("zz")); + put("author", stringValue("au")); + } + }))), tupleValue("{\"arrayV\":[{\"info\":\"zz\",\"author\":\"au\"}]}").get("arrayV")); assertEquals( - new ExprCollectionValue(List.of(new ExprTupleValue( - new LinkedHashMap() { - { - put("info", stringValue("zz")); - put("author", stringValue("au")); - } - }))), - constructFromObject("arrayV", List.of( - ImmutableMap.of("info", "zz", "author", "au")))); + new ExprCollectionValue( + List.of( + new ExprTupleValue( + new LinkedHashMap() { + { + put("info", stringValue("zz")); + put("author", stringValue("au")); + } + }))), + constructFromObject("arrayV", List.of(ImmutableMap.of("info", "zz", "author", "au")))); } @Test public void constructArrayOfStrings() { - assertEquals(new ExprCollectionValue( - List.of(stringValue("zz"), stringValue("au"))), + assertEquals( + new ExprCollectionValue(List.of(stringValue("zz"), stringValue("au"))), constructFromObject("arrayV", List.of("zz", "au"))); } @@ -461,100 +480,74 @@ public void constructArrayOfStrings() { public void constructNestedArraysOfStrings() { assertEquals( new ExprCollectionValue( - List.of( - collectionValue( - List.of("zz", "au") - ), - collectionValue( - List.of("ss") - ) - ) - ), - tupleValueWithArraySupport( - "{\"stringV\":[" - + "[\"zz\", \"au\"]," - + "[\"ss\"]" - + "]}" - ).get("stringV")); + List.of(collectionValue(List.of("zz", "au")), collectionValue(List.of("ss")))), + tupleValueWithArraySupport("{\"stringV\":[" + "[\"zz\", \"au\"]," + "[\"ss\"]" + "]}") + .get("stringV")); } @Test public void constructNestedArraysOfStringsReturnsFirstIndex() { assertEquals( stringValue("zz"), - tupleValue( - "{\"stringV\":[" - + "[\"zz\", \"au\"]," - + "[\"ss\"]" - + "]}" - ).get("stringV")); + tupleValue("{\"stringV\":[" + "[\"zz\", \"au\"]," + "[\"ss\"]" + "]}").get("stringV")); } @Test public void constructMultiNestedArraysOfStringsReturnsFirstIndex() { assertEquals( 
stringValue("z"), - tupleValue( - "{\"stringV\":" - + "[\"z\"," - + "[\"s\"]," - + "[\"zz\", \"au\"]" - + "]}" - ).get("stringV")); + tupleValue("{\"stringV\":" + "[\"z\"," + "[\"s\"]," + "[\"zz\", \"au\"]" + "]}") + .get("stringV")); } @Test public void constructArrayOfInts() { - assertEquals(new ExprCollectionValue( - List.of(integerValue(1), integerValue(2))), + assertEquals( + new ExprCollectionValue(List.of(integerValue(1), integerValue(2))), constructFromObject("arrayV", List.of(1, 2))); } @Test public void constructArrayOfShorts() { // Shorts are treated same as integer - assertEquals(new ExprCollectionValue( - List.of(shortValue((short)3), shortValue((short)4))), + assertEquals( + new ExprCollectionValue(List.of(shortValue((short) 3), shortValue((short) 4))), constructFromObject("arrayV", List.of(3, 4))); } @Test public void constructArrayOfLongs() { - assertEquals(new ExprCollectionValue( - List.of(longValue(123456789L), longValue(987654321L))), + assertEquals( + new ExprCollectionValue(List.of(longValue(123456789L), longValue(987654321L))), constructFromObject("arrayV", List.of(123456789L, 987654321L))); } @Test public void constructArrayOfFloats() { - assertEquals(new ExprCollectionValue( - List.of(floatValue(3.14f), floatValue(4.13f))), + assertEquals( + new ExprCollectionValue(List.of(floatValue(3.14f), floatValue(4.13f))), constructFromObject("arrayV", List.of(3.14f, 4.13f))); } @Test public void constructArrayOfDoubles() { - assertEquals(new ExprCollectionValue( - List.of(doubleValue(9.1928374756D), doubleValue(4.987654321D))), + assertEquals( + new ExprCollectionValue(List.of(doubleValue(9.1928374756D), doubleValue(4.987654321D))), constructFromObject("arrayV", List.of(9.1928374756D, 4.987654321D))); } @Test public void constructArrayOfBooleans() { - assertEquals(new ExprCollectionValue( - List.of(booleanValue(true), booleanValue(false))), + assertEquals( + new ExprCollectionValue(List.of(booleanValue(true), booleanValue(false))), 
constructFromObject("arrayV", List.of(true, false))); } @Test public void constructNestedObjectArrayNode() { - assertEquals(collectionValue( - List.of( - Map.of("count", 1), - Map.of("count", 2) - )), - tupleValueWithArraySupport("{\"nestedV\":[{\"count\":1},{\"count\":2}]}") - .get("nestedV")); + assertEquals( + collectionValue(List.of(Map.of("count", 1), Map.of("count", 2))), + tupleValueWithArraySupport("{\"nestedV\":[{\"count\":1},{\"count\":2}]}").get("nestedV")); } @Test @@ -562,84 +555,70 @@ public void constructNestedObjectArrayOfObjectArraysNode() { assertEquals( collectionValue( List.of( - Map.of("year", + Map.of( + "year", List.of( Map.of("timeV", new ExprTimeValue("09:07:42")), - Map.of("timeV", new ExprTimeValue("09:07:42")) - ) - ), - Map.of("year", + Map.of("timeV", new ExprTimeValue("09:07:42")))), + Map.of( + "year", List.of( Map.of("timeV", new ExprTimeValue("09:07:42")), - Map.of("timeV", new ExprTimeValue("09:07:42")) - ) - ) - ) - ), + Map.of("timeV", new ExprTimeValue("09:07:42")))))), tupleValueWithArraySupport( - "{\"deepNestedV\":" - + "[" - + "{\"year\":" - + "[" - + "{\"timeV\":\"09:07:42\"}," - + "{\"timeV\":\"09:07:42\"}" - + "]" - + "}," - + "{\"year\":" - + "[" - + "{\"timeV\":\"09:07:42\"}," - + "{\"timeV\":\"09:07:42\"}" - + "]" - + "}" - + "]" - + "}") + "{\"deepNestedV\":" + + "[" + + "{\"year\":" + + "[" + + "{\"timeV\":\"09:07:42\"}," + + "{\"timeV\":\"09:07:42\"}" + + "]" + + "}," + + "{\"year\":" + + "[" + + "{\"timeV\":\"09:07:42\"}," + + "{\"timeV\":\"09:07:42\"}" + + "]" + + "}" + + "]" + + "}") .get("deepNestedV")); } @Test public void constructNestedArrayNode() { - assertEquals(collectionValue( - List.of( - 1969, - 2011 - )), - tupleValueWithArraySupport("{\"nestedV\":[1969,2011]}") - .get("nestedV")); + assertEquals( + collectionValue(List.of(1969, 2011)), + tupleValueWithArraySupport("{\"nestedV\":[1969,2011]}").get("nestedV")); } @Test public void constructNestedObjectNode() { - assertEquals(collectionValue( - List.of( 
- Map.of("count", 1969) - )), - tupleValue("{\"nestedV\":{\"count\":1969}}") - .get("nestedV")); + assertEquals( + collectionValue(List.of(Map.of("count", 1969))), + tupleValue("{\"nestedV\":{\"count\":1969}}").get("nestedV")); } @Test public void constructArrayOfGeoPoints() { - assertEquals(new ExprCollectionValue( + assertEquals( + new ExprCollectionValue( List.of( new OpenSearchExprGeoPointValue(42.60355556, -97.25263889), - new OpenSearchExprGeoPointValue(-33.6123556, 66.287449)) - ), + new OpenSearchExprGeoPointValue(-33.6123556, 66.287449))), tupleValueWithArraySupport( - "{\"geoV\":[" - + "{\"lat\":42.60355556,\"lon\":-97.25263889}," - + "{\"lat\":-33.6123556,\"lon\":66.287449}" - + "]}" - ).get("geoV") - ); + "{\"geoV\":[" + + "{\"lat\":42.60355556,\"lon\":-97.25263889}," + + "{\"lat\":-33.6123556,\"lon\":66.287449}" + + "]}") + .get("geoV")); } @Test public void constructArrayOfIPsReturnsFirstIndex() { assertEquals( new OpenSearchExprIpValue("192.168.0.1"), - tupleValue("{\"ipV\":[\"192.168.0.1\",\"192.168.0.2\"]}") - .get("ipV") - ); + tupleValue("{\"ipV\":[\"192.168.0.1\",\"192.168.0.2\"]}").get("ipV")); } @Test @@ -647,8 +626,7 @@ public void constructBinaryArrayReturnsFirstIndex() { assertEquals( new OpenSearchExprBinaryValue("U29tZSBiaWsdfsdfgYmxvYg=="), tupleValue("{\"binaryV\":[\"U29tZSBiaWsdfsdfgYmxvYg==\",\"U987yuhjjiy8jhk9vY+98jjdf\"]}") - .get("binaryV") - ); + .get("binaryV")); } @Test @@ -656,26 +634,21 @@ public void constructArrayOfCustomEpochMillisReturnsFirstIndex() { assertEquals( new ExprDatetimeValue("2015-01-01 12:10:30"), tupleValue("{\"customAndEpochMillisV\":[\"2015-01-01 12:10:30\",\"1999-11-09 01:09:44\"]}") - .get("customAndEpochMillisV") - ); + .get("customAndEpochMillisV")); } @Test public void constructArrayOfDateStringsReturnsFirstIndex() { assertEquals( new ExprDateValue("1984-04-12"), - tupleValue("{\"dateStringV\":[\"1984-04-12\",\"2033-05-03\"]}") - .get("dateStringV") - ); + 
tupleValue("{\"dateStringV\":[\"1984-04-12\",\"2033-05-03\"]}").get("dateStringV")); } @Test public void constructArrayOfTimeStringsReturnsFirstIndex() { assertEquals( new ExprTimeValue("12:10:30"), - tupleValue("{\"timeStringV\":[\"12:10:30.000Z\",\"18:33:55.000Z\"]}") - .get("timeStringV") - ); + tupleValue("{\"timeStringV\":[\"12:10:30.000Z\",\"18:33:55.000Z\"]}").get("timeStringV")); } @Test @@ -683,8 +656,7 @@ public void constructArrayOfEpochMillis() { assertEquals( new ExprTimestampValue(Instant.ofEpochMilli(1420070400001L)), tupleValue("{\"dateOrEpochMillisV\":[\"1420070400001\",\"1454251113333\"]}") - .get("dateOrEpochMillisV") - ); + .get("dateOrEpochMillisV")); } @Test @@ -711,54 +683,64 @@ public void constructStruct() { @Test public void constructIP() { - assertEquals(new OpenSearchExprIpValue("192.168.0.1"), + assertEquals( + new OpenSearchExprIpValue("192.168.0.1"), tupleValue("{\"ipV\":\"192.168.0.1\"}").get("ipV")); } @Test public void constructGeoPoint() { - assertEquals(new OpenSearchExprGeoPointValue(42.60355556, -97.25263889), + assertEquals( + new OpenSearchExprGeoPointValue(42.60355556, -97.25263889), tupleValue("{\"geoV\":{\"lat\":42.60355556,\"lon\":-97.25263889}}").get("geoV")); - assertEquals(new OpenSearchExprGeoPointValue(42.60355556, -97.25263889), + assertEquals( + new OpenSearchExprGeoPointValue(42.60355556, -97.25263889), tupleValue("{\"geoV\":{\"lat\":\"42.60355556\",\"lon\":\"-97.25263889\"}}").get("geoV")); - assertEquals(new OpenSearchExprGeoPointValue(42.60355556, -97.25263889), + assertEquals( + new OpenSearchExprGeoPointValue(42.60355556, -97.25263889), constructFromObject("geoV", "42.60355556,-97.25263889")); } @Test public void constructGeoPointFromUnsupportedFormatShouldThrowException() { IllegalStateException exception = - assertThrows(IllegalStateException.class, + assertThrows( + IllegalStateException.class, () -> tupleValue("{\"geoV\":[42.60355556,-97.25263889]}").get("geoV")); - assertEquals("geo point must in format 
of {\"lat\": number, \"lon\": number}", - exception.getMessage()); + assertEquals( + "geo point must in format of {\"lat\": number, \"lon\": number}", exception.getMessage()); exception = - assertThrows(IllegalStateException.class, + assertThrows( + IllegalStateException.class, () -> tupleValue("{\"geoV\":{\"lon\":-97.25263889}}").get("geoV")); - assertEquals("geo point must in format of {\"lat\": number, \"lon\": number}", - exception.getMessage()); + assertEquals( + "geo point must in format of {\"lat\": number, \"lon\": number}", exception.getMessage()); exception = - assertThrows(IllegalStateException.class, + assertThrows( + IllegalStateException.class, () -> tupleValue("{\"geoV\":{\"lat\":-97.25263889}}").get("geoV")); - assertEquals("geo point must in format of {\"lat\": number, \"lon\": number}", - exception.getMessage()); + assertEquals( + "geo point must in format of {\"lat\": number, \"lon\": number}", exception.getMessage()); exception = - assertThrows(IllegalStateException.class, + assertThrows( + IllegalStateException.class, () -> tupleValue("{\"geoV\":{\"lat\":true,\"lon\":-97.25263889}}").get("geoV")); assertEquals("latitude must be number value, but got value: true", exception.getMessage()); exception = - assertThrows(IllegalStateException.class, + assertThrows( + IllegalStateException.class, () -> tupleValue("{\"geoV\":{\"lat\":42.60355556,\"lon\":false}}").get("geoV")); assertEquals("longitude must be number value, but got value: false", exception.getMessage()); } @Test public void constructBinary() { - assertEquals(new OpenSearchExprBinaryValue("U29tZSBiaW5hcnkgYmxvYg=="), + assertEquals( + new OpenSearchExprBinaryValue("U29tZSBiaW5hcnkgYmxvYg=="), tupleValue("{\"binaryV\":\"U29tZSBiaW5hcnkgYmxvYg==\"}").get("binaryV")); } @@ -769,14 +751,16 @@ public void constructBinary() { @Test public void constructFromOpenSearchArrayReturnFirstElement() { assertEquals(integerValue(1), tupleValue("{\"intV\":[1, 2, 3]}").get("intV")); - assertEquals(new 
ExprTupleValue( - new LinkedHashMap() { - { - put("id", integerValue(1)); - put("state", stringValue("WA")); - } - }), tupleValue("{\"structV\":[{\"id\":1,\"state\":\"WA\"},{\"id\":2,\"state\":\"CA\"}]}}") - .get("structV")); + assertEquals( + new ExprTupleValue( + new LinkedHashMap() { + { + put("id", integerValue(1)); + put("state", stringValue("WA")); + } + }), + tupleValue("{\"structV\":[{\"id\":1,\"state\":\"WA\"},{\"id\":2,\"state\":\"CA\"}]}}") + .get("structV")); } @Test @@ -799,19 +783,13 @@ public void constructUnsupportedTypeThrowException() { new OpenSearchExprValueFactory(Map.of("type", new TestType())); IllegalStateException exception = assertThrows( - IllegalStateException.class, - () -> exprValueFactory.construct("{\"type\":1}", false) - ); + IllegalStateException.class, () -> exprValueFactory.construct("{\"type\":1}", false)); assertEquals("Unsupported type: TEST_TYPE for value: 1.", exception.getMessage()); exception = assertThrows( - IllegalStateException.class, - () -> exprValueFactory.construct("type", 1, false) - ); - assertEquals( - "Unsupported type: TEST_TYPE for value: 1.", - exception.getMessage()); + IllegalStateException.class, () -> exprValueFactory.construct("type", 1, false)); + assertEquals("Unsupported type: TEST_TYPE for value: 1.", exception.getMessage()); } @Test @@ -820,21 +798,21 @@ public void constructUnsupportedTypeThrowException() { public void factoryMappingsAreExtendableWithoutOverWrite() throws NoSuchFieldException, IllegalAccessException { var factory = new OpenSearchExprValueFactory(Map.of("value", OpenSearchDataType.of(INTEGER))); - factory.extendTypeMapping(Map.of( - "value", OpenSearchDataType.of(DOUBLE), - "agg", OpenSearchDataType.of(DATE))); + factory.extendTypeMapping( + Map.of( + "value", OpenSearchDataType.of(DOUBLE), + "agg", OpenSearchDataType.of(DATE))); // extract private field for testing purposes var field = factory.getClass().getDeclaredField("typeMapping"); field.setAccessible(true); 
@SuppressWarnings("unchecked") - var mapping = (Map)field.get(factory); + var mapping = (Map) field.get(factory); assertAll( () -> assertEquals(2, mapping.size()), () -> assertTrue(mapping.containsKey("value")), () -> assertTrue(mapping.containsKey("agg")), () -> assertEquals(OpenSearchDataType.of(INTEGER), mapping.get("value")), - () -> assertEquals(OpenSearchDataType.of(DATE), mapping.get("agg")) - ); + () -> assertEquals(OpenSearchDataType.of(DATE), mapping.get("agg"))); } public Map tupleValue(String jsonString) { diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/OpenSearchExecutionEngineTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/OpenSearchExecutionEngineTest.java index 330793a5d6..739b70b1b8 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/OpenSearchExecutionEngineTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/OpenSearchExecutionEngineTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.executor; import static com.google.common.collect.ImmutableMap.of; @@ -73,12 +72,12 @@ class OpenSearchExecutionEngineTest { @BeforeEach void setUp() { doAnswer( - invocation -> { - // Run task immediately - Runnable task = invocation.getArgument(0); - task.run(); - return null; - }) + invocation -> { + // Run task immediately + Runnable task = invocation.getArgument(0); + task.run(); + return null; + }) .when(client) .schedule(any()); } @@ -91,22 +90,22 @@ void execute_successfully() { FakePhysicalPlan plan = new FakePhysicalPlan(expected.iterator()); when(protector.protect(plan)).thenReturn(plan); - OpenSearchExecutionEngine executor = new OpenSearchExecutionEngine(client, protector, - new PlanSerializer(null)); + OpenSearchExecutionEngine executor = + new OpenSearchExecutionEngine(client, protector, new PlanSerializer(null)); List actual = new ArrayList<>(); executor.execute( plan, - new 
ResponseListener<>() { - @Override - public void onResponse(QueryResponse response) { - actual.addAll(response.getResults()); - } - - @Override - public void onFailure(Exception e) { - fail("Error occurred during execution", e); - } - }); + new ResponseListener<>() { + @Override + public void onResponse(QueryResponse response) { + actual.addAll(response.getResults()); + } + + @Override + public void onFailure(Exception e) { + fail("Error occurred during execution", e); + } + }); assertTrue(plan.hasOpen); assertEquals(expected, actual); @@ -121,23 +120,23 @@ void execute_with_cursor() { var plan = new FakePhysicalPlan(expected.iterator()); when(protector.protect(plan)).thenReturn(plan); - OpenSearchExecutionEngine executor = new OpenSearchExecutionEngine(client, protector, - new PlanSerializer(null)); + OpenSearchExecutionEngine executor = + new OpenSearchExecutionEngine(client, protector, new PlanSerializer(null)); List actual = new ArrayList<>(); executor.execute( plan, - new ResponseListener<>() { - @Override - public void onResponse(QueryResponse response) { - actual.addAll(response.getResults()); - assertTrue(response.getCursor().toString().startsWith("n:")); - } - - @Override - public void onFailure(Exception e) { - fail("Error occurred during execution", e); - } - }); + new ResponseListener<>() { + @Override + public void onResponse(QueryResponse response) { + actual.addAll(response.getResults()); + assertTrue(response.getCursor().toString().startsWith("n:")); + } + + @Override + public void onFailure(Exception e) { + fail("Error occurred during execution", e); + } + }); assertEquals(expected, actual); } @@ -149,78 +148,84 @@ void execute_with_failure() { when(plan.hasNext()).thenThrow(expected); when(protector.protect(plan)).thenReturn(plan); - OpenSearchExecutionEngine executor = new OpenSearchExecutionEngine(client, protector, - new PlanSerializer(null)); + OpenSearchExecutionEngine executor = + new OpenSearchExecutionEngine(client, protector, new 
PlanSerializer(null)); AtomicReference actual = new AtomicReference<>(); executor.execute( plan, - new ResponseListener<>() { - @Override - public void onResponse(QueryResponse response) { - fail("Expected error didn't happen"); - } - - @Override - public void onFailure(Exception e) { - actual.set(e); - } - }); + new ResponseListener<>() { + @Override + public void onResponse(QueryResponse response) { + fail("Expected error didn't happen"); + } + + @Override + public void onFailure(Exception e) { + actual.set(e); + } + }); assertEquals(expected, actual.get()); verify(plan).close(); } @Test void explain_successfully() { - OpenSearchExecutionEngine executor = new OpenSearchExecutionEngine(client, protector, - new PlanSerializer(null)); + OpenSearchExecutionEngine executor = + new OpenSearchExecutionEngine(client, protector, new PlanSerializer(null)); Settings settings = mock(Settings.class); - when(settings.getSettingValue(SQL_CURSOR_KEEP_ALIVE)) - .thenReturn(TimeValue.timeValueMinutes(1)); + when(settings.getSettingValue(SQL_CURSOR_KEEP_ALIVE)).thenReturn(TimeValue.timeValueMinutes(1)); OpenSearchExprValueFactory exprValueFactory = mock(OpenSearchExprValueFactory.class); final var name = new OpenSearchRequest.IndexName("test"); final int defaultQuerySize = 100; final int maxResultWindow = 10000; final var requestBuilder = new OpenSearchRequestBuilder(defaultQuerySize, exprValueFactory); - PhysicalPlan plan = new OpenSearchIndexScan(mock(OpenSearchClient.class), - maxResultWindow, requestBuilder.build(name, maxResultWindow, - settings.getSettingValue(SQL_CURSOR_KEEP_ALIVE))); + PhysicalPlan plan = + new OpenSearchIndexScan( + mock(OpenSearchClient.class), + maxResultWindow, + requestBuilder.build( + name, maxResultWindow, settings.getSettingValue(SQL_CURSOR_KEEP_ALIVE))); AtomicReference result = new AtomicReference<>(); - executor.explain(plan, new ResponseListener<>() { - @Override - public void onResponse(ExplainResponse response) { - result.set(response); - } - 
- @Override - public void onFailure(Exception e) { - fail(e); - } - }); + executor.explain( + plan, + new ResponseListener<>() { + @Override + public void onResponse(ExplainResponse response) { + result.set(response); + } + + @Override + public void onFailure(Exception e) { + fail(e); + } + }); assertNotNull(result.get()); } @Test void explain_with_failure() { - OpenSearchExecutionEngine executor = new OpenSearchExecutionEngine(client, protector, - new PlanSerializer(null)); + OpenSearchExecutionEngine executor = + new OpenSearchExecutionEngine(client, protector, new PlanSerializer(null)); PhysicalPlan plan = mock(PhysicalPlan.class); when(plan.accept(any(), any())).thenThrow(IllegalStateException.class); AtomicReference result = new AtomicReference<>(); - executor.explain(plan, new ResponseListener<>() { - @Override - public void onResponse(ExplainResponse response) { - fail("Should fail as expected"); - } - - @Override - public void onFailure(Exception e) { - result.set(e); - } - }); + executor.explain( + plan, + new ResponseListener<>() { + @Override + public void onResponse(ExplainResponse response) { + fail("Should fail as expected"); + } + + @Override + public void onFailure(Exception e) { + result.set(e); + } + }); assertNotNull(result.get()); } @@ -234,8 +239,8 @@ void call_add_split_and_open_in_order() { when(protector.protect(plan)).thenReturn(plan); when(executionContext.getSplit()).thenReturn(Optional.of(split)); - OpenSearchExecutionEngine executor = new OpenSearchExecutionEngine(client, protector, - new PlanSerializer(null)); + OpenSearchExecutionEngine executor = + new OpenSearchExecutionEngine(client, protector, new PlanSerializer(null)); List actual = new ArrayList<>(); executor.execute( plan, @@ -266,12 +271,10 @@ private static class FakePhysicalPlan extends TableScanOperator implements Seria private boolean hasSplit; @Override - public void readExternal(ObjectInput in) { - } + public void readExternal(ObjectInput in) {} @Override - public void 
writeExternal(ObjectOutput out) { - } + public void writeExternal(ObjectOutput out) {} @Override public void open() { diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManagerTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManagerTest.java index 6d2b9b13ce..047a510180 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManagerTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/OpenSearchQueryManagerTest.java @@ -32,17 +32,13 @@ @ExtendWith(MockitoExtension.class) class OpenSearchQueryManagerTest { - @Mock - private QueryId queryId; + @Mock private QueryId queryId; - @Mock - private QueryService queryService; + @Mock private QueryService queryService; - @Mock - private UnresolvedPlan plan; + @Mock private UnresolvedPlan plan; - @Mock - private ResponseListener listener; + @Mock private ResponseListener listener; @Test public void submitQuery() { @@ -51,19 +47,20 @@ public void submitQuery() { when(nodeClient.threadPool()).thenReturn(threadPool); AtomicBoolean isRun = new AtomicBoolean(false); - AbstractPlan queryPlan = new QueryPlan(queryId, plan, queryService, listener) { - @Override - public void execute() { - isRun.set(true); - } - }; + AbstractPlan queryPlan = + new QueryPlan(queryId, plan, queryService, listener) { + @Override + public void execute() { + isRun.set(true); + } + }; doAnswer( - invocation -> { - Runnable task = invocation.getArgument(0); - task.run(); - return null; - }) + invocation -> { + Runnable task = invocation.getArgument(0); + task.run(); + return null; + }) .when(threadPool) .schedule(any(), any(), any()); new OpenSearchQueryManager(nodeClient).submit(queryPlan); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/ResourceMonitorPlanTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/ResourceMonitorPlanTest.java index 96e85a8173..26bcdf6d89 
100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/ResourceMonitorPlanTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/ResourceMonitorPlanTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.executor; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -27,17 +26,13 @@ @ExtendWith(MockitoExtension.class) class ResourceMonitorPlanTest { - @Mock - private PhysicalPlan plan; + @Mock private PhysicalPlan plan; - @Mock - private ResourceMonitor resourceMonitor; + @Mock private ResourceMonitor resourceMonitor; - @Mock - private PhysicalPlanNodeVisitor visitor; + @Mock private PhysicalPlanNodeVisitor visitor; - @Mock - private Object context; + @Mock private Object context; private ResourceMonitorPlan monitorPlan; diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/protector/NoopExecutionProtectorTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/protector/NoopExecutionProtectorTest.java index 8dc49aad01..f028f3ea5d 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/protector/NoopExecutionProtectorTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/protector/NoopExecutionProtectorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.executor.protector; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -17,8 +16,7 @@ @ExtendWith(MockitoExtension.class) class NoopExecutionProtectorTest { - @Mock - private PhysicalPlan plan; + @Mock private PhysicalPlan plan; @Test void protect() { diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/protector/OpenSearchExecutionProtectorTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/protector/OpenSearchExecutionProtectorTest.java index fd5e747b5f..b2dc042110 100644 --- 
a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/protector/OpenSearchExecutionProtectorTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/protector/OpenSearchExecutionProtectorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.executor.protector; import static java.util.Collections.emptyList; @@ -74,17 +73,13 @@ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class OpenSearchExecutionProtectorTest { - @Mock - private OpenSearchClient client; + @Mock private OpenSearchClient client; - @Mock - private ResourceMonitor resourceMonitor; + @Mock private ResourceMonitor resourceMonitor; - @Mock - private OpenSearchExprValueFactory exprValueFactory; + @Mock private OpenSearchExprValueFactory exprValueFactory; - @Mock - private OpenSearchSettings settings; + @Mock private OpenSearchSettings settings; private OpenSearchExecutionProtector executionProtector; @@ -106,8 +101,7 @@ void test_protect_indexScan() { Expression filterExpr = literal(ExprBooleanValue.of(true)); List groupByExprs = List.of(named("age", ref("age", INTEGER))); List aggregators = - List.of(named("avg(age)", new AvgAggregator(List.of(ref("age", INTEGER)), - DOUBLE))); + List.of(named("avg(age)", new AvgAggregator(List.of(ref("age", INTEGER)), DOUBLE))); Map mappings = ImmutableMap.of(ref("name", STRING), ref("lastname", STRING)); Pair newEvalField = @@ -118,9 +112,12 @@ void test_protect_indexScan() { Integer offset = 10; final var name = new OpenSearchRequest.IndexName(indexName); - final var request = new OpenSearchRequestBuilder(querySizeLimit, exprValueFactory) - .build(name, maxResultWindow, - settings.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE)); + final var request = + new OpenSearchRequestBuilder(querySizeLimit, exprValueFactory) + .build( + name, + maxResultWindow, + settings.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE)); assertEquals( PhysicalPlanDSL.project( 
PhysicalPlanDSL.limit( @@ -134,8 +131,8 @@ void test_protect_indexScan() { PhysicalPlanDSL.agg( filter( resourceMonitor( - new OpenSearchIndexScan(client, - maxResultWindow, request)), + new OpenSearchIndexScan( + client, maxResultWindow, request)), filterExpr), aggregators, groupByExprs), @@ -161,8 +158,8 @@ void test_protect_indexScan() { PhysicalPlanDSL.rename( PhysicalPlanDSL.agg( filter( - new OpenSearchIndexScan(client, - maxResultWindow, request), + new OpenSearchIndexScan( + client, maxResultWindow, request), filterExpr), aggregators, groupByExprs), @@ -189,21 +186,9 @@ void test_protect_sort_for_windowOperator() { new WindowDefinition(emptyList(), ImmutableList.of(sortItem)); assertEquals( - window( - resourceMonitor( - sort( - values(emptyList()), - sortItem)), - rank, - windowDefinition), + window(resourceMonitor(sort(values(emptyList()), sortItem)), rank, windowDefinition), executionProtector.protect( - window( - sort( - values(emptyList()), - sortItem - ), - rank, - windowDefinition))); + window(sort(values(emptyList()), sortItem), rank, windowDefinition))); } @Test @@ -212,16 +197,8 @@ void test_protect_windowOperator_input() { WindowDefinition windowDefinition = mock(WindowDefinition.class); assertEquals( - window( - resourceMonitor( - values()), - avg, - windowDefinition), - executionProtector.protect( - window( - values(), - avg, - windowDefinition))); + window(resourceMonitor(values()), avg, windowDefinition), + executionProtector.protect(window(values(), avg, windowDefinition))); } @SuppressWarnings("unchecked") @@ -234,20 +211,9 @@ void test_not_protect_windowOperator_input_if_already_protected() { new WindowDefinition(emptyList(), ImmutableList.of(sortItem)); assertEquals( - window( - resourceMonitor( - sort( - values(emptyList()), - sortItem)), - avg, - windowDefinition), + window(resourceMonitor(sort(values(emptyList()), sortItem)), avg, windowDefinition), executionProtector.protect( - window( - sort( - values(emptyList()), - sortItem), - 
avg, - windowDefinition))); + window(sort(values(emptyList()), sortItem), avg, windowDefinition))); } @Test @@ -255,85 +221,80 @@ void test_without_protection() { Expression filterExpr = literal(ExprBooleanValue.of(true)); assertEquals( - filter( - filter(null, filterExpr), - filterExpr), - executionProtector.protect( - filter( - filter(null, filterExpr), - filterExpr) - ) - ); + filter(filter(null, filterExpr), filterExpr), + executionProtector.protect(filter(filter(null, filterExpr), filterExpr))); } @Test void test_visitMLcommons() { NodeClient nodeClient = mock(NodeClient.class); MLCommonsOperator mlCommonsOperator = - new MLCommonsOperator( - values(emptyList()), "kmeans", - new HashMap() {{ - put("centroids", new Literal(3, DataType.INTEGER)); - put("iterations", new Literal(2, DataType.INTEGER)); - put("distance_type", new Literal(null, DataType.STRING)); - }}, - nodeClient - ); + new MLCommonsOperator( + values(emptyList()), + "kmeans", + new HashMap() { + { + put("centroids", new Literal(3, DataType.INTEGER)); + put("iterations", new Literal(2, DataType.INTEGER)); + put("distance_type", new Literal(null, DataType.STRING)); + } + }, + nodeClient); - assertEquals(executionProtector.doProtect(mlCommonsOperator), - executionProtector.visitMLCommons(mlCommonsOperator, null)); + assertEquals( + executionProtector.doProtect(mlCommonsOperator), + executionProtector.visitMLCommons(mlCommonsOperator, null)); } @Test void test_visitAD() { NodeClient nodeClient = mock(NodeClient.class); ADOperator adOperator = - new ADOperator( - values(emptyList()), - new HashMap() {{ - put("shingle_size", new Literal(8, DataType.INTEGER)); - put("time_decay", new Literal(0.0001, DataType.DOUBLE)); - put("time_field", new Literal(null, DataType.STRING)); - }}, - nodeClient - ); + new ADOperator( + values(emptyList()), + new HashMap() { + { + put("shingle_size", new Literal(8, DataType.INTEGER)); + put("time_decay", new Literal(0.0001, DataType.DOUBLE)); + put("time_field", new 
Literal(null, DataType.STRING)); + } + }, + nodeClient); - assertEquals(executionProtector.doProtect(adOperator), - executionProtector.visitAD(adOperator, null)); + assertEquals( + executionProtector.doProtect(adOperator), executionProtector.visitAD(adOperator, null)); } @Test void test_visitML() { NodeClient nodeClient = mock(NodeClient.class); MLOperator mlOperator = - new MLOperator( - values(emptyList()), - new HashMap() {{ - put("action", new Literal("train", DataType.STRING)); - put("algorithm", new Literal("rcf", DataType.STRING)); - put("shingle_size", new Literal(8, DataType.INTEGER)); - put("time_decay", new Literal(0.0001, DataType.DOUBLE)); - put("time_field", new Literal(null, DataType.STRING)); - }}, - nodeClient - ); + new MLOperator( + values(emptyList()), + new HashMap() { + { + put("action", new Literal("train", DataType.STRING)); + put("algorithm", new Literal("rcf", DataType.STRING)); + put("shingle_size", new Literal(8, DataType.INTEGER)); + put("time_decay", new Literal(0.0001, DataType.DOUBLE)); + put("time_field", new Literal(null, DataType.STRING)); + } + }, + nodeClient); - assertEquals(executionProtector.doProtect(mlOperator), - executionProtector.visitML(mlOperator, null)); + assertEquals( + executionProtector.doProtect(mlOperator), executionProtector.visitML(mlOperator, null)); } @Test void test_visitNested() { Set args = Set.of("message.info"); - Map> groupedFieldsByPath = - Map.of("message", List.of("message.info")); + Map> groupedFieldsByPath = Map.of("message", List.of("message.info")); NestedOperator nestedOperator = - new NestedOperator( - values(emptyList()), - args, - groupedFieldsByPath); + new NestedOperator(values(emptyList()), args, groupedFieldsByPath); - assertEquals(executionProtector.doProtect(nestedOperator), + assertEquals( + executionProtector.doProtect(nestedOperator), executionProtector.visitNested(nestedOperator, values(emptyList()))); } diff --git 
a/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthyTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthyTest.java index af4cdc8ce6..a61f7343e6 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthyTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchMemoryHealthyTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.monitor; import static org.junit.jupiter.api.Assertions.assertNotNull; @@ -20,11 +19,9 @@ @ExtendWith(MockitoExtension.class) class OpenSearchMemoryHealthyTest { - @Mock - private OpenSearchMemoryHealthy.RandomFail randomFail; + @Mock private OpenSearchMemoryHealthy.RandomFail randomFail; - @Mock - private OpenSearchMemoryHealthy.MemoryUsage memoryUsage; + @Mock private OpenSearchMemoryHealthy.MemoryUsage memoryUsage; private OpenSearchMemoryHealthy monitor; @@ -45,7 +42,8 @@ void memoryUsageExceedLimitFastFailure() { when(memoryUsage.usage()).thenReturn(10L); when(randomFail.shouldFail()).thenReturn(true); - assertThrows(OpenSearchMemoryHealthy.MemoryUsageExceedFastFailureException.class, + assertThrows( + OpenSearchMemoryHealthy.MemoryUsageExceedFastFailureException.class, () -> monitor.isMemoryHealthy(9L)); } @@ -54,7 +52,8 @@ void memoryUsageExceedLimitWithoutFastFailure() { when(memoryUsage.usage()).thenReturn(10L); when(randomFail.shouldFail()).thenReturn(false); - assertThrows(OpenSearchMemoryHealthy.MemoryUsageExceedException.class, + assertThrows( + OpenSearchMemoryHealthy.MemoryUsageExceedException.class, () -> monitor.isMemoryHealthy(9L)); } @@ -72,8 +71,7 @@ void randomFail() { @Test void setMemoryUsage() { - OpenSearchMemoryHealthy.MemoryUsage usage = - new OpenSearchMemoryHealthy.MemoryUsage(); + OpenSearchMemoryHealthy.MemoryUsage usage = new OpenSearchMemoryHealthy.MemoryUsage(); assertTrue(usage.usage() > 0); } } diff --git 
a/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitorTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitorTest.java index cd27b0710e..f56d8cb81b 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitorTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/monitor/OpenSearchResourceMonitorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.monitor; import static org.junit.jupiter.api.Assertions.assertFalse; @@ -24,11 +23,9 @@ @ExtendWith(MockitoExtension.class) class OpenSearchResourceMonitorTest { - @Mock - private Settings settings; + @Mock private Settings settings; - @Mock - private OpenSearchMemoryHealthy memoryMonitor; + @Mock private OpenSearchMemoryHealthy memoryMonitor; @BeforeEach public void setup() { @@ -47,8 +44,8 @@ void isHealthy() { @Test void notHealthyFastFailure() { - when(memoryMonitor.isMemoryHealthy(anyLong())).thenThrow( - OpenSearchMemoryHealthy.MemoryUsageExceedFastFailureException.class); + when(memoryMonitor.isMemoryHealthy(anyLong())) + .thenThrow(OpenSearchMemoryHealthy.MemoryUsageExceedFastFailureException.class); OpenSearchResourceMonitor resourceMonitor = new OpenSearchResourceMonitor(settings, memoryMonitor); @@ -58,8 +55,8 @@ void notHealthyFastFailure() { @Test void notHealthyWithRetry() { - when(memoryMonitor.isMemoryHealthy(anyLong())).thenThrow( - OpenSearchMemoryHealthy.MemoryUsageExceedException.class); + when(memoryMonitor.isMemoryHealthy(anyLong())) + .thenThrow(OpenSearchMemoryHealthy.MemoryUsageExceedException.class); OpenSearchResourceMonitor resourceMonitor = new OpenSearchResourceMonitor(settings, memoryMonitor); @@ -70,8 +67,9 @@ void notHealthyWithRetry() { @Test void healthyWithRetry() { - when(memoryMonitor.isMemoryHealthy(anyLong())).thenThrow( - 
OpenSearchMemoryHealthy.MemoryUsageExceedException.class).thenReturn(true); + when(memoryMonitor.isMemoryHealthy(anyLong())) + .thenThrow(OpenSearchMemoryHealthy.MemoryUsageExceedException.class) + .thenReturn(true); OpenSearchResourceMonitor resourceMonitor = new OpenSearchResourceMonitor(settings, memoryMonitor); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/planner/physical/MLCommonsOperatorTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/planner/physical/MLCommonsOperatorTest.java index 20d2f633dd..e6d2bac85b 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/planner/physical/MLCommonsOperatorTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/planner/physical/MLCommonsOperatorTest.java @@ -52,60 +52,60 @@ @MockitoSettings(strictness = Strictness.LENIENT) @RunWith(MockitoJUnitRunner.Silent.class) public class MLCommonsOperatorTest { - @Mock - private PhysicalPlan input; + @Mock private PhysicalPlan input; - @Mock(answer = Answers.RETURNS_DEEP_STUBS) + @Mock(answer = Answers.RETURNS_DEEP_STUBS) private NodeClient nodeClient; private MLCommonsOperator mlCommonsOperator; - @Mock(answer = Answers.RETURNS_DEEP_STUBS) + @Mock(answer = Answers.RETURNS_DEEP_STUBS) private MachineLearningNodeClient machineLearningNodeClient; @BeforeEach void setUp() { Map arguments = new HashMap<>(); - arguments.put("k1",AstDSL.intLiteral(3)); - arguments.put("k2",AstDSL.stringLiteral("v1")); - arguments.put("k3",AstDSL.booleanLiteral(true)); - arguments.put("k4",AstDSL.doubleLiteral(2.0D)); - arguments.put("k5",AstDSL.shortLiteral((short)2)); - arguments.put("k6",AstDSL.longLiteral(2L)); - arguments.put("k7",AstDSL.floatLiteral(2F)); - - - mlCommonsOperator = new MLCommonsOperator(input, "kmeans", arguments, - nodeClient); + arguments.put("k1", AstDSL.intLiteral(3)); + arguments.put("k2", AstDSL.stringLiteral("v1")); + arguments.put("k3", AstDSL.booleanLiteral(true)); + arguments.put("k4", 
AstDSL.doubleLiteral(2.0D)); + arguments.put("k5", AstDSL.shortLiteral((short) 2)); + arguments.put("k6", AstDSL.longLiteral(2L)); + arguments.put("k7", AstDSL.floatLiteral(2F)); + + mlCommonsOperator = new MLCommonsOperator(input, "kmeans", arguments, nodeClient); when(input.hasNext()).thenReturn(true).thenReturn(false); ImmutableMap.Builder resultBuilder = new ImmutableMap.Builder<>(); resultBuilder.put("k1", new ExprIntegerValue(2)); when(input.next()).thenReturn(ExprTupleValue.fromExprValueMap(resultBuilder.build())); - DataFrame dataFrame = DataFrameBuilder - .load(Collections.singletonList( - ImmutableMap.builder().put("result-k1", 2D) - .put("result-k2", 1) - .put("result-k3", "v3") - .put("result-k4", true) - .put("result-k5", (short)2) - .put("result-k6", 2L) - .put("result-k7", 2F) - .build()) - ); - MLPredictionOutput mlPredictionOutput = MLPredictionOutput.builder() + DataFrame dataFrame = + DataFrameBuilder.load( + Collections.singletonList( + ImmutableMap.builder() + .put("result-k1", 2D) + .put("result-k2", 1) + .put("result-k3", "v3") + .put("result-k4", true) + .put("result-k5", (short) 2) + .put("result-k6", 2L) + .put("result-k7", 2F) + .build())); + MLPredictionOutput mlPredictionOutput = + MLPredictionOutput.builder() .taskId("test_task_id") .status("test_status") .predictionResult(dataFrame) .build(); try (MockedStatic mlClientMockedStatic = Mockito.mockStatic(MLClient.class)) { - mlClientMockedStatic.when(() -> MLClient.getMLClient(any(NodeClient.class))) - .thenReturn(machineLearningNodeClient); - when(machineLearningNodeClient.trainAndPredict(any(MLInput.class)) - .actionGet(anyLong(), - eq(TimeUnit.SECONDS))) - .thenReturn(mlPredictionOutput); + mlClientMockedStatic + .when(() -> MLClient.getMLClient(any(NodeClient.class))) + .thenReturn(machineLearningNodeClient); + when(machineLearningNodeClient + .trainAndPredict(any(MLInput.class)) + .actionGet(anyLong(), eq(TimeUnit.SECONDS))) + .thenReturn(mlPredictionOutput); } } @@ -120,17 +120,17 
@@ public void testOpen() { @Test public void testAccept() { - PhysicalPlanNodeVisitor physicalPlanNodeVisitor - = new PhysicalPlanNodeVisitor() {}; + PhysicalPlanNodeVisitor physicalPlanNodeVisitor = + new PhysicalPlanNodeVisitor() {}; assertNull(mlCommonsOperator.accept(physicalPlanNodeVisitor, null)); } @Test public void testConvertArgumentToMLParameter_UnsupportedType() { Map argument = new HashMap<>(); - argument.put("k2",AstDSL.dateLiteral("2020-10-31")); - assertThrows(IllegalArgumentException.class, () -> mlCommonsOperator - .convertArgumentToMLParameter(argument, "LINEAR_REGRESSION")); + argument.put("k2", AstDSL.dateLiteral("2020-10-31")); + assertThrows( + IllegalArgumentException.class, + () -> mlCommonsOperator.convertArgumentToMLParameter(argument, "LINEAR_REGRESSION")); } - } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/planner/physical/MLOperatorTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/planner/physical/MLOperatorTest.java index 7a73468391..0a3f56285f 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/planner/physical/MLOperatorTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/planner/physical/MLOperatorTest.java @@ -57,29 +57,27 @@ @MockitoSettings(strictness = Strictness.LENIENT) @RunWith(MockitoJUnitRunner.Silent.class) public class MLOperatorTest { - @Mock - private PhysicalPlan input; + @Mock private PhysicalPlan input; - @Mock - PlainActionFuture actionFuture; + @Mock PlainActionFuture actionFuture; - @Mock(answer = Answers.RETURNS_DEEP_STUBS) + @Mock(answer = Answers.RETURNS_DEEP_STUBS) private NodeClient nodeClient; private MLOperator mlOperator; Map arguments = new HashMap<>(); - @Mock(answer = Answers.RETURNS_DEEP_STUBS) + @Mock(answer = Answers.RETURNS_DEEP_STUBS) private MachineLearningNodeClient machineLearningNodeClient; void setUp(boolean isPredict) { - arguments.put("k1",AstDSL.intLiteral(3)); - arguments.put("k2",AstDSL.stringLiteral("v1")); - 
arguments.put("k3",AstDSL.booleanLiteral(true)); - arguments.put("k4",AstDSL.doubleLiteral(2.0D)); - arguments.put("k5",AstDSL.shortLiteral((short)2)); - arguments.put("k6",AstDSL.longLiteral(2L)); - arguments.put("k7",AstDSL.floatLiteral(2F)); + arguments.put("k1", AstDSL.intLiteral(3)); + arguments.put("k2", AstDSL.stringLiteral("v1")); + arguments.put("k3", AstDSL.booleanLiteral(true)); + arguments.put("k4", AstDSL.doubleLiteral(2.0D)); + arguments.put("k5", AstDSL.shortLiteral((short) 2)); + arguments.put("k6", AstDSL.longLiteral(2L)); + arguments.put("k7", AstDSL.floatLiteral(2F)); mlOperator = new MLOperator(input, arguments, nodeClient); when(input.hasNext()).thenReturn(true).thenReturn(false); @@ -87,49 +85,50 @@ void setUp(boolean isPredict) { resultBuilder.put("k1", new ExprIntegerValue(2)); when(input.next()).thenReturn(ExprTupleValue.fromExprValueMap(resultBuilder.build())); - DataFrame dataFrame = DataFrameBuilder - .load(Collections.singletonList( - ImmutableMap.builder().put("result-k1", 2D) - .put("result-k2", 1) - .put("result-k3", "v3") - .put("result-k4", true) - .put("result-k5", (short)2) - .put("result-k6", 2L) - .put("result-k7", 2F) - .build()) - ); + DataFrame dataFrame = + DataFrameBuilder.load( + Collections.singletonList( + ImmutableMap.builder() + .put("result-k1", 2D) + .put("result-k2", 1) + .put("result-k3", "v3") + .put("result-k4", true) + .put("result-k5", (short) 2) + .put("result-k6", 2L) + .put("result-k7", 2F) + .build())); MLOutput mlOutput; if (isPredict) { - mlOutput = MLPredictionOutput.builder() + mlOutput = + MLPredictionOutput.builder() .taskId("test_task_id") .status("test_status") .predictionResult(dataFrame) .build(); } else { - mlOutput = MLTrainingOutput.builder() + mlOutput = + MLTrainingOutput.builder() .taskId("test_task_id") .status("test_status") .modelId("test_model_id") .build(); } - when(actionFuture.actionGet(anyLong(), eq(TimeUnit.SECONDS))) - .thenReturn(mlOutput); - 
when(machineLearningNodeClient.run(any(MLInput.class), any())) - .thenReturn(actionFuture); + when(actionFuture.actionGet(anyLong(), eq(TimeUnit.SECONDS))).thenReturn(mlOutput); + when(machineLearningNodeClient.run(any(MLInput.class), any())).thenReturn(actionFuture); } void setUpPredict() { - arguments.put(ACTION,AstDSL.stringLiteral(PREDICT)); - arguments.put(ALGO,AstDSL.stringLiteral(KMEANS)); - arguments.put("modelid",AstDSL.stringLiteral("dummyID")); + arguments.put(ACTION, AstDSL.stringLiteral(PREDICT)); + arguments.put(ALGO, AstDSL.stringLiteral(KMEANS)); + arguments.put("modelid", AstDSL.stringLiteral("dummyID")); setUp(true); } void setUpTrain() { - arguments.put(ACTION,AstDSL.stringLiteral(TRAIN)); - arguments.put(ALGO,AstDSL.stringLiteral(KMEANS)); + arguments.put(ACTION, AstDSL.stringLiteral(TRAIN)); + arguments.put(ALGO, AstDSL.stringLiteral(KMEANS)); setUp(false); } @@ -162,10 +161,9 @@ public void testAccept() { setUpPredict(); try (MockedStatic mlClientMockedStatic = Mockito.mockStatic(MLClient.class)) { when(MLClient.getMLClient(any(NodeClient.class))).thenReturn(machineLearningNodeClient); - PhysicalPlanNodeVisitor physicalPlanNodeVisitor - = new PhysicalPlanNodeVisitor() {}; + PhysicalPlanNodeVisitor physicalPlanNodeVisitor = + new PhysicalPlanNodeVisitor() {}; assertNull(mlOperator.accept(physicalPlanNodeVisitor, null)); } } - } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequestTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequestTest.java index b6966f2403..d2bc5b0641 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequestTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequestTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -41,32 +40,23 
@@ @ExtendWith(MockitoExtension.class) public class OpenSearchQueryRequestTest { - @Mock - private Function searchAction; + @Mock private Function searchAction; - @Mock - private Function scrollAction; + @Mock private Function scrollAction; - @Mock - private Consumer cleanAction; + @Mock private Consumer cleanAction; - @Mock - private SearchResponse searchResponse; + @Mock private SearchResponse searchResponse; - @Mock - private SearchHits searchHits; + @Mock private SearchHits searchHits; - @Mock - private SearchHit searchHit; + @Mock private SearchHit searchHit; - @Mock - private SearchSourceBuilder sourceBuilder; + @Mock private SearchSourceBuilder sourceBuilder; - @Mock - private FetchSourceContext fetchSourceContext; + @Mock private FetchSourceContext fetchSourceContext; - @Mock - private OpenSearchExprValueFactory factory; + @Mock private OpenSearchExprValueFactory factory; private final OpenSearchQueryRequest request = new OpenSearchQueryRequest("test", 200, factory, List.of()); @@ -76,12 +66,9 @@ public class OpenSearchQueryRequestTest { @Test void search() { - OpenSearchQueryRequest request = new OpenSearchQueryRequest( - new OpenSearchRequest.IndexName("test"), - sourceBuilder, - factory, - List.of() - ); + OpenSearchQueryRequest request = + new OpenSearchQueryRequest( + new OpenSearchRequest.IndexName("test"), sourceBuilder, factory, List.of()); when(searchAction.apply(any())).thenReturn(searchResponse); when(searchResponse.getHits()).thenReturn(searchHits); @@ -96,12 +83,9 @@ void search() { @Test void search_withoutContext() { - OpenSearchQueryRequest request = new OpenSearchQueryRequest( - new OpenSearchRequest.IndexName("test"), - sourceBuilder, - factory, - List.of() - ); + OpenSearchQueryRequest request = + new OpenSearchQueryRequest( + new OpenSearchRequest.IndexName("test"), sourceBuilder, factory, List.of()); when(searchAction.apply(any())).thenReturn(searchResponse); when(searchResponse.getHits()).thenReturn(searchHits); @@ -113,12 +97,9 @@ 
void search_withoutContext() { @Test void search_withIncludes() { - OpenSearchQueryRequest request = new OpenSearchQueryRequest( - new OpenSearchRequest.IndexName("test"), - sourceBuilder, - factory, - List.of() - ); + OpenSearchQueryRequest request = + new OpenSearchQueryRequest( + new OpenSearchRequest.IndexName("test"), sourceBuilder, factory, List.of()); String[] includes = {"_id", "_index"}; when(searchAction.apply(any())).thenReturn(searchResponse); @@ -144,13 +125,15 @@ void clean() { void searchRequest() { request.getSourceBuilder().query(QueryBuilders.termQuery("name", "John")); - assertSearchRequest(new SearchRequest() - .indices("test") - .source(new SearchSourceBuilder() - .timeout(DEFAULT_QUERY_TIMEOUT) - .from(0) - .size(200) - .query(QueryBuilders.termQuery("name", "John"))), + assertSearchRequest( + new SearchRequest() + .indices("test") + .source( + new SearchSourceBuilder() + .timeout(DEFAULT_QUERY_TIMEOUT) + .from(0) + .size(200) + .query(QueryBuilders.termQuery("name", "John"))), request); } @@ -161,28 +144,31 @@ void searchCrossClusterRequest() { assertSearchRequest( new SearchRequest() .indices("ccs:test") - .source(new SearchSourceBuilder() - .timeout(DEFAULT_QUERY_TIMEOUT) - .from(0) - .size(200) - .query(QueryBuilders.termQuery("name", "John"))), + .source( + new SearchSourceBuilder() + .timeout(DEFAULT_QUERY_TIMEOUT) + .from(0) + .size(200) + .query(QueryBuilders.termQuery("name", "John"))), remoteRequest); } @Test void writeTo_unsupported() { - assertThrows(UnsupportedOperationException.class, - () -> request.writeTo(mock(StreamOutput.class))); + assertThrows( + UnsupportedOperationException.class, () -> request.writeTo(mock(StreamOutput.class))); } private void assertSearchRequest(SearchRequest expected, OpenSearchQueryRequest request) { - Function querySearch = searchRequest -> { - assertEquals(expected, searchRequest); - return when(mock(SearchResponse.class).getHits()) - .thenReturn(new SearchHits(new SearchHit[0], - new TotalHits(0, 
TotalHits.Relation.EQUAL_TO), 0.0f)) - .getMock(); - }; + Function querySearch = + searchRequest -> { + assertEquals(expected, searchRequest); + return when(mock(SearchResponse.class).getHits()) + .thenReturn( + new SearchHits( + new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0.0f)) + .getMock(); + }; request.search(querySearch, searchScrollRequest -> null); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilderTest.java index 483ea1290e..5bb0a2207b 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request; import static org.junit.Assert.assertThrows; @@ -71,11 +70,10 @@ class OpenSearchRequestBuilderTest { private static final Integer DEFAULT_LIMIT = 200; private static final Integer MAX_RESULT_WINDOW = 500; - private static final OpenSearchRequest.IndexName indexName - = new OpenSearchRequest.IndexName("test"); + private static final OpenSearchRequest.IndexName indexName = + new OpenSearchRequest.IndexName("test"); - @Mock - private OpenSearchExprValueFactory exprValueFactory; + @Mock private OpenSearchExprValueFactory exprValueFactory; private OpenSearchRequestBuilder requestBuilder; @@ -99,7 +97,8 @@ void build_query_request() { .size(limit) .timeout(DEFAULT_QUERY_TIMEOUT) .trackScores(true), - exprValueFactory, List.of()), + exprValueFactory, + List.of()), requestBuilder.build(indexName, MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT)); } @@ -111,12 +110,14 @@ void build_scroll_request_with_correct_size() { assertEquals( new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), + new 
OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), new SearchSourceBuilder() .from(offset) .size(MAX_RESULT_WINDOW - offset) .timeout(DEFAULT_QUERY_TIMEOUT), - exprValueFactory, List.of()), + exprValueFactory, + List.of()), requestBuilder.build(indexName, MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT)); } @@ -126,33 +127,32 @@ void test_push_down_query() { requestBuilder.pushDownFilter(query); var r = requestBuilder.build(indexName, MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT); - Function querySearch = searchRequest -> { - assertEquals( - new SearchSourceBuilder() - .from(DEFAULT_OFFSET) - .size(DEFAULT_LIMIT) - .timeout(DEFAULT_QUERY_TIMEOUT) - .query(query) - .sort(DOC_FIELD_NAME, ASC), - searchRequest.source() - ); - return mock(); - }; - Function scrollSearch = searchScrollRequest -> { - throw new UnsupportedOperationException(); - }; + Function querySearch = + searchRequest -> { + assertEquals( + new SearchSourceBuilder() + .from(DEFAULT_OFFSET) + .size(DEFAULT_LIMIT) + .timeout(DEFAULT_QUERY_TIMEOUT) + .query(query) + .sort(DOC_FIELD_NAME, ASC), + searchRequest.source()); + return mock(); + }; + Function scrollSearch = + searchScrollRequest -> { + throw new UnsupportedOperationException(); + }; r.search(querySearch, scrollSearch); - } @Test void test_push_down_aggregation() { - AggregationBuilder aggBuilder = AggregationBuilders.composite( - "composite_buckets", - Collections.singletonList(new TermsValuesSourceBuilder("longA"))); + AggregationBuilder aggBuilder = + AggregationBuilders.composite( + "composite_buckets", Collections.singletonList(new TermsValuesSourceBuilder("longA"))); OpenSearchAggregationResponseParser responseParser = - new CompositeAggregationParser( - new SingleValueParser("AVG(intA)")); + new CompositeAggregationParser(new SingleValueParser("AVG(intA)")); requestBuilder.pushDownAggregation(Pair.of(List.of(aggBuilder), responseParser)); assertEquals( @@ -161,8 +161,7 @@ void test_push_down_aggregation() { .size(0) 
.timeout(DEFAULT_QUERY_TIMEOUT) .aggregation(aggBuilder), - requestBuilder.getSourceBuilder() - ); + requestBuilder.getSourceBuilder()); verify(exprValueFactory).setParser(responseParser); } @@ -184,21 +183,25 @@ void test_push_down_query_and_sort() { requestBuilder); } - void assertSearchSourceBuilder(SearchSourceBuilder expected, - OpenSearchRequestBuilder requestBuilder) + void assertSearchSourceBuilder( + SearchSourceBuilder expected, OpenSearchRequestBuilder requestBuilder) throws UnsupportedOperationException { - Function querySearch = searchRequest -> { - assertEquals(expected, searchRequest.source()); - return when(mock(SearchResponse.class).getHits()) - .thenReturn(new SearchHits(new SearchHit[0], new TotalHits(0, - TotalHits.Relation.EQUAL_TO), 0.0f)) - .getMock(); - }; - Function scrollSearch = searchScrollRequest -> { - throw new UnsupportedOperationException(); - }; - requestBuilder.build(indexName, MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT).search( - querySearch, scrollSearch); + Function querySearch = + searchRequest -> { + assertEquals(expected, searchRequest.source()); + return when(mock(SearchResponse.class).getHits()) + .thenReturn( + new SearchHits( + new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0.0f)) + .getMock(); + }; + Function scrollSearch = + searchScrollRequest -> { + throw new UnsupportedOperationException(); + }; + requestBuilder + .build(indexName, MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT) + .search(querySearch, scrollSearch); } @Test @@ -231,9 +234,8 @@ void test_push_down_non_field_sort() { @Test void test_push_down_multiple_sort() { - requestBuilder.pushDownSort(List.of( - SortBuilders.fieldSort("intA"), - SortBuilders.fieldSort("intB"))); + requestBuilder.pushDownSort( + List.of(SortBuilders.fieldSort("intA"), SortBuilders.fieldSort("intB"))); assertSearchSourceBuilder( new SearchSourceBuilder() @@ -255,7 +257,7 @@ void test_push_down_project() { .from(DEFAULT_OFFSET) .size(DEFAULT_LIMIT) 
.timeout(DEFAULT_QUERY_TIMEOUT) - .fetchSource(new String[]{"intA"}, new String[0]), + .fetchSource(new String[] {"intA"}, new String[0]), requestBuilder); assertEquals( @@ -285,7 +287,7 @@ void test_push_down_project_limit() { .from(offset) .size(limit) .timeout(DEFAULT_QUERY_TIMEOUT) - .fetchSource(new String[]{"intA"}, new String[0]), + .fetchSource(new String[] {"intA"}, new String[0]), requestBuilder); assertEquals( @@ -315,7 +317,7 @@ void test_push_down_project_limit_and_offset() { .from(offset) .size(limit) .timeout(DEFAULT_QUERY_TIMEOUT) - .fetchSource(new String[]{"intA"}, new String[0]), + .fetchSource(new String[] {"intA"}, new String[0]), requestBuilder); assertEquals( @@ -333,24 +335,25 @@ void test_push_down_project_limit_and_offset() { @Test void test_push_down_nested() { - List> args = List.of( - Map.of( - "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + List> args = + List.of( + Map.of( + "field", new ReferenceExpression("message.info", STRING), + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( - new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null) - ); + new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null)); LogicalNested nested = new LogicalNested(null, args, projectList); requestBuilder.pushDownNested(nested.getFields()); - NestedQueryBuilder nestedQuery = nestedQuery("message", matchAllQuery(), ScoreMode.None) - .innerHit(new InnerHitBuilder().setFetchSourceContext( - new FetchSourceContext(true, new String[]{"message.info"}, null))); + NestedQueryBuilder nestedQuery = + nestedQuery("message", matchAllQuery(), ScoreMode.None) + .innerHit( + new InnerHitBuilder() + .setFetchSourceContext( + new FetchSourceContext(true, new String[] {"message.info"}, null))); assertSearchSourceBuilder( new SearchSourceBuilder() @@ -363,28 +366,29 @@ void test_push_down_nested() { @Test void 
test_push_down_multiple_nested_with_same_path() { - List> args = List.of( - Map.of( - "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ), - Map.of( - "field", new ReferenceExpression("message.from", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + List> args = + List.of( + Map.of( + "field", new ReferenceExpression("message.info", STRING), + "path", new ReferenceExpression("message", STRING)), + Map.of( + "field", new ReferenceExpression("message.from", STRING), + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null), - new NamedExpression("message.from", DSL.nested(DSL.ref("message.from", STRING)), null) - ); + new NamedExpression("message.from", DSL.nested(DSL.ref("message.from", STRING)), null)); LogicalNested nested = new LogicalNested(null, args, projectList); requestBuilder.pushDownNested(nested.getFields()); - NestedQueryBuilder nestedQuery = nestedQuery("message", matchAllQuery(), ScoreMode.None) - .innerHit(new InnerHitBuilder().setFetchSourceContext( - new FetchSourceContext(true, new String[]{"message.info", "message.from"}, null))); + NestedQueryBuilder nestedQuery = + nestedQuery("message", matchAllQuery(), ScoreMode.None) + .innerHit( + new InnerHitBuilder() + .setFetchSourceContext( + new FetchSourceContext( + true, new String[] {"message.info", "message.from"}, null))); assertSearchSourceBuilder( new SearchSourceBuilder() .query(QueryBuilders.boolQuery().filter(QueryBuilders.boolQuery().must(nestedQuery))) @@ -396,35 +400,35 @@ void test_push_down_multiple_nested_with_same_path() { @Test void test_push_down_nested_with_filter() { - List> args = List.of( - Map.of( - "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + List> args = + List.of( + Map.of( + "field", new 
ReferenceExpression("message.info", STRING), + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( - new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null) - ); + new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null)); LogicalNested nested = new LogicalNested(null, args, projectList); requestBuilder.getSourceBuilder().query(QueryBuilders.rangeQuery("myNum").gt(3)); requestBuilder.pushDownNested(nested.getFields()); - NestedQueryBuilder nestedQuery = nestedQuery("message", matchAllQuery(), ScoreMode.None) - .innerHit(new InnerHitBuilder().setFetchSourceContext( - new FetchSourceContext(true, new String[]{"message.info"}, null))); + NestedQueryBuilder nestedQuery = + nestedQuery("message", matchAllQuery(), ScoreMode.None) + .innerHit( + new InnerHitBuilder() + .setFetchSourceContext( + new FetchSourceContext(true, new String[] {"message.info"}, null))); assertSearchSourceBuilder( new SearchSourceBuilder() .query( - QueryBuilders.boolQuery().filter( - QueryBuilders.boolQuery() - .must(QueryBuilders.rangeQuery("myNum").gt(3)) - .must(nestedQuery) - ) - ) + QueryBuilders.boolQuery() + .filter( + QueryBuilders.boolQuery() + .must(QueryBuilders.rangeQuery("myNum").gt(3)) + .must(nestedQuery))) .from(DEFAULT_OFFSET) .size(DEFAULT_LIMIT) .timeout(DEFAULT_QUERY_TIMEOUT), @@ -433,17 +437,15 @@ void test_push_down_nested_with_filter() { @Test void testPushDownNestedWithNestedFilter() { - List> args = List.of( - Map.of( - "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + List> args = + List.of( + Map.of( + "field", new ReferenceExpression("message.info", STRING), + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( - new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null) - ); + new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", 
STRING)), null)); QueryBuilder innerFilterQuery = QueryBuilders.rangeQuery("myNum").gt(3); QueryBuilder filterQuery = @@ -452,20 +454,20 @@ void testPushDownNestedWithNestedFilter() { requestBuilder.getSourceBuilder().query(filterQuery); requestBuilder.pushDownNested(nested.getFields()); - NestedQueryBuilder nestedQuery = nestedQuery("message", matchAllQuery(), ScoreMode.None) - .innerHit(new InnerHitBuilder().setFetchSourceContext( - new FetchSourceContext(true, new String[]{"message.info"}, null))); - - assertSearchSourceBuilder(new SearchSourceBuilder() - .query( - QueryBuilders.boolQuery().filter( - QueryBuilders.boolQuery() - .must(filterQuery) - ) - ) - .from(DEFAULT_OFFSET) - .size(DEFAULT_LIMIT) - .timeout(DEFAULT_QUERY_TIMEOUT), requestBuilder); + NestedQueryBuilder nestedQuery = + nestedQuery("message", matchAllQuery(), ScoreMode.None) + .innerHit( + new InnerHitBuilder() + .setFetchSourceContext( + new FetchSourceContext(true, new String[] {"message.info"}, null))); + + assertSearchSourceBuilder( + new SearchSourceBuilder() + .query(QueryBuilders.boolQuery().filter(QueryBuilders.boolQuery().must(filterQuery))) + .from(DEFAULT_OFFSET) + .size(DEFAULT_LIMIT) + .timeout(DEFAULT_QUERY_TIMEOUT), + requestBuilder); } @Test @@ -479,8 +481,9 @@ void test_push_type_mapping() { @Test void push_down_highlight_with_repeating_fields() { requestBuilder.pushDownHighlight("name", Map.of()); - var exception = assertThrows(SemanticCheckException.class, () -> - requestBuilder.pushDownHighlight("name", Map.of())); + var exception = + assertThrows( + SemanticCheckException.class, () -> requestBuilder.pushDownHighlight("name", Map.of())); assertEquals("Duplicate field name in highlight", exception.getMessage()); } @@ -488,10 +491,7 @@ void push_down_highlight_with_repeating_fields() { void push_down_page_size() { requestBuilder.pushDownPageSize(3); assertSearchSourceBuilder( - new SearchSourceBuilder() - .from(DEFAULT_OFFSET) - .size(3) - .timeout(DEFAULT_QUERY_TIMEOUT), + 
new SearchSourceBuilder().from(DEFAULT_OFFSET).size(3).timeout(DEFAULT_QUERY_TIMEOUT), requestBuilder); } @@ -499,7 +499,8 @@ void push_down_page_size() { void exception_when_non_zero_offset_and_page_size() { requestBuilder.pushDownPageSize(3); requestBuilder.pushDownLimit(300, 2); - assertThrows(UnsupportedOperationException.class, + assertThrows( + UnsupportedOperationException.class, () -> requestBuilder.build(indexName, MAX_RESULT_WINDOW, DEFAULT_QUERY_TIMEOUT)); } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequestTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequestTest.java index 4b9233dbc1..66cb6bf14c 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequestTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequestTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request; import static org.junit.jupiter.api.Assertions.assertAll; @@ -49,50 +48,48 @@ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class OpenSearchScrollRequestTest { - public static final OpenSearchRequest.IndexName INDEX_NAME - = new OpenSearchRequest.IndexName("test"); + public static final OpenSearchRequest.IndexName INDEX_NAME = + new OpenSearchRequest.IndexName("test"); public static final TimeValue SCROLL_TIMEOUT = TimeValue.timeValueMinutes(1); - @Mock - private SearchResponse searchResponse; + @Mock private SearchResponse searchResponse; - @Mock - private SearchHits searchHits; + @Mock private SearchHits searchHits; - @Mock - private SearchHit searchHit; + @Mock private SearchHit searchHit; - @Mock - private SearchSourceBuilder sourceBuilder; + @Mock private SearchSourceBuilder sourceBuilder; - @Mock - private OpenSearchExprValueFactory factory; + @Mock private OpenSearchExprValueFactory factory; private final SearchSourceBuilder 
searchSourceBuilder = new SearchSourceBuilder(); - private final OpenSearchScrollRequest request = new OpenSearchScrollRequest( - INDEX_NAME, SCROLL_TIMEOUT, - searchSourceBuilder, factory, List.of()); + private final OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + INDEX_NAME, SCROLL_TIMEOUT, searchSourceBuilder, factory, List.of()); @Test void constructor() { - var request = new OpenSearchScrollRequest(INDEX_NAME, SCROLL_TIMEOUT, - searchSourceBuilder, factory, List.of("test")); + var request = + new OpenSearchScrollRequest( + INDEX_NAME, SCROLL_TIMEOUT, searchSourceBuilder, factory, List.of("test")); assertEquals(List.of("test"), request.getIncludes()); } @Test void searchRequest() { searchSourceBuilder.query(QueryBuilders.termQuery("name", "John")); - request.search(searchRequest -> { - assertEquals( - new SearchRequest() - .indices("test") - .scroll(TimeValue.timeValueMinutes(1)) - .source(new SearchSourceBuilder().query(QueryBuilders.termQuery("name", "John"))), - searchRequest); - SearchHits searchHitsMock = when(mock(SearchHits.class).getHits()) - .thenReturn(new SearchHit[0]).getMock(); - return when(mock(SearchResponse.class).getHits()).thenReturn(searchHitsMock).getMock(); - }, searchScrollRequest -> null); + request.search( + searchRequest -> { + assertEquals( + new SearchRequest() + .indices("test") + .scroll(TimeValue.timeValueMinutes(1)) + .source(new SearchSourceBuilder().query(QueryBuilders.termQuery("name", "John"))), + searchRequest); + SearchHits searchHitsMock = + when(mock(SearchHits.class).getHits()).thenReturn(new SearchHit[0]).getMock(); + return when(mock(SearchResponse.class).getHits()).thenReturn(searchHitsMock).getMock(); + }, + searchScrollRequest -> null); } @Test @@ -110,21 +107,19 @@ void isScrollStarted() { void scrollRequest() { request.setScrollId("scroll123"); assertEquals( - new SearchScrollRequest() - .scroll(TimeValue.timeValueMinutes(1)) - .scrollId("scroll123"), + new 
SearchScrollRequest().scroll(TimeValue.timeValueMinutes(1)).scrollId("scroll123"), request.scrollRequest()); } @Test void search() { - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), - TimeValue.timeValueMinutes(1), - sourceBuilder, - factory, - List.of() - ); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + sourceBuilder, + factory, + List.of()); when(searchResponse.getHits()).thenReturn(searchHits); when(searchHits.getHits()).thenReturn(new SearchHit[] {searchHit}); @@ -135,13 +130,13 @@ void search() { @Test void search_without_context() { - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), - TimeValue.timeValueMinutes(1), - sourceBuilder, - factory, - List.of() - ); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + sourceBuilder, + factory, + List.of()); when(searchResponse.getHits()).thenReturn(searchHits); when(searchHits.getHits()).thenReturn(new SearchHit[] {searchHit}); @@ -154,13 +149,13 @@ void search_without_context() { @SneakyThrows void search_without_scroll_and_initial_request_should_throw() { // Steps: serialize a not used request, deserialize it, then use - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), - TimeValue.timeValueMinutes(1), - sourceBuilder, - factory, - List.of() - ); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + sourceBuilder, + factory, + List.of()); var outStream = new BytesStreamOutput(); request.writeTo(outStream); outStream.flush(); @@ -172,20 +167,21 @@ void search_without_scroll_and_initial_request_should_throw() { assertAll( () -> assertFalse(request2.isScroll()), () -> 
assertNull(request2.getInitialSearchRequest()), - () -> assertThrows(UnsupportedOperationException.class, - () -> request2.search(sr -> fail("search"), sr -> fail("scroll"))) - ); + () -> + assertThrows( + UnsupportedOperationException.class, + () -> request2.search(sr -> fail("search"), sr -> fail("scroll")))); } @Test void search_withoutIncludes() { - OpenSearchScrollRequest request = new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), - TimeValue.timeValueMinutes(1), - sourceBuilder, - factory, - List.of() - ); + OpenSearchScrollRequest request = + new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), + TimeValue.timeValueMinutes(1), + sourceBuilder, + factory, + List.of()); when(searchResponse.getHits()).thenReturn(searchHits); when(searchHits.getHits()).thenReturn(new SearchHit[] {searchHit}); @@ -213,9 +209,10 @@ void clean_on_empty_response() { // This could happen on sequential search calls SearchResponse searchResponse = mock(); when(searchResponse.getScrollId()).thenReturn("scroll1", "scroll2"); - when(searchResponse.getHits()).thenReturn( - new SearchHits(new SearchHit[1], new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1F), - new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1F)); + when(searchResponse.getHits()) + .thenReturn( + new SearchHits(new SearchHit[1], new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1F), + new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1F)); request.search((x) -> searchResponse, (x) -> searchResponse); assertEquals("scroll1", request.getScrollId()); @@ -233,8 +230,9 @@ void clean_on_empty_response() { void no_clean_on_non_empty_response() { SearchResponse searchResponse = mock(); when(searchResponse.getScrollId()).thenReturn("scroll"); - when(searchResponse.getHits()).thenReturn( - new SearchHits(new SearchHit[1], new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1F)); + when(searchResponse.getHits()) + .thenReturn( + new 
SearchHits(new SearchHit[1], new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1F)); request.search((sr) -> searchResponse, (sr) -> searchResponse); assertEquals("scroll", request.getScrollId()); @@ -246,8 +244,7 @@ void no_clean_on_non_empty_response() { @Test void no_cursor_on_empty_response() { SearchResponse searchResponse = mock(); - when(searchResponse.getHits()).thenReturn( - new SearchHits(new SearchHit[0], null, 1f)); + when(searchResponse.getHits()).thenReturn(new SearchHits(new SearchHit[0], null, 1f)); request.search((x) -> searchResponse, (x) -> searchResponse); assertFalse(request.hasAnotherBatch()); @@ -256,8 +253,9 @@ void no_cursor_on_empty_response() { @Test void no_clean_if_no_scroll_in_response() { SearchResponse searchResponse = mock(); - when(searchResponse.getHits()).thenReturn( - new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1F)); + when(searchResponse.getHits()) + .thenReturn( + new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1F)); request.search((x) -> searchResponse, (x) -> searchResponse); assertEquals(NO_SCROLL_ID, request.getScrollId()); @@ -286,8 +284,10 @@ void serialize_deserialize_no_needClean() { @Test @SneakyThrows void serialize_deserialize_needClean() { - lenient().when(searchResponse.getHits()).thenReturn( - new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1F)); + lenient() + .when(searchResponse.getHits()) + .thenReturn( + new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1F)); lenient().when(searchResponse.getScrollId()).thenReturn(""); var stream = new BytesStreamOutput(); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/system/OpenSearchCatIndicesRequestTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/system/OpenSearchCatIndicesRequestTest.java index a720c2a266..8f954b68b2 100644 --- 
a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/system/OpenSearchCatIndicesRequestTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/system/OpenSearchCatIndicesRequestTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request.system; import static org.hamcrest.MatcherAssert.assertThat; @@ -25,8 +24,7 @@ @ExtendWith(MockitoExtension.class) class OpenSearchCatIndicesRequestTest { - @Mock - private OpenSearchClient client; + @Mock private OpenSearchClient client; @Test void testSearch() { @@ -34,14 +32,12 @@ void testSearch() { final List results = new OpenSearchCatIndicesRequest(client).search(); assertEquals(1, results.size()); - assertThat(results.get(0).tupleValue(), anyOf( - hasEntry("TABLE_NAME", stringValue("index")) - )); + assertThat(results.get(0).tupleValue(), anyOf(hasEntry("TABLE_NAME", stringValue("index")))); } @Test void testToString() { - assertEquals("OpenSearchCatIndicesRequest{}", - new OpenSearchCatIndicesRequest(client).toString()); + assertEquals( + "OpenSearchCatIndicesRequest{}", new OpenSearchCatIndicesRequest(client).toString()); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/system/OpenSearchDescribeIndexRequestTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/system/OpenSearchDescribeIndexRequestTest.java index c19b3a3ccd..59ece9bfbc 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/system/OpenSearchDescribeIndexRequestTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/system/OpenSearchDescribeIndexRequestTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.request.system; import static org.hamcrest.MatcherAssert.assertThat; @@ -28,46 +27,47 @@ @ExtendWith(MockitoExtension.class) class OpenSearchDescribeIndexRequestTest { - @Mock - private OpenSearchClient client; + @Mock 
private OpenSearchClient client; - @Mock - private IndexMapping mapping; + @Mock private IndexMapping mapping; @Test void testSearch() { - when(mapping.getFieldMappings()).thenReturn( - Map.of("name", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))); + when(mapping.getFieldMappings()) + .thenReturn(Map.of("name", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))); when(client.getIndexMappings("index")).thenReturn(ImmutableMap.of("test", mapping)); final List results = new OpenSearchDescribeIndexRequest(client, "index").search(); assertEquals(1, results.size()); - assertThat(results.get(0).tupleValue(), anyOf( - hasEntry("TABLE_NAME", stringValue("index")), - hasEntry("COLUMN_NAME", stringValue("name")), - hasEntry("TYPE_NAME", stringValue("STRING")) - )); + assertThat( + results.get(0).tupleValue(), + anyOf( + hasEntry("TABLE_NAME", stringValue("index")), + hasEntry("COLUMN_NAME", stringValue("name")), + hasEntry("TYPE_NAME", stringValue("STRING")))); } @Test void testCrossClusterShouldSearchLocal() { - when(mapping.getFieldMappings()).thenReturn( - Map.of("name", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))); + when(mapping.getFieldMappings()) + .thenReturn(Map.of("name", OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))); when(client.getIndexMappings("index")).thenReturn(ImmutableMap.of("test", mapping)); final List results = new OpenSearchDescribeIndexRequest(client, "ccs:index").search(); assertEquals(1, results.size()); - assertThat(results.get(0).tupleValue(), anyOf( - hasEntry("TABLE_NAME", stringValue("index")), - hasEntry("COLUMN_NAME", stringValue("name")), - hasEntry("TYPE_NAME", stringValue("STRING")) - )); + assertThat( + results.get(0).tupleValue(), + anyOf( + hasEntry("TABLE_NAME", stringValue("index")), + hasEntry("COLUMN_NAME", stringValue("name")), + hasEntry("TYPE_NAME", stringValue("STRING")))); } @Test void testToString() { - 
assertEquals("OpenSearchDescribeIndexRequest{indexName='index'}", + assertEquals( + "OpenSearchDescribeIndexRequest{indexName='index'}", new OpenSearchDescribeIndexRequest(client, "index").toString()); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/AggregationResponseUtils.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/AggregationResponseUtils.java index bbc462e980..76148b9395 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/AggregationResponseUtils.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/AggregationResponseUtils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.response; import com.fasterxml.jackson.core.JsonFactory; @@ -52,37 +51,45 @@ public class AggregationResponseUtils { private static final List entryList = - new ImmutableMap.Builder>().put( - MinAggregationBuilder.NAME, (p, c) -> ParsedMin.fromXContent(p, (String) c)) - .put(MaxAggregationBuilder.NAME, (p, c) -> ParsedMax.fromXContent(p, (String) c)) - .put(SumAggregationBuilder.NAME, (p, c) -> ParsedSum.fromXContent(p, (String) c)) - .put(AvgAggregationBuilder.NAME, (p, c) -> ParsedAvg.fromXContent(p, (String) c)) - .put(ExtendedStatsAggregationBuilder.NAME, - (p, c) -> ParsedExtendedStats.fromXContent(p, (String) c)) - .put(StringTerms.NAME, (p, c) -> ParsedStringTerms.fromXContent(p, (String) c)) - .put(LongTerms.NAME, (p, c) -> ParsedLongTerms.fromXContent(p, (String) c)) - .put(DoubleTerms.NAME, (p, c) -> ParsedDoubleTerms.fromXContent(p, (String) c)) - .put(ValueCountAggregationBuilder.NAME, - (p, c) -> ParsedValueCount.fromXContent(p, (String) c)) - .put(PercentilesBucketPipelineAggregationBuilder.NAME, - (p, c) -> ParsedPercentilesBucket.fromXContent(p, (String) c)) - .put(DateHistogramAggregationBuilder.NAME, - (p, c) -> ParsedDateHistogram.fromXContent(p, (String) c)) - .put(HistogramAggregationBuilder.NAME, - (p, c) -> 
ParsedHistogram.fromXContent(p, (String) c)) - .put(CompositeAggregationBuilder.NAME, - (p, c) -> ParsedComposite.fromXContent(p, (String) c)) - .put(FilterAggregationBuilder.NAME, - (p, c) -> ParsedFilter.fromXContent(p, (String) c)) - .put(TopHitsAggregationBuilder.NAME, - (p, c) -> ParsedTopHits.fromXContent(p, (String) c)) - .build() - .entrySet() - .stream() - .map(entry -> new NamedXContentRegistry.Entry(Aggregation.class, - new ParseField(entry.getKey()), - entry.getValue())) - .collect(Collectors.toList()); + new ImmutableMap.Builder>() + .put(MinAggregationBuilder.NAME, (p, c) -> ParsedMin.fromXContent(p, (String) c)) + .put(MaxAggregationBuilder.NAME, (p, c) -> ParsedMax.fromXContent(p, (String) c)) + .put(SumAggregationBuilder.NAME, (p, c) -> ParsedSum.fromXContent(p, (String) c)) + .put(AvgAggregationBuilder.NAME, (p, c) -> ParsedAvg.fromXContent(p, (String) c)) + .put( + ExtendedStatsAggregationBuilder.NAME, + (p, c) -> ParsedExtendedStats.fromXContent(p, (String) c)) + .put(StringTerms.NAME, (p, c) -> ParsedStringTerms.fromXContent(p, (String) c)) + .put(LongTerms.NAME, (p, c) -> ParsedLongTerms.fromXContent(p, (String) c)) + .put(DoubleTerms.NAME, (p, c) -> ParsedDoubleTerms.fromXContent(p, (String) c)) + .put( + ValueCountAggregationBuilder.NAME, + (p, c) -> ParsedValueCount.fromXContent(p, (String) c)) + .put( + PercentilesBucketPipelineAggregationBuilder.NAME, + (p, c) -> ParsedPercentilesBucket.fromXContent(p, (String) c)) + .put( + DateHistogramAggregationBuilder.NAME, + (p, c) -> ParsedDateHistogram.fromXContent(p, (String) c)) + .put( + HistogramAggregationBuilder.NAME, + (p, c) -> ParsedHistogram.fromXContent(p, (String) c)) + .put( + CompositeAggregationBuilder.NAME, + (p, c) -> ParsedComposite.fromXContent(p, (String) c)) + .put( + FilterAggregationBuilder.NAME, (p, c) -> ParsedFilter.fromXContent(p, (String) c)) + .put( + TopHitsAggregationBuilder.NAME, + (p, c) -> ParsedTopHits.fromXContent(p, (String) c)) + .build() + .entrySet() + 
.stream() + .map( + entry -> + new NamedXContentRegistry.Entry( + Aggregation.class, new ParseField(entry.getKey()), entry.getValue())) + .collect(Collectors.toList()); private static final NamedXContentRegistry namedXContentRegistry = new NamedXContentRegistry(entryList); @@ -94,10 +101,11 @@ public class AggregationResponseUtils { */ public static Aggregations fromJson(String json) { try { - XContentParser contentParser = new JsonXContentParser( - namedXContentRegistry, - LoggingDeprecationHandler.INSTANCE, - new JsonFactory().createParser(json)); + XContentParser contentParser = + new JsonXContentParser( + namedXContentRegistry, + LoggingDeprecationHandler.INSTANCE, + new JsonFactory().createParser(json)); contentParser.nextToken(); return Aggregations.fromXContent(contentParser); } catch (IOException e) { diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java index 318110bdde..1a15e57c55 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchAggregationResponseParserTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.response; import static org.hamcrest.MatcherAssert.assertThat; @@ -34,127 +33,116 @@ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class OpenSearchAggregationResponseParserTest { - /** - * SELECT MAX(age) as max FROM accounts. - */ + /** SELECT MAX(age) as max FROM accounts. 
*/ @Test void no_bucket_one_metric_should_pass() { - String response = "{\n" - + " \"max#max\": {\n" - + " \"value\": 40\n" - + " }\n" - + "}"; - NoBucketAggregationParser parser = new NoBucketAggregationParser( - new SingleValueParser("max") - ); + String response = "{\n" + " \"max#max\": {\n" + " \"value\": 40\n" + " }\n" + "}"; + NoBucketAggregationParser parser = new NoBucketAggregationParser(new SingleValueParser("max")); assertThat(parse(parser, response), contains(entry("max", 40d))); } - /** - * SELECT MAX(age) as max, MIN(age) as min FROM accounts. - */ + /** SELECT MAX(age) as max, MIN(age) as min FROM accounts. */ @Test void no_bucket_two_metric_should_pass() { - String response = "{\n" - + " \"max#max\": {\n" - + " \"value\": 40\n" - + " },\n" - + " \"min#min\": {\n" - + " \"value\": 20\n" - + " }\n" - + "}"; - NoBucketAggregationParser parser = new NoBucketAggregationParser( - new SingleValueParser("max"), - new SingleValueParser("min") - ); - assertThat(parse(parser, response), - contains(entry("max", 40d,"min", 20d))); + String response = + "{\n" + + " \"max#max\": {\n" + + " \"value\": 40\n" + + " },\n" + + " \"min#min\": {\n" + + " \"value\": 20\n" + + " }\n" + + "}"; + NoBucketAggregationParser parser = + new NoBucketAggregationParser(new SingleValueParser("max"), new SingleValueParser("min")); + assertThat(parse(parser, response), contains(entry("max", 40d, "min", 20d))); } @Test void one_bucket_one_metric_should_pass() { - String response = "{\n" - + " \"composite#composite_buckets\": {\n" - + " \"after_key\": {\n" - + " \"type\": \"sale\"\n" - + " },\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": {\n" - + " \"type\": \"cost\"\n" - + " },\n" - + " \"doc_count\": 2,\n" - + " \"avg#avg\": {\n" - + " \"value\": 20\n" - + " }\n" - + " },\n" - + " {\n" - + " \"key\": {\n" - + " \"type\": \"sale\"\n" - + " },\n" - + " \"doc_count\": 2,\n" - + " \"avg#avg\": {\n" - + " \"value\": 105\n" - + " }\n" - + " }\n" - + " ]\n" - + " }\n" - + "}"; + 
String response = + "{\n" + + " \"composite#composite_buckets\": {\n" + + " \"after_key\": {\n" + + " \"type\": \"sale\"\n" + + " },\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": {\n" + + " \"type\": \"cost\"\n" + + " },\n" + + " \"doc_count\": 2,\n" + + " \"avg#avg\": {\n" + + " \"value\": 20\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key\": {\n" + + " \"type\": \"sale\"\n" + + " },\n" + + " \"doc_count\": 2,\n" + + " \"avg#avg\": {\n" + + " \"value\": 105\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; - OpenSearchAggregationResponseParser parser = new CompositeAggregationParser( - new SingleValueParser("avg")); - assertThat(parse(parser, response), - containsInAnyOrder(ImmutableMap.of("type", "cost", "avg", 20d), + OpenSearchAggregationResponseParser parser = + new CompositeAggregationParser(new SingleValueParser("avg")); + assertThat( + parse(parser, response), + containsInAnyOrder( + ImmutableMap.of("type", "cost", "avg", 20d), ImmutableMap.of("type", "sale", "avg", 105d))); } @Test void two_bucket_one_metric_should_pass() { - String response = "{\n" - + " \"composite#composite_buckets\": {\n" - + " \"after_key\": {\n" - + " \"type\": \"sale\",\n" - + " \"region\": \"us\"\n" - + " },\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": {\n" - + " \"type\": \"cost\",\n" - + " \"region\": \"us\"\n" - + " },\n" - + " \"avg#avg\": {\n" - + " \"value\": 20\n" - + " }\n" - + " },\n" - + " {\n" - + " \"key\": {\n" - + " \"type\": \"sale\",\n" - + " \"region\": \"uk\"\n" - + " },\n" - + " \"avg#avg\": {\n" - + " \"value\": 130\n" - + " }\n" - + " }\n" - + " ]\n" - + " }\n" - + "}"; - OpenSearchAggregationResponseParser parser = new CompositeAggregationParser( - new SingleValueParser("avg")); - assertThat(parse(parser, response), - containsInAnyOrder(ImmutableMap.of("type", "cost", "region", "us", "avg", 20d), + String response = + "{\n" + + " \"composite#composite_buckets\": {\n" + + " \"after_key\": {\n" + + " \"type\": \"sale\",\n" + + " \"region\": 
\"us\"\n" + + " },\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": {\n" + + " \"type\": \"cost\",\n" + + " \"region\": \"us\"\n" + + " },\n" + + " \"avg#avg\": {\n" + + " \"value\": 20\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key\": {\n" + + " \"type\": \"sale\",\n" + + " \"region\": \"uk\"\n" + + " },\n" + + " \"avg#avg\": {\n" + + " \"value\": 130\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + OpenSearchAggregationResponseParser parser = + new CompositeAggregationParser(new SingleValueParser("avg")); + assertThat( + parse(parser, response), + containsInAnyOrder( + ImmutableMap.of("type", "cost", "region", "us", "avg", 20d), ImmutableMap.of("type", "sale", "region", "uk", "avg", 130d))); } @Test void unsupported_aggregation_should_fail() { - String response = "{\n" - + " \"date_histogram#date_histogram\": {\n" - + " \"value\": 40\n" - + " }\n" - + "}"; - NoBucketAggregationParser parser = new NoBucketAggregationParser( - new SingleValueParser("max") - ); + String response = + "{\n" + " \"date_histogram#date_histogram\": {\n" + " \"value\": 40\n" + " }\n" + "}"; + NoBucketAggregationParser parser = new NoBucketAggregationParser(new SingleValueParser("max")); RuntimeException exception = assertThrows(RuntimeException.class, () -> parse(parser, response)); assertEquals( @@ -170,14 +158,15 @@ void nan_value_should_return_null() { @Test void filter_aggregation_should_pass() { - String response = "{\n" - + " \"filter#filtered\" : {\n" - + " \"doc_count\" : 3,\n" - + " \"avg#filtered\" : {\n" - + " \"value\" : 37.0\n" - + " }\n" - + " }\n" - + " }"; + String response = + "{\n" + + " \"filter#filtered\" : {\n" + + " \"doc_count\" : 3,\n" + + " \"avg#filtered\" : {\n" + + " \"value\" : 37.0\n" + + " }\n" + + " }\n" + + " }"; OpenSearchAggregationResponseParser parser = new NoBucketAggregationParser( FilterParser.builder() @@ -189,132 +178,134 @@ void filter_aggregation_should_pass() { @Test void filter_aggregation_group_by_should_pass() { - String 
response = "{\n" - + " \"composite#composite_buckets\":{\n" - + " \"after_key\":{\n" - + " \"gender\":\"m\"\n" - + " },\n" - + " \"buckets\":[\n" - + " {\n" - + " \"key\":{\n" - + " \"gender\":\"f\"\n" - + " },\n" - + " \"doc_count\":3,\n" - + " \"filter#filter\":{\n" - + " \"doc_count\":1,\n" - + " \"avg#avg\":{\n" - + " \"value\":39.0\n" - + " }\n" - + " }\n" - + " },\n" - + " {\n" - + " \"key\":{\n" - + " \"gender\":\"m\"\n" - + " },\n" - + " \"doc_count\":4,\n" - + " \"filter#filter\":{\n" - + " \"doc_count\":2,\n" - + " \"avg#avg\":{\n" - + " \"value\":36.0\n" - + " }\n" - + " }\n" - + " }\n" - + " ]\n" - + " }\n" - + "}"; - OpenSearchAggregationResponseParser parser = new CompositeAggregationParser( - FilterParser.builder() - .name("filter") - .metricsParser(new SingleValueParser("avg")) - .build() - ); - assertThat(parse(parser, response), containsInAnyOrder( - entry("gender", "f", "avg", 39.0), - entry("gender", "m", "avg", 36.0))); + String response = + "{\n" + + " \"composite#composite_buckets\":{\n" + + " \"after_key\":{\n" + + " \"gender\":\"m\"\n" + + " },\n" + + " \"buckets\":[\n" + + " {\n" + + " \"key\":{\n" + + " \"gender\":\"f\"\n" + + " },\n" + + " \"doc_count\":3,\n" + + " \"filter#filter\":{\n" + + " \"doc_count\":1,\n" + + " \"avg#avg\":{\n" + + " \"value\":39.0\n" + + " }\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key\":{\n" + + " \"gender\":\"m\"\n" + + " },\n" + + " \"doc_count\":4,\n" + + " \"filter#filter\":{\n" + + " \"doc_count\":2,\n" + + " \"avg#avg\":{\n" + + " \"value\":36.0\n" + + " }\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + OpenSearchAggregationResponseParser parser = + new CompositeAggregationParser( + FilterParser.builder() + .name("filter") + .metricsParser(new SingleValueParser("avg")) + .build()); + assertThat( + parse(parser, response), + containsInAnyOrder(entry("gender", "f", "avg", 39.0), entry("gender", "m", "avg", 36.0))); } - /** - * SELECT MAX(age) as max, STDDEV(age) as min FROM accounts. 
- */ + /** SELECT MAX(age) as max, STDDEV(age) as min FROM accounts. */ @Test void no_bucket_max_and_extended_stats() { - String response = "{\n" - + " \"extended_stats#esField\": {\n" - + " \"count\": 2033,\n" - + " \"min\": 0,\n" - + " \"max\": 360,\n" - + " \"avg\": 45.47958681751107,\n" - + " \"sum\": 92460,\n" - + " \"sum_of_squares\": 22059450,\n" - + " \"variance\": 8782.295820390027,\n" - + " \"variance_population\": 8782.295820390027,\n" - + " \"variance_sampling\": 8786.61781636463,\n" - + " \"std_deviation\": 93.71390409320287,\n" - + " \"std_deviation_population\": 93.71390409320287,\n" - + " \"std_deviation_sampling\": 93.73696078049805,\n" - + " \"std_deviation_bounds\": {\n" - + " \"upper\": 232.9073950039168,\n" - + " \"lower\": -141.94822136889468,\n" - + " \"upper_population\": 232.9073950039168,\n" - + " \"lower_population\": -141.94822136889468,\n" - + " \"upper_sampling\": 232.95350837850717,\n" - + " \"lower_sampling\": -141.99433474348504\n" - + " }\n" - + " },\n" - + " \"max#maxField\": {\n" - + " \"value\": 360\n" - + " }\n" - + "}"; + String response = + "{\n" + + " \"extended_stats#esField\": {\n" + + " \"count\": 2033,\n" + + " \"min\": 0,\n" + + " \"max\": 360,\n" + + " \"avg\": 45.47958681751107,\n" + + " \"sum\": 92460,\n" + + " \"sum_of_squares\": 22059450,\n" + + " \"variance\": 8782.295820390027,\n" + + " \"variance_population\": 8782.295820390027,\n" + + " \"variance_sampling\": 8786.61781636463,\n" + + " \"std_deviation\": 93.71390409320287,\n" + + " \"std_deviation_population\": 93.71390409320287,\n" + + " \"std_deviation_sampling\": 93.73696078049805,\n" + + " \"std_deviation_bounds\": {\n" + + " \"upper\": 232.9073950039168,\n" + + " \"lower\": -141.94822136889468,\n" + + " \"upper_population\": 232.9073950039168,\n" + + " \"lower_population\": -141.94822136889468,\n" + + " \"upper_sampling\": 232.95350837850717,\n" + + " \"lower_sampling\": -141.99433474348504\n" + + " }\n" + + " },\n" + + " \"max#maxField\": {\n" + + " 
\"value\": 360\n" + + " }\n" + + "}"; - NoBucketAggregationParser parser = new NoBucketAggregationParser( - new SingleValueParser("maxField"), - new StatsParser(ExtendedStats::getStdDeviation, "esField") - ); - assertThat(parse(parser, response), - contains(entry("esField", 93.71390409320287, "maxField", 360D))); + NoBucketAggregationParser parser = + new NoBucketAggregationParser( + new SingleValueParser("maxField"), + new StatsParser(ExtendedStats::getStdDeviation, "esField")); + assertThat( + parse(parser, response), contains(entry("esField", 93.71390409320287, "maxField", 360D))); } @Test void top_hits_aggregation_should_pass() { - String response = "{\n" - + " \"composite#composite_buckets\": {\n" - + " \"buckets\": [\n" - + " {\n" - + " \"key\": {\n" - + " \"type\": \"take\"\n" - + " },\n" - + " \"doc_count\": 2,\n" - + " \"top_hits#take\": {\n" - + " \"hits\": {\n" - + " \"total\": { \"value\": 2, \"relation\": \"eq\" },\n" - + " \"max_score\": 1.0,\n" - + " \"hits\": [\n" - + " {\n" - + " \"_index\": \"accounts\",\n" - + " \"_id\": \"1\",\n" - + " \"_score\": 1.0,\n" - + " \"_source\": {\n" - + " \"gender\": \"m\"\n" - + " }\n" - + " },\n" - + " {\n" - + " \"_index\": \"accounts\",\n" - + " \"_id\": \"2\",\n" - + " \"_score\": 1.0,\n" - + " \"_source\": {\n" - + " \"gender\": \"f\"\n" - + " }\n" - + " }\n" - + " ]\n" - + " }\n" - + " }\n" - + " }\n" - + " ]\n" - + " }\n" - + "}"; + String response = + "{\n" + + " \"composite#composite_buckets\": {\n" + + " \"buckets\": [\n" + + " {\n" + + " \"key\": {\n" + + " \"type\": \"take\"\n" + + " },\n" + + " \"doc_count\": 2,\n" + + " \"top_hits#take\": {\n" + + " \"hits\": {\n" + + " \"total\": { \"value\": 2, \"relation\": \"eq\" },\n" + + " \"max_score\": 1.0,\n" + + " \"hits\": [\n" + + " {\n" + + " \"_index\": \"accounts\",\n" + + " \"_id\": \"1\",\n" + + " \"_score\": 1.0,\n" + + " \"_source\": {\n" + + " \"gender\": \"m\"\n" + + " }\n" + + " },\n" + + " {\n" + + " \"_index\": \"accounts\",\n" + + " \"_id\": 
\"2\",\n" + + " \"_score\": 1.0,\n" + + " \"_source\": {\n" + + " \"gender\": \"f\"\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; OpenSearchAggregationResponseParser parser = new CompositeAggregationParser(new TopHitsParser("take")); - assertThat(parse(parser, response), + assertThat( + parse(parser, response), contains(ImmutableMap.of("type", "take", "take", ImmutableList.of("m", "f")))); } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchResponseTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchResponseTest.java index e77819a453..6f4605bc2f 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchResponseTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchResponseTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.response; import static java.util.Collections.emptyList; @@ -15,8 +14,6 @@ import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.lenient; import static org.mockito.Mockito.when; import com.google.common.collect.ImmutableMap; @@ -51,31 +48,25 @@ @ExtendWith(MockitoExtension.class) class OpenSearchResponseTest { - @Mock - private SearchResponse searchResponse; + @Mock private SearchResponse searchResponse; - @Mock - private OpenSearchExprValueFactory factory; + @Mock private OpenSearchExprValueFactory factory; - @Mock - private SearchHit searchHit1; + @Mock private SearchHit searchHit1; - @Mock - private SearchHit searchHit2; + @Mock private SearchHit searchHit2; - @Mock - private Aggregations aggregations; + @Mock private Aggregations aggregations; private List includes = List.of(); - @Mock - private 
OpenSearchAggregationResponseParser parser; + @Mock private OpenSearchAggregationResponseParser parser; - private ExprTupleValue exprTupleValue1 = ExprTupleValue.fromExprValueMap(ImmutableMap.of("id1", - new ExprIntegerValue(1))); + private ExprTupleValue exprTupleValue1 = + ExprTupleValue.fromExprValueMap(ImmutableMap.of("id1", new ExprIntegerValue(1))); - private ExprTupleValue exprTupleValue2 = ExprTupleValue.fromExprValueMap(ImmutableMap.of("id2", - new ExprIntegerValue(2))); + private ExprTupleValue exprTupleValue2 = + ExprTupleValue.fromExprValueMap(ImmutableMap.of("id2", new ExprIntegerValue(2))); @Test void isEmpty() { @@ -121,7 +112,8 @@ void iterator() { when(searchHit1.getInnerHits()).thenReturn(null); when(searchHit2.getInnerHits()).thenReturn(null); when(factory.construct(any(), anyBoolean())) - .thenReturn(exprTupleValue1).thenReturn(exprTupleValue2); + .thenReturn(exprTupleValue1) + .thenReturn(exprTupleValue2); int i = 0; for (ExprValue hit : new OpenSearchResponse(searchResponse, factory, List.of("id1"))) { @@ -139,9 +131,8 @@ void iterator() { @Test void iterator_metafields() { - ExprTupleValue exprTupleHit = ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "id1", new ExprIntegerValue(1) - )); + ExprTupleValue exprTupleHit = + ExprTupleValue.fromExprValueMap(ImmutableMap.of("id1", new ExprIntegerValue(1))); when(searchResponse.getHits()) .thenReturn( @@ -162,15 +153,16 @@ void iterator_metafields() { when(factory.construct(any(), anyBoolean())).thenReturn(exprTupleHit); - ExprTupleValue exprTupleResponse = ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "id1", new ExprIntegerValue(1), - "_index", new ExprStringValue("testIndex"), - "_id", new ExprStringValue("testId"), - "_routing", new ExprStringValue(shardTarget.toString()), - "_sort", new ExprLongValue(123456L), - "_score", new ExprFloatValue(3.75F), - "_maxscore", new ExprFloatValue(3.75F) - )); + ExprTupleValue exprTupleResponse = + ExprTupleValue.fromExprValueMap( + ImmutableMap.of( + 
"id1", new ExprIntegerValue(1), + "_index", new ExprStringValue("testIndex"), + "_id", new ExprStringValue("testId"), + "_routing", new ExprStringValue(shardTarget.toString()), + "_sort", new ExprLongValue(123456L), + "_score", new ExprFloatValue(3.75F), + "_maxscore", new ExprFloatValue(3.75F))); List includes = List.of("id1", "_index", "_id", "_routing", "_sort", "_score", "_maxscore"); int i = 0; for (ExprValue hit : new OpenSearchResponse(searchResponse, factory, includes)) { @@ -186,9 +178,8 @@ void iterator_metafields() { @Test void iterator_metafields_withoutIncludes() { - ExprTupleValue exprTupleHit = ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "id1", new ExprIntegerValue(1) - )); + ExprTupleValue exprTupleHit = + ExprTupleValue.fromExprValueMap(ImmutableMap.of("id1", new ExprIntegerValue(1))); when(searchResponse.getHits()) .thenReturn( @@ -202,9 +193,8 @@ void iterator_metafields_withoutIncludes() { when(factory.construct(any(), anyBoolean())).thenReturn(exprTupleHit); List includes = List.of("id1"); - ExprTupleValue exprTupleResponse = ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "id1", new ExprIntegerValue(1) - )); + ExprTupleValue exprTupleResponse = + ExprTupleValue.fromExprValueMap(ImmutableMap.of("id1", new ExprIntegerValue(1))); int i = 0; for (ExprValue hit : new OpenSearchResponse(searchResponse, factory, includes)) { if (i == 0) { @@ -219,9 +209,8 @@ void iterator_metafields_withoutIncludes() { @Test void iterator_metafields_scoreNaN() { - ExprTupleValue exprTupleHit = ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "id1", new ExprIntegerValue(1) - )); + ExprTupleValue exprTupleHit = + ExprTupleValue.fromExprValueMap(ImmutableMap.of("id1", new ExprIntegerValue(1))); when(searchResponse.getHits()) .thenReturn( @@ -239,12 +228,13 @@ void iterator_metafields_scoreNaN() { when(factory.construct(any(), anyBoolean())).thenReturn(exprTupleHit); List includes = List.of("id1", "_index", "_id", "_sort", "_score", "_maxscore"); - 
ExprTupleValue exprTupleResponse = ExprTupleValue.fromExprValueMap(ImmutableMap.of( - "id1", new ExprIntegerValue(1), - "_index", new ExprStringValue("testIndex"), - "_id", new ExprStringValue("testId"), - "_sort", new ExprLongValue(123456L) - )); + ExprTupleValue exprTupleResponse = + ExprTupleValue.fromExprValueMap( + ImmutableMap.of( + "id1", new ExprIntegerValue(1), + "_index", new ExprStringValue("testIndex"), + "_id", new ExprStringValue("testId"), + "_sort", new ExprLongValue(123456L))); int i = 0; for (ExprValue hit : new OpenSearchResponse(searchResponse, factory, includes)) { if (i == 0) { @@ -264,13 +254,14 @@ void iterator_with_inner_hits() { new SearchHit[] {searchHit1}, new TotalHits(2L, TotalHits.Relation.EQUAL_TO), 1.0F)); - when(searchHit1.getInnerHits()).thenReturn( - Map.of( - "innerHit", - new SearchHits( - new SearchHit[] {searchHit1}, - new TotalHits(2L, TotalHits.Relation.EQUAL_TO), - 1.0F))); + when(searchHit1.getInnerHits()) + .thenReturn( + Map.of( + "innerHit", + new SearchHits( + new SearchHit[] {searchHit1}, + new TotalHits(2L, TotalHits.Relation.EQUAL_TO), + 1.0F))); when(factory.construct(any(), anyBoolean())).thenReturn(exprTupleValue1); @@ -323,18 +314,17 @@ void aggregation_iterator() { @Test void highlight_iterator() { SearchHit searchHit = new SearchHit(1); - searchHit.sourceRef( - new BytesArray("{\"name\":\"John\"}")); - Map highlightMap = Map.of("highlights", - new HighlightField("Title", new Text[] {new Text("field")})); - searchHit.highlightFields(Map.of("highlights", new HighlightField("Title", - new Text[] {new Text("field")}))); + searchHit.sourceRef(new BytesArray("{\"name\":\"John\"}")); + Map highlightMap = + Map.of("highlights", new HighlightField("Title", new Text[] {new Text("field")})); + searchHit.highlightFields( + Map.of("highlights", new HighlightField("Title", new Text[] {new Text("field")}))); ExprValue resultTuple = ExprValueUtils.tupleValue(searchHit.getSourceAsMap()); when(searchResponse.getHits()) 
.thenReturn( new SearchHits( - new SearchHit[]{searchHit1}, + new SearchHit[] {searchHit1}, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0F)); @@ -342,11 +332,12 @@ void highlight_iterator() { when(factory.construct(any(), anyBoolean())).thenReturn(resultTuple); for (ExprValue resultHit : new OpenSearchResponse(searchResponse, factory, includes)) { - var expected = ExprValueUtils.collectionValue( - Arrays.stream(searchHit.getHighlightFields().get("highlights").getFragments()) - .map(t -> (t.toString())).collect(Collectors.toList())); - var result = resultHit.tupleValue().get( - "_highlight").tupleValue().get("highlights"); + var expected = + ExprValueUtils.collectionValue( + Arrays.stream(searchHit.getHighlightFields().get("highlights").getFragments()) + .map(t -> (t.toString())) + .collect(Collectors.toList())); + var result = resultHit.tupleValue().get("_highlight").tupleValue().get("highlights"); assertTrue(expected.equals(result)); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/error/ErrorMessageFactoryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/error/ErrorMessageFactoryTest.java index c3ae5d139d..eb759233a8 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/error/ErrorMessageFactoryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/error/ErrorMessageFactoryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.response.error; import static org.junit.jupiter.api.Assertions.assertFalse; diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/error/ErrorMessageTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/error/ErrorMessageTest.java index ac0d46938a..90268502c2 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/error/ErrorMessageTest.java +++ 
b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/error/ErrorMessageTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.response.error; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -20,45 +19,49 @@ class ErrorMessageTest { @Test public void testToString() { ErrorMessage errorMessage = - new ErrorMessage(new IllegalStateException("illegal state"), - SERVICE_UNAVAILABLE.getStatus()); - assertEquals("{\n" - + " \"error\": {\n" - + " \"reason\": \"There was internal problem at backend\",\n" - + " \"details\": \"illegal state\",\n" - + " \"type\": \"IllegalStateException\"\n" - + " },\n" - + " \"status\": 503\n" - + "}", errorMessage.toString()); + new ErrorMessage( + new IllegalStateException("illegal state"), SERVICE_UNAVAILABLE.getStatus()); + assertEquals( + "{\n" + + " \"error\": {\n" + + " \"reason\": \"There was internal problem at backend\",\n" + + " \"details\": \"illegal state\",\n" + + " \"type\": \"IllegalStateException\"\n" + + " },\n" + + " \"status\": 503\n" + + "}", + errorMessage.toString()); } @Test public void testBadRequestToString() { ErrorMessage errorMessage = - new ErrorMessage(new IllegalStateException(), - BAD_REQUEST.getStatus()); - assertEquals("{\n" - + " \"error\": {\n" - + " \"reason\": \"Invalid Query\",\n" - + " \"details\": \"\",\n" - + " \"type\": \"IllegalStateException\"\n" - + " },\n" - + " \"status\": 400\n" - + "}", errorMessage.toString()); + new ErrorMessage(new IllegalStateException(), BAD_REQUEST.getStatus()); + assertEquals( + "{\n" + + " \"error\": {\n" + + " \"reason\": \"Invalid Query\",\n" + + " \"details\": \"\",\n" + + " \"type\": \"IllegalStateException\"\n" + + " },\n" + + " \"status\": 400\n" + + "}", + errorMessage.toString()); } @Test public void testToStringWithEmptyErrorMessage() { ErrorMessage errorMessage = - new ErrorMessage(new IllegalStateException(), - SERVICE_UNAVAILABLE.getStatus()); - assertEquals("{\n" - + " 
\"error\": {\n" - + " \"reason\": \"There was internal problem at backend\",\n" - + " \"details\": \"\",\n" - + " \"type\": \"IllegalStateException\"\n" - + " },\n" - + " \"status\": 503\n" - + "}", errorMessage.toString()); + new ErrorMessage(new IllegalStateException(), SERVICE_UNAVAILABLE.getStatus()); + assertEquals( + "{\n" + + " \"error\": {\n" + + " \"reason\": \"There was internal problem at backend\",\n" + + " \"details\": \"\",\n" + + " \"type\": \"IllegalStateException\"\n" + + " },\n" + + " \"status\": 503\n" + + "}", + errorMessage.toString()); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/error/OpenSearchErrorMessageTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/error/OpenSearchErrorMessageTest.java index 3dcb38a558..f07b5dfdd3 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/error/OpenSearchErrorMessageTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/error/OpenSearchErrorMessageTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.response.error; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -21,14 +20,11 @@ @ExtendWith(MockitoExtension.class) class OpenSearchErrorMessageTest { - @Mock - private OpenSearchException openSearchException; + @Mock private OpenSearchException openSearchException; - @Mock - private SearchPhaseExecutionException searchPhaseExecutionException; + @Mock private SearchPhaseExecutionException searchPhaseExecutionException; - @Mock - private ShardSearchFailure shardSearchFailure; + @Mock private ShardSearchFailure shardSearchFailure; @Test public void fetchReason() { @@ -45,7 +41,8 @@ public void fetchDetailsWithOpenSearchException() { OpenSearchErrorMessage errorMessage = new OpenSearchErrorMessage(openSearchException, SERVICE_UNAVAILABLE.getStatus()); - assertEquals("detail error\n" + assertEquals( + "detail error\n" + "For more details, 
please send request for " + "Json format to see the raw response from OpenSearch engine.", errorMessage.fetchDetails()); @@ -59,9 +56,9 @@ public void fetchDetailsWithSearchPhaseExecutionException() { when(shardSearchFailure.getCause()).thenReturn(new IllegalStateException("illegal state")); OpenSearchErrorMessage errorMessage = - new OpenSearchErrorMessage(searchPhaseExecutionException, - SERVICE_UNAVAILABLE.getStatus()); - assertEquals("Shard[1]: java.lang.IllegalStateException: illegal state\n" + new OpenSearchErrorMessage(searchPhaseExecutionException, SERVICE_UNAVAILABLE.getStatus()); + assertEquals( + "Shard[1]: java.lang.IllegalStateException: illegal state\n" + "\n" + "For more details, please send request for Json format to see the " + "raw response from OpenSearch engine.", diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/setting/OpenSearchSettingsTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/setting/OpenSearchSettingsTest.java index b4c8cc8c69..ff2c311753 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/setting/OpenSearchSettingsTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/setting/OpenSearchSettingsTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.setting; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -12,18 +11,14 @@ import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.mockito.AdditionalMatchers.not; import static org.mockito.AdditionalMatchers.or; -import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.when; import static org.opensearch.common.unit.TimeValue.timeValueMinutes; -import static org.opensearch.sql.opensearch.setting.LegacyOpenDistroSettings.PPL_ENABLED_SETTING; import static org.opensearch.sql.opensearch.setting.LegacyOpenDistroSettings.legacySettings; import java.util.List; -import 
java.util.Set; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.AdditionalMatchers; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; import org.opensearch.cluster.ClusterName; @@ -37,14 +32,12 @@ @ExtendWith(MockitoExtension.class) class OpenSearchSettingsTest { - @Mock - private ClusterSettings clusterSettings; + @Mock private ClusterSettings clusterSettings; @Test void getSettingValue() { when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); - when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))) - .thenReturn(null); + when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))).thenReturn(null); OpenSearchSettings settings = new OpenSearchSettings(clusterSettings); ByteSizeValue sizeValue = settings.getSettingValue(Settings.Key.QUERY_MEMORY_LIMIT); @@ -54,11 +47,14 @@ void getSettingValue() { @Test void getSettingValueWithPresetValuesInYml() { when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); - when(clusterSettings - .get((Setting) OpenSearchSettings.QUERY_MEMORY_LIMIT_SETTING)) + when(clusterSettings.get( + (Setting) OpenSearchSettings.QUERY_MEMORY_LIMIT_SETTING)) .thenReturn(new ByteSizeValue(20)); - when(clusterSettings.get(not(or(eq(ClusterName.CLUSTER_NAME_SETTING), - eq((Setting) OpenSearchSettings.QUERY_MEMORY_LIMIT_SETTING))))) + when(clusterSettings.get( + not( + or( + eq(ClusterName.CLUSTER_NAME_SETTING), + eq((Setting) OpenSearchSettings.QUERY_MEMORY_LIMIT_SETTING))))) .thenReturn(null); OpenSearchSettings settings = new OpenSearchSettings(clusterSettings); ByteSizeValue sizeValue = settings.getSettingValue(Settings.Key.QUERY_MEMORY_LIMIT); @@ -82,8 +78,7 @@ void pluginNonDynamicSettings() { @Test void getSettings() { when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); - 
when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))) - .thenReturn(null); + when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))).thenReturn(null); OpenSearchSettings settings = new OpenSearchSettings(clusterSettings); assertFalse(settings.getSettings().isEmpty()); } @@ -91,12 +86,10 @@ void getSettings() { @Test void update() { when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); - when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))) - .thenReturn(null); + when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))).thenReturn(null); OpenSearchSettings settings = new OpenSearchSettings(clusterSettings); ByteSizeValue oldValue = settings.getSettingValue(Settings.Key.QUERY_MEMORY_LIMIT); - OpenSearchSettings.Updater updater = - settings.new Updater(Settings.Key.QUERY_MEMORY_LIMIT); + OpenSearchSettings.Updater updater = settings.new Updater(Settings.Key.QUERY_MEMORY_LIMIT); updater.accept(new ByteSizeValue(0L)); ByteSizeValue newValue = settings.getSettingValue(Settings.Key.QUERY_MEMORY_LIMIT); @@ -107,8 +100,7 @@ void update() { @Test void settingsFallback() { when(clusterSettings.get(ClusterName.CLUSTER_NAME_SETTING)).thenReturn(ClusterName.DEFAULT); - when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))) - .thenReturn(null); + when(clusterSettings.get(not((eq(ClusterName.CLUSTER_NAME_SETTING))))).thenReturn(null); OpenSearchSettings settings = new OpenSearchSettings(clusterSettings); assertEquals( settings.getSettingValue(Settings.Key.SQL_ENABLED), @@ -160,17 +152,17 @@ public void updateLegacySettingsFallback() { assertEquals(OpenSearchSettings.SQL_ENABLED_SETTING.get(settings), false); assertEquals(OpenSearchSettings.SQL_SLOWLOG_SETTING.get(settings), 10); - assertEquals(OpenSearchSettings.SQL_CURSOR_KEEP_ALIVE_SETTING.get(settings), - timeValueMinutes(1)); + assertEquals( + OpenSearchSettings.SQL_CURSOR_KEEP_ALIVE_SETTING.get(settings), 
timeValueMinutes(1)); assertEquals(OpenSearchSettings.PPL_ENABLED_SETTING.get(settings), true); - assertEquals(OpenSearchSettings.QUERY_MEMORY_LIMIT_SETTING.get(settings), + assertEquals( + OpenSearchSettings.QUERY_MEMORY_LIMIT_SETTING.get(settings), new ByteSizeValue((int) (JvmInfo.jvmInfo().getMem().getHeapMax().getBytes() * 0.2))); assertEquals(OpenSearchSettings.QUERY_SIZE_LIMIT_SETTING.get(settings), 100); assertEquals(OpenSearchSettings.METRICS_ROLLING_WINDOW_SETTING.get(settings), 2000L); assertEquals(OpenSearchSettings.METRICS_ROLLING_INTERVAL_SETTING.get(settings), 100L); } - @Test void legacySettingsShouldBeDeprecatedBeforeRemove() { assertEquals(15, legacySettings().size()); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchDefaultImplementorTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchDefaultImplementorTest.java index f2b6a70a46..85d0a4e94f 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchDefaultImplementorTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchDefaultImplementorTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage; import static org.junit.jupiter.api.Assertions.assertNotNull; @@ -19,18 +18,15 @@ import org.opensearch.sql.planner.logical.LogicalML; import org.opensearch.sql.planner.logical.LogicalMLCommons; import org.opensearch.sql.planner.logical.LogicalPlan; -import org.opensearch.sql.storage.Table; @ExtendWith(MockitoExtension.class) public class OpenSearchDefaultImplementorTest { - @Mock - OpenSearchClient client; + @Mock OpenSearchClient client; @Test public void visitMachineLearning() { - LogicalMLCommons node = Mockito.mock(LogicalMLCommons.class, - Answers.RETURNS_DEEP_STUBS); + LogicalMLCommons node = Mockito.mock(LogicalMLCommons.class, Answers.RETURNS_DEEP_STUBS); 
Mockito.when(node.getChild().get(0)).thenReturn(Mockito.mock(LogicalPlan.class)); OpenSearchIndex.OpenSearchDefaultImplementor implementor = new OpenSearchIndex.OpenSearchDefaultImplementor(client); @@ -39,8 +35,7 @@ public void visitMachineLearning() { @Test public void visitAD() { - LogicalAD node = Mockito.mock(LogicalAD.class, - Answers.RETURNS_DEEP_STUBS); + LogicalAD node = Mockito.mock(LogicalAD.class, Answers.RETURNS_DEEP_STUBS); Mockito.when(node.getChild().get(0)).thenReturn(Mockito.mock(LogicalPlan.class)); OpenSearchIndex.OpenSearchDefaultImplementor implementor = new OpenSearchIndex.OpenSearchDefaultImplementor(client); @@ -49,11 +44,10 @@ public void visitAD() { @Test public void visitML() { - LogicalML node = Mockito.mock(LogicalML.class, - Answers.RETURNS_DEEP_STUBS); + LogicalML node = Mockito.mock(LogicalML.class, Answers.RETURNS_DEEP_STUBS); Mockito.when(node.getChild().get(0)).thenReturn(Mockito.mock(LogicalPlan.class)); OpenSearchIndex.OpenSearchDefaultImplementor implementor = - new OpenSearchIndex.OpenSearchDefaultImplementor(client); + new OpenSearchIndex.OpenSearchDefaultImplementor(client); assertNotNull(implementor.visitML(node, null)); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexTest.java index 39af59b6cd..3ddb07d86a 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexTest.java @@ -63,20 +63,16 @@ class OpenSearchIndexTest { public static final int QUERY_SIZE_LIMIT = 200; public static final TimeValue SCROLL_TIMEOUT = new TimeValue(1); - public static final OpenSearchRequest.IndexName INDEX_NAME - = new OpenSearchRequest.IndexName("test"); + public static final OpenSearchRequest.IndexName INDEX_NAME = + new OpenSearchRequest.IndexName("test"); - @Mock - private 
OpenSearchClient client; + @Mock private OpenSearchClient client; - @Mock - private OpenSearchExprValueFactory exprValueFactory; + @Mock private OpenSearchExprValueFactory exprValueFactory; - @Mock - private Settings settings; + @Mock private Settings settings; - @Mock - private IndexMapping mapping; + @Mock private IndexMapping mapping; private OpenSearchIndex index; @@ -94,16 +90,18 @@ void isExist() { @Test void createIndex() { - Map mappings = Map.of( - "properties", + Map mappings = Map.of( - "name", "text", - "age", "integer")); + "properties", + Map.of( + "name", "text", + "age", "integer")); doNothing().when(client).createIndex("test", mappings); Map schema = new HashMap<>(); - schema.put("name", OpenSearchTextType.of(Map.of("keyword", - OpenSearchDataType.of(MappingType.Keyword)))); + schema.put( + "name", + OpenSearchTextType.of(Map.of("keyword", OpenSearchDataType.of(MappingType.Keyword)))); schema.put("age", INTEGER); index.create(schema); verify(client).createIndex(any(), any()); @@ -111,24 +109,27 @@ void createIndex() { @Test void getFieldTypes() { - when(mapping.getFieldMappings()).thenReturn( - ImmutableMap.builder() - .put("name", MappingType.Keyword) - .put("address", MappingType.Text) - .put("age", MappingType.Integer) - .put("account_number", MappingType.Long) - .put("balance1", MappingType.Float) - .put("balance2", MappingType.Double) - .put("gender", MappingType.Boolean) - .put("family", MappingType.Nested) - .put("employer", MappingType.Object) - .put("birthday", MappingType.Date) - .put("id1", MappingType.Byte) - .put("id2", MappingType.Short) - .put("blob", MappingType.Binary) - .build().entrySet().stream().collect(Collectors.toMap( - Map.Entry::getKey, e -> OpenSearchDataType.of(e.getValue()) - ))); + when(mapping.getFieldMappings()) + .thenReturn( + ImmutableMap.builder() + .put("name", MappingType.Keyword) + .put("address", MappingType.Text) + .put("age", MappingType.Integer) + .put("account_number", MappingType.Long) + .put("balance1", 
MappingType.Float) + .put("balance2", MappingType.Double) + .put("gender", MappingType.Boolean) + .put("family", MappingType.Nested) + .put("employer", MappingType.Object) + .put("birthday", MappingType.Date) + .put("id1", MappingType.Byte) + .put("id2", MappingType.Short) + .put("blob", MappingType.Binary) + .build() + .entrySet() + .stream() + .collect( + Collectors.toMap(Map.Entry::getKey, e -> OpenSearchDataType.of(e.getValue())))); when(client.getIndexMappings("test")).thenReturn(ImmutableMap.of("test", mapping)); // Run more than once to confirm caching logic is covered and can work @@ -150,35 +151,30 @@ void getFieldTypes() { hasEntry("birthday", ExprCoreType.TIMESTAMP), hasEntry("id1", ExprCoreType.BYTE), hasEntry("id2", ExprCoreType.SHORT), - hasEntry("blob", (ExprType) OpenSearchDataType.of(MappingType.Binary)) - )); + hasEntry("blob", (ExprType) OpenSearchDataType.of(MappingType.Binary)))); } } @Test void checkCacheUsedForFieldMappings() { - when(mapping.getFieldMappings()).thenReturn( - Map.of("name", OpenSearchDataType.of(MappingType.Keyword))); - when(client.getIndexMappings("test")).thenReturn( - ImmutableMap.of("test", mapping)); + when(mapping.getFieldMappings()) + .thenReturn(Map.of("name", OpenSearchDataType.of(MappingType.Keyword))); + when(client.getIndexMappings("test")).thenReturn(ImmutableMap.of("test", mapping)); OpenSearchIndex index = new OpenSearchIndex(client, settings, "test"); - assertThat(index.getFieldTypes(), allOf( - aMapWithSize(1), - hasEntry("name", STRING))); - assertThat(index.getFieldOpenSearchTypes(), allOf( - aMapWithSize(1), - hasEntry("name", OpenSearchDataType.of(STRING)))); + assertThat(index.getFieldTypes(), allOf(aMapWithSize(1), hasEntry("name", STRING))); + assertThat( + index.getFieldOpenSearchTypes(), + allOf(aMapWithSize(1), hasEntry("name", OpenSearchDataType.of(STRING)))); - lenient().when(mapping.getFieldMappings()).thenReturn( - Map.of("name", OpenSearchDataType.of(MappingType.Integer))); + lenient() + 
.when(mapping.getFieldMappings()) + .thenReturn(Map.of("name", OpenSearchDataType.of(MappingType.Integer))); - assertThat(index.getFieldTypes(), allOf( - aMapWithSize(1), - hasEntry("name", STRING))); - assertThat(index.getFieldOpenSearchTypes(), allOf( - aMapWithSize(1), - hasEntry("name", OpenSearchDataType.of(STRING)))); + assertThat(index.getFieldTypes(), allOf(aMapWithSize(1), hasEntry("name", STRING))); + assertThat( + index.getFieldOpenSearchTypes(), + allOf(aMapWithSize(1), hasEntry("name", OpenSearchDataType.of(STRING)))); } @Test @@ -193,8 +189,7 @@ void getReservedFieldTypes() { hasEntry("_routing", ExprCoreType.STRING), hasEntry("_sort", ExprCoreType.LONG), hasEntry("_score", ExprCoreType.FLOAT), - hasEntry("_maxscore", ExprCoreType.FLOAT) - )); + hasEntry("_maxscore", ExprCoreType.FLOAT))); } @Test @@ -204,8 +199,9 @@ void implementRelationOperatorOnly() { LogicalPlan plan = index.createScanBuilder(); Integer maxResultWindow = index.getMaxResultWindow(); final var requestBuilder = new OpenSearchRequestBuilder(QUERY_SIZE_LIMIT, exprValueFactory); - assertEquals(new OpenSearchIndexScan(client, - 200, requestBuilder.build(INDEX_NAME, maxResultWindow, SCROLL_TIMEOUT)), + assertEquals( + new OpenSearchIndexScan( + client, 200, requestBuilder.build(INDEX_NAME, maxResultWindow, SCROLL_TIMEOUT)), index.implement(index.optimize(plan))); } @@ -216,8 +212,10 @@ void implementRelationOperatorWithOptimization() { LogicalPlan plan = index.createScanBuilder(); Integer maxResultWindow = index.getMaxResultWindow(); final var requestBuilder = new OpenSearchRequestBuilder(QUERY_SIZE_LIMIT, exprValueFactory); - assertEquals(new OpenSearchIndexScan(client, 200, - requestBuilder.build(INDEX_NAME, maxResultWindow, SCROLL_TIMEOUT)), index.implement(plan)); + assertEquals( + new OpenSearchIndexScan( + client, 200, requestBuilder.build(INDEX_NAME, maxResultWindow, SCROLL_TIMEOUT)), + index.implement(plan)); } @Test @@ -239,12 +237,7 @@ void implementOtherLogicalOperators() { 
LogicalPlanDSL.dedupe( sort( eval( - remove( - rename( - index.createScanBuilder(), - mappings), - exclude), - newEvalField), + remove(rename(index.createScanBuilder(), mappings), exclude), newEvalField), sortField), dedupeField), include); @@ -258,9 +251,11 @@ void implementOtherLogicalOperators() { PhysicalPlanDSL.eval( PhysicalPlanDSL.remove( PhysicalPlanDSL.rename( - new OpenSearchIndexScan(client, - QUERY_SIZE_LIMIT, requestBuilder.build(INDEX_NAME, maxResultWindow, - SCROLL_TIMEOUT)), + new OpenSearchIndexScan( + client, + QUERY_SIZE_LIMIT, + requestBuilder.build( + INDEX_NAME, maxResultWindow, SCROLL_TIMEOUT)), mappings), exclude), newEvalField), diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngineTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngineTest.java index 1089e7e252..38f2ae495e 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngineTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngineTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage; import static org.junit.jupiter.api.Assertions.assertAll; @@ -25,31 +24,23 @@ @ExtendWith(MockitoExtension.class) class OpenSearchStorageEngineTest { - @Mock - private OpenSearchClient client; + @Mock private OpenSearchClient client; - @Mock - private Settings settings; + @Mock private Settings settings; @Test public void getTable() { OpenSearchStorageEngine engine = new OpenSearchStorageEngine(client, settings); - Table table = engine.getTable(new DataSourceSchemaName(DEFAULT_DATASOURCE_NAME, "default"), - "test"); - assertAll( - () -> assertNotNull(table), - () -> assertTrue(table instanceof OpenSearchIndex) - ); + Table table = + engine.getTable(new DataSourceSchemaName(DEFAULT_DATASOURCE_NAME, "default"), "test"); + assertAll(() -> assertNotNull(table), () -> 
assertTrue(table instanceof OpenSearchIndex)); } @Test public void getSystemTable() { OpenSearchStorageEngine engine = new OpenSearchStorageEngine(client, settings); - Table table = engine.getTable(new DataSourceSchemaName(DEFAULT_DATASOURCE_NAME, "default"), - TABLE_INFO); - assertAll( - () -> assertNotNull(table), - () -> assertTrue(table instanceof OpenSearchSystemIndex) - ); + Table table = + engine.getTable(new DataSourceSchemaName(DEFAULT_DATASOURCE_NAME, "default"), TABLE_INFO); + assertAll(() -> assertNotNull(table), () -> assertTrue(table instanceof OpenSearchSystemIndex)); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilderTest.java index 5a510fefec..229d62abdf 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilderTest.java @@ -26,10 +26,8 @@ @ExtendWith(MockitoExtension.class) class OpenSearchIndexScanAggregationBuilderTest { - @Mock - OpenSearchRequestBuilder requestBuilder; - @Mock - LogicalAggregation logicalAggregation; + @Mock OpenSearchRequestBuilder requestBuilder; + @Mock LogicalAggregation logicalAggregation; OpenSearchIndexScanAggregationBuilder builder; @BeforeEach @@ -71,5 +69,4 @@ void pushDownPageSize() { void pushDownNested() { assertFalse(builder.pushDownNested(mock(LogicalNested.class))); } - } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanOptimizationTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanOptimizationTest.java index e045bae3e3..6749f87c5b 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanOptimizationTest.java 
+++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanOptimizationTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.scan; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -91,16 +90,13 @@ @ExtendWith(MockitoExtension.class) class OpenSearchIndexScanOptimizationTest { - @Mock - private Table table; + @Mock private Table table; - @Mock - private OpenSearchIndexScan indexScan; + @Mock private OpenSearchIndexScan indexScan; private OpenSearchIndexScanBuilder indexScanBuilder; - @Mock - private OpenSearchRequestBuilder requestBuilder; + @Mock private OpenSearchRequestBuilder requestBuilder; private Runnable[] verifyPushDownCalls = {}; @@ -114,72 +110,54 @@ void setUp() { void test_project_push_down() { assertEqualsAfterOptimization( project( - indexScanBuilder( - withProjectPushedDown(DSL.ref("intV", INTEGER))), - DSL.named("i", DSL.ref("intV", INTEGER)) - ), - project( - relation("schema", table), - DSL.named("i", DSL.ref("intV", INTEGER))) - ); + indexScanBuilder(withProjectPushedDown(DSL.ref("intV", INTEGER))), + DSL.named("i", DSL.ref("intV", INTEGER))), + project(relation("schema", table), DSL.named("i", DSL.ref("intV", INTEGER)))); } - /** - * SELECT intV as i FROM schema WHERE intV = 1. - */ + /** SELECT intV as i FROM schema WHERE intV = 1. 
*/ @Test void test_filter_push_down() { assertEqualsAfterOptimization( project( indexScanBuilder( - //withProjectPushedDown(DSL.ref("intV", INTEGER)), - withFilterPushedDown(QueryBuilders.termQuery("intV", 1)) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ), + // withProjectPushedDown(DSL.ref("intV", INTEGER)), + withFilterPushedDown(QueryBuilders.termQuery("intV", 1))), + DSL.named("i", DSL.ref("intV", INTEGER))), project( filter( relation("schema", table), - DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1))) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ) - ); + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1)))), + DSL.named("i", DSL.ref("intV", INTEGER)))); } - /** - * SELECT intV as i FROM schema WHERE query_string(["intV^1.5", "QUERY", boost=12.5). - */ + /** SELECT intV as i FROM schema WHERE query_string(["intV^1.5", "QUERY", boost=12.5). */ @Test void test_filter_on_opensearchfunction_with_trackedscores_push_down() { LogicalPlan expectedPlan = project( indexScanBuilder( withFilterPushedDown( - QueryBuilders.queryStringQuery("QUERY") - .field("intV", 1.5F) - .boost(12.5F) - ), - withTrackedScoresPushedDown(true) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ); - FunctionExpression queryString = DSL.query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "intV", ExprValueUtils.floatValue(1.5F)))))), - DSL.namedArgument("query", "QUERY"), - DSL.namedArgument("boost", "12.5")); + QueryBuilders.queryStringQuery("QUERY").field("intV", 1.5F).boost(12.5F)), + withTrackedScoresPushedDown(true)), + DSL.named("i", DSL.ref("intV", INTEGER))); + FunctionExpression queryString = + DSL.query_string( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("intV", ExprValueUtils.floatValue(1.5F)))))), + DSL.namedArgument("query", "QUERY"), + DSL.namedArgument("boost", "12.5")); 
((OpenSearchFunctions.OpenSearchFunction) queryString).setScoreTracked(true); - LogicalPlan logicalPlan = project( - filter( - relation("schema", table), - queryString - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ); + LogicalPlan logicalPlan = + project( + filter(relation("schema", table), queryString), + DSL.named("i", DSL.ref("intV", INTEGER))); assertEqualsAfterOptimization(expectedPlan, logicalPlan); } @@ -197,35 +175,36 @@ void test_filter_on_multiple_opensearchfunctions_with_trackedscores_push_down() .should( QueryBuilders.queryStringQuery("QUERY") .field("intV", 1.5F) - .boost(12.5F) - ) - ), - withTrackedScoresPushedDown(true) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ); - FunctionExpression firstQueryString = DSL.query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "intV", ExprValueUtils.floatValue(1.5F)))))), - DSL.namedArgument("query", "QUERY"), - DSL.namedArgument("boost", "12.5")); + .boost(12.5F))), + withTrackedScoresPushedDown(true)), + DSL.named("i", DSL.ref("intV", INTEGER))); + FunctionExpression firstQueryString = + DSL.query_string( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("intV", ExprValueUtils.floatValue(1.5F)))))), + DSL.namedArgument("query", "QUERY"), + DSL.namedArgument("boost", "12.5")); ((OpenSearchFunctions.OpenSearchFunction) firstQueryString).setScoreTracked(false); - FunctionExpression secondQueryString = DSL.query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "intV", ExprValueUtils.floatValue(1.5F)))))), - DSL.namedArgument("query", "QUERY"), - DSL.namedArgument("boost", "12.5")); + FunctionExpression secondQueryString = + DSL.query_string( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("intV", ExprValueUtils.floatValue(1.5F)))))), + DSL.namedArgument("query", 
"QUERY"), + DSL.namedArgument("boost", "12.5")); ((OpenSearchFunctions.OpenSearchFunction) secondQueryString).setScoreTracked(true); - LogicalPlan logicalPlan = project( - filter( - relation("schema", table), - DSL.or(firstQueryString, secondQueryString) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ); + LogicalPlan logicalPlan = + project( + filter(relation("schema", table), DSL.or(firstQueryString, secondQueryString)), + DSL.named("i", DSL.ref("intV", INTEGER))); assertEqualsAfterOptimization(expectedPlan, logicalPlan); } @@ -235,34 +214,28 @@ void test_filter_on_opensearchfunction_without_trackedscores_push_down() { project( indexScanBuilder( withFilterPushedDown( - QueryBuilders.queryStringQuery("QUERY") - .field("intV", 1.5F) - .boost(12.5F) - ), - withTrackedScoresPushedDown(false) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ); - FunctionExpression queryString = DSL.query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "intV", ExprValueUtils.floatValue(1.5F)))))), - DSL.namedArgument("query", "QUERY"), - DSL.namedArgument("boost", "12.5")); - - LogicalPlan logicalPlan = project( - filter( - relation("schema", table), - queryString - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ); + QueryBuilders.queryStringQuery("QUERY").field("intV", 1.5F).boost(12.5F)), + withTrackedScoresPushedDown(false)), + DSL.named("i", DSL.ref("intV", INTEGER))); + FunctionExpression queryString = + DSL.query_string( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("intV", ExprValueUtils.floatValue(1.5F)))))), + DSL.namedArgument("query", "QUERY"), + DSL.namedArgument("boost", "12.5")); + + LogicalPlan logicalPlan = + project( + filter(relation("schema", table), queryString), + DSL.named("i", DSL.ref("intV", INTEGER))); assertEqualsAfterOptimization(expectedPlan, logicalPlan); } - /** - * SELECT avg(intV) FROM schema GROUP BY string_value. 
- */ + /** SELECT avg(intV) FROM schema GROUP BY string_value. */ @Test void test_aggregation_push_down() { assertEqualsAfterOptimization( @@ -272,20 +245,17 @@ void test_aggregation_push_down() { aggregate("AVG(intV)") .aggregateBy("intV") .groupBy("longV") - .resultTypes(Map.of( - "AVG(intV)", DOUBLE, - "longV", LONG)))), + .resultTypes( + Map.of( + "AVG(intV)", DOUBLE, + "longV", LONG)))), DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))), project( aggregation( relation("schema", table), - ImmutableList - .of(DSL.named("AVG(intV)", - DSL.avg(DSL.ref("intV", INTEGER)))), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), ImmutableList.of(DSL.named("longV", DSL.ref("longV", LONG)))), - DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)) - ) - ); + DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)))); } /* @@ -319,125 +289,79 @@ void aggregation_cant_merge_indexScan_with_project() { } */ - /** - * Sort - Relation --> IndexScan. - */ + /** Sort - Relation --> IndexScan. 
*/ @Test void test_sort_push_down() { assertEqualsAfterOptimization( indexScanBuilder( withSortPushedDown( - SortBuilders.fieldSort("intV").order(SortOrder.ASC).missing("_first")) - ), - sort( - relation("schema", table), - Pair.of(SortOption.DEFAULT_ASC, DSL.ref("intV", INTEGER)) - ) - ); + SortBuilders.fieldSort("intV").order(SortOrder.ASC).missing("_first"))), + sort(relation("schema", table), Pair.of(SortOption.DEFAULT_ASC, DSL.ref("intV", INTEGER)))); } @Test void test_page_push_down() { assertEqualsAfterOptimization( project( - indexScanBuilder( - withPageSizePushDown(5)), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ), - paginate(project( - relation("schema", table), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ), 5 - )); + indexScanBuilder(withPageSizePushDown(5)), DSL.named("intV", DSL.ref("intV", INTEGER))), + paginate( + project(relation("schema", table), DSL.named("intV", DSL.ref("intV", INTEGER))), 5)); } @Test void test_score_sort_push_down() { assertEqualsAfterOptimization( - indexScanBuilder( - withSortPushedDown( - SortBuilders.scoreSort().order(SortOrder.ASC) - ) - ), + indexScanBuilder(withSortPushedDown(SortBuilders.scoreSort().order(SortOrder.ASC))), sort( relation("schema", table), - Pair.of(SortOption.DEFAULT_ASC, DSL.ref("_score", INTEGER)) - ) - ); + Pair.of(SortOption.DEFAULT_ASC, DSL.ref("_score", INTEGER)))); } @Test void test_limit_push_down() { assertEqualsAfterOptimization( project( - indexScanBuilder( - withLimitPushedDown(1, 1)), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ), + indexScanBuilder(withLimitPushedDown(1, 1)), + DSL.named("intV", DSL.ref("intV", INTEGER))), project( - limit( - relation("schema", table), - 1, 1), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ) - ); + limit(relation("schema", table), 1, 1), DSL.named("intV", DSL.ref("intV", INTEGER)))); } @Test void test_highlight_push_down() { assertEqualsAfterOptimization( project( - indexScanBuilder( - withHighlightPushedDown("*", Collections.emptyMap())), - 
DSL.named("highlight(*)", - new HighlightExpression(DSL.literal("*"))) - ), + indexScanBuilder(withHighlightPushedDown("*", Collections.emptyMap())), + DSL.named("highlight(*)", new HighlightExpression(DSL.literal("*")))), project( - highlight( - relation("schema", table), - DSL.literal("*"), Collections.emptyMap()), - DSL.named("highlight(*)", - new HighlightExpression(DSL.literal("*"))) - ) - ); + highlight(relation("schema", table), DSL.literal("*"), Collections.emptyMap()), + DSL.named("highlight(*)", new HighlightExpression(DSL.literal("*"))))); } @Test void test_nested_push_down() { - List> args = List.of( - Map.of( - "field", new ReferenceExpression("message.info", STRING), - "path", new ReferenceExpression("message", STRING) - ) - ); + List> args = + List.of( + Map.of( + "field", new ReferenceExpression("message.info", STRING), + "path", new ReferenceExpression("message", STRING))); List projectList = List.of( - new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null) - ); + new NamedExpression("message.info", DSL.nested(DSL.ref("message.info", STRING)), null)); LogicalNested nested = new LogicalNested(null, args, projectList); assertEqualsAfterOptimization( project( - nested( - indexScanBuilder( - withNestedPushedDown(nested.getFields())), args, projectList), - DSL.named("message.info", - DSL.nested(DSL.ref("message.info", STRING))) - ), - project( - nested( - relation("schema", table), args, projectList), - DSL.named("message.info", - DSL.nested(DSL.ref("message.info", STRING))) - ) - ); + nested(indexScanBuilder(withNestedPushedDown(nested.getFields())), args, projectList), + DSL.named("message.info", DSL.nested(DSL.ref("message.info", STRING)))), + project( + nested(relation("schema", table), args, projectList), + DSL.named("message.info", DSL.nested(DSL.ref("message.info", STRING))))); } - /** - * SELECT avg(intV) FROM schema WHERE intV = 1 GROUP BY string_value. 
- */ + /** SELECT avg(intV) FROM schema WHERE intV = 1 GROUP BY string_value. */ @Test void test_aggregation_filter_push_down() { assertEqualsAfterOptimization( @@ -448,50 +372,37 @@ void test_aggregation_filter_push_down() { aggregate("AVG(intV)") .aggregateBy("intV") .groupBy("longV") - .resultTypes(Map.of( - "AVG(intV)", DOUBLE, - "longV", LONG)))), - DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)) - ), + .resultTypes( + Map.of( + "AVG(intV)", DOUBLE, + "longV", LONG)))), + DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))), project( aggregation( filter( relation("schema", table), - DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1))) - ), - ImmutableList - .of(DSL.named("AVG(intV)", - DSL.avg(DSL.ref("intV", INTEGER)))), + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1)))), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), ImmutableList.of(DSL.named("longV", DSL.ref("longV", LONG)))), - DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)) - ) - ); + DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)))); } - /** - * Sort - Filter - Relation --> IndexScan. - */ + /** Sort - Filter - Relation --> IndexScan. */ @Test void test_sort_filter_push_down() { assertEqualsAfterOptimization( indexScanBuilder( withFilterPushedDown(QueryBuilders.termQuery("intV", 1)), withSortPushedDown( - SortBuilders.fieldSort("longV").order(SortOrder.ASC).missing("_first")) - ), + SortBuilders.fieldSort("longV").order(SortOrder.ASC).missing("_first"))), sort( filter( relation("schema", table), - DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1))) - ), - Pair.of(SortOption.DEFAULT_ASC, DSL.ref("longV", LONG)) - ) - ); + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1)))), + Pair.of(SortOption.DEFAULT_ASC, DSL.ref("longV", LONG)))); } - /** - * SELECT avg(intV) FROM schema GROUP BY stringV ORDER BY stringV. - */ + /** SELECT avg(intV) FROM schema GROUP BY stringV ORDER BY stringV. 
*/ @Test void test_sort_aggregation_push_down() { assertEqualsAfterOptimization( @@ -502,22 +413,19 @@ void test_sort_aggregation_push_down() { .aggregateBy("intV") .groupBy("stringV") .sortBy(SortOption.DEFAULT_DESC) - .resultTypes(Map.of( - "AVG(intV)", DOUBLE, - "stringV", STRING)))), + .resultTypes( + Map.of( + "AVG(intV)", DOUBLE, + "stringV", STRING)))), DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))), project( sort( aggregation( relation("schema", table), - ImmutableList - .of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), ImmutableList.of(DSL.named("stringV", DSL.ref("stringV", STRING)))), - Pair.of(SortOption.DEFAULT_DESC, DSL.ref("stringV", STRING)) - ), - DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)) - ) - ); + Pair.of(SortOption.DEFAULT_DESC, DSL.ref("stringV", STRING))), + DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)))); } @Test @@ -529,21 +437,17 @@ void test_limit_sort_filter_push_down() { withSortPushedDown( SortBuilders.fieldSort("longV").order(SortOrder.ASC).missing("_first")), withLimitPushedDown(1, 1)), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ), + DSL.named("intV", DSL.ref("intV", INTEGER))), project( limit( sort( filter( relation("schema", table), - DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1))) - ), - Pair.of(SortOption.DEFAULT_ASC, DSL.ref("longV", LONG)) - ), 1, 1 - ), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ) - ); + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1)))), + Pair.of(SortOption.DEFAULT_ASC, DSL.ref("longV", LONG))), + 1, + 1), + DSL.named("intV", DSL.ref("intV", INTEGER)))); } /* @@ -557,23 +461,16 @@ void only_one_project_should_be_push() { project( project( indexScanBuilder( - withProjectPushedDown( - DSL.ref("intV", INTEGER), - DSL.ref("stringV", STRING))), + withProjectPushedDown(DSL.ref("intV", INTEGER), DSL.ref("stringV", STRING))), DSL.named("i", DSL.ref("intV", 
INTEGER)), - DSL.named("s", DSL.ref("stringV", STRING)) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ), + DSL.named("s", DSL.ref("stringV", STRING))), + DSL.named("i", DSL.ref("intV", INTEGER))), project( project( relation("schema", table), DSL.named("i", DSL.ref("intV", INTEGER)), - DSL.named("s", DSL.ref("stringV", STRING)) - ), - DSL.named("i", DSL.ref("intV", INTEGER)) - ) - ); + DSL.named("s", DSL.ref("stringV", STRING))), + DSL.named("i", DSL.ref("intV", INTEGER)))); } @Test @@ -586,21 +483,14 @@ void test_nested_sort_filter_push_down() { SortBuilders.fieldSort("message.info") .order(SortOrder.ASC) .setNestedSort(new NestedSortBuilder("message")))), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ), + DSL.named("intV", DSL.ref("intV", INTEGER))), project( - sort( - filter( - relation("schema", table), - DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1))) - ), - Pair.of( - SortOption.DEFAULT_ASC, DSL.nested(DSL.ref("message.info", STRING)) - ) - ), - DSL.named("intV", DSL.ref("intV", INTEGER)) - ) - ); + sort( + filter( + relation("schema", table), + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(integerValue(1)))), + Pair.of(SortOption.DEFAULT_ASC, DSL.nested(DSL.ref("message.info", STRING)))), + DSL.named("intV", DSL.ref("intV", INTEGER)))); } @Test @@ -610,54 +500,30 @@ void test_function_expression_sort_returns_optimized_logical_sort() { sort( indexScanBuilder(), Pair.of( - SortOption.DEFAULT_ASC, - DSL.match(DSL.namedArgument("field", literal("message"))) - ) - ), + SortOption.DEFAULT_ASC, DSL.match(DSL.namedArgument("field", literal("message"))))), sort( relation("schema", table), Pair.of( SortOption.DEFAULT_ASC, - DSL.match(DSL.namedArgument("field", literal("message")) - ) - ) - ) - ); + DSL.match(DSL.namedArgument("field", literal("message")))))); } @Test void test_non_field_sort_returns_optimized_logical_sort() { // Invalid use case coverage OpenSearchIndexScanBuilder::sortByFieldsOnly returns false assertEqualsAfterOptimization( 
- sort( - indexScanBuilder(), - Pair.of( - SortOption.DEFAULT_ASC, - DSL.literal("field") - ) - ), - sort( - relation("schema", table), - Pair.of( - SortOption.DEFAULT_ASC, - DSL.literal("field") - ) - ) - ); + sort(indexScanBuilder(), Pair.of(SortOption.DEFAULT_ASC, DSL.literal("field"))), + sort(relation("schema", table), Pair.of(SortOption.DEFAULT_ASC, DSL.literal("field")))); } @Test void sort_with_expression_cannot_merge_with_relation() { assertEqualsAfterOptimization( sort( - indexScanBuilder(), - Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER))) - ), + indexScanBuilder(), Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER)))), sort( relation("schema", table), - Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER))) - ) - ); + Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER))))); } @Test @@ -669,20 +535,17 @@ void sort_with_expression_cannot_merge_with_aggregation() { aggregate("AVG(intV)") .aggregateBy("intV") .groupBy("stringV") - .resultTypes(Map.of( - "AVG(intV)", DOUBLE, - "stringV", STRING)))), - Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER))) - ), + .resultTypes( + Map.of( + "AVG(intV)", DOUBLE, + "stringV", STRING)))), + Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER)))), sort( aggregation( relation("schema", table), - ImmutableList - .of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), ImmutableList.of(DSL.named("stringV", DSL.ref("stringV", STRING)))), - Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER))) - ) - ); + Pair.of(SortOption.DEFAULT_ASC, DSL.abs(DSL.ref("intV", INTEGER))))); } @Test @@ -690,30 +553,21 @@ void aggregation_cant_merge_index_scan_with_limit() { assertEqualsAfterOptimization( project( aggregation( - indexScanBuilder( - withLimitPushedDown(10, 0)), - ImmutableList - .of(DSL.named("AVG(intV)", - DSL.avg(DSL.ref("intV", INTEGER)))), - 
ImmutableList.of(DSL.named("longV", - DSL.abs(DSL.ref("longV", LONG))))), + indexScanBuilder(withLimitPushedDown(10, 0)), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), + ImmutableList.of(DSL.named("longV", DSL.abs(DSL.ref("longV", LONG))))), DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))), project( aggregation( - limit( - relation("schema", table), - 10, 0), - ImmutableList - .of(DSL.named("AVG(intV)", - DSL.avg(DSL.ref("intV", INTEGER)))), - ImmutableList.of(DSL.named("longV", - DSL.abs(DSL.ref("longV", LONG))))), + limit(relation("schema", table), 10, 0), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), + ImmutableList.of(DSL.named("longV", DSL.abs(DSL.ref("longV", LONG))))), DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)))); } /** - * Can't Optimize the following query. - * SELECT avg(intV) FROM schema GROUP BY stringV ORDER BY avg(intV). + * Can't Optimize the following query. SELECT avg(intV) FROM schema GROUP BY stringV ORDER BY + * avg(intV). 
*/ @Test void sort_refer_to_aggregator_should_not_merge_with_indexAgg() { @@ -725,52 +579,39 @@ void sort_refer_to_aggregator_should_not_merge_with_indexAgg() { aggregate("AVG(intV)") .aggregateBy("intV") .groupBy("stringV") - .resultTypes(Map.of( - "AVG(intV)", DOUBLE, - "stringV", STRING)))), - Pair.of(SortOption.DEFAULT_ASC, DSL.ref("AVG(intV)", INTEGER)) - ), + .resultTypes( + Map.of( + "AVG(intV)", DOUBLE, + "stringV", STRING)))), + Pair.of(SortOption.DEFAULT_ASC, DSL.ref("AVG(intV)", INTEGER))), DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))), project( sort( aggregation( relation("schema", table), - ImmutableList - .of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), + ImmutableList.of(DSL.named("AVG(intV)", DSL.avg(DSL.ref("intV", INTEGER)))), ImmutableList.of(DSL.named("stringV", DSL.ref("stringV", STRING)))), - Pair.of(SortOption.DEFAULT_ASC, DSL.ref("AVG(intV)", INTEGER)) - ), - DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)) - ) - ); + Pair.of(SortOption.DEFAULT_ASC, DSL.ref("AVG(intV)", INTEGER))), + DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE)))); } @Test void project_literal_should_not_be_pushed_down() { assertEqualsAfterOptimization( - project( - indexScanBuilder(), - DSL.named("i", DSL.literal("str")) - ), - optimize( - project( - relation("schema", table), - DSL.named("i", DSL.literal("str")) - ) - ) - ); + project(indexScanBuilder(), DSL.named("i", DSL.literal("str"))), + optimize(project(relation("schema", table), DSL.named("i", DSL.literal("str"))))); } private OpenSearchIndexScanBuilder indexScanBuilder(Runnable... verifyPushDownCalls) { this.verifyPushDownCalls = verifyPushDownCalls; - return new OpenSearchIndexScanBuilder(new OpenSearchIndexScanQueryBuilder(requestBuilder), - requestBuilder -> indexScan); + return new OpenSearchIndexScanBuilder( + new OpenSearchIndexScanQueryBuilder(requestBuilder), requestBuilder -> indexScan); } private OpenSearchIndexScanBuilder indexScanAggBuilder(Runnable... 
verifyPushDownCalls) { this.verifyPushDownCalls = verifyPushDownCalls; - var aggregationBuilder = new OpenSearchIndexScanAggregationBuilder( - requestBuilder, mock(LogicalAggregation.class)); + var aggregationBuilder = + new OpenSearchIndexScanAggregationBuilder(requestBuilder, mock(LogicalAggregation.class)); return new OpenSearchIndexScanBuilder(aggregationBuilder, builder -> indexScan); } @@ -797,29 +638,32 @@ private Runnable withAggregationPushedDown( AggregationAssertHelper.AggregationAssertHelperBuilder aggregation) { // Assume single term bucket and AVG metric in all tests in this suite - CompositeAggregationBuilder aggBuilder = AggregationBuilders.composite( - "composite_buckets", - Collections.singletonList( - new TermsValuesSourceBuilder(aggregation.groupBy) - .field(aggregation.groupBy) - .order(aggregation.sortBy.getSortOrder() == ASC ? "asc" : "desc") - .missingOrder(aggregation.sortBy.getNullOrder() == NULL_FIRST ? "first" : "last") - .missingBucket(true))) - .subAggregation( - AggregationBuilders.avg(aggregation.aggregateName) - .field(aggregation.aggregateBy)) - .size(AggregationQueryBuilder.AGGREGATION_BUCKET_SIZE); + CompositeAggregationBuilder aggBuilder = + AggregationBuilders.composite( + "composite_buckets", + Collections.singletonList( + new TermsValuesSourceBuilder(aggregation.groupBy) + .field(aggregation.groupBy) + .order(aggregation.sortBy.getSortOrder() == ASC ? "asc" : "desc") + .missingOrder( + aggregation.sortBy.getNullOrder() == NULL_FIRST ? 
"first" : "last") + .missingBucket(true))) + .subAggregation( + AggregationBuilders.avg(aggregation.aggregateName).field(aggregation.aggregateBy)) + .size(AggregationQueryBuilder.AGGREGATION_BUCKET_SIZE); List aggBuilders = Collections.singletonList(aggBuilder); OpenSearchAggregationResponseParser responseParser = - new CompositeAggregationParser( - new SingleValueParser(aggregation.aggregateName)); + new CompositeAggregationParser(new SingleValueParser(aggregation.aggregateName)); return () -> { verify(requestBuilder, times(1)).pushDownAggregation(Pair.of(aggBuilders, responseParser)); - verify(requestBuilder, times(1)).pushTypeMapping(aggregation.resultTypes - .entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, - e -> OpenSearchDataType.of(e.getValue())))); + verify(requestBuilder, times(1)) + .pushTypeMapping( + aggregation.resultTypes.entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, e -> OpenSearchDataType.of(e.getValue())))); }; } @@ -832,8 +676,8 @@ private Runnable withLimitPushedDown(int size, int offset) { } private Runnable withProjectPushedDown(ReferenceExpression... 
references) { - return () -> verify(requestBuilder, times(1)).pushDownProjects( - new HashSet<>(Arrays.asList(references))); + return () -> + verify(requestBuilder, times(1)).pushDownProjects(new HashSet<>(Arrays.asList(references))); } private Runnable withHighlightPushedDown(String field, Map arguments) { @@ -875,16 +719,18 @@ private static class AggregationAssertHelper { } private LogicalPlan optimize(LogicalPlan plan) { - LogicalPlanOptimizer optimizer = new LogicalPlanOptimizer(List.of( - new CreateTableScanBuilder(), - new PushDownPageSize(), - PUSH_DOWN_FILTER, - PUSH_DOWN_AGGREGATION, - PUSH_DOWN_SORT, - PUSH_DOWN_LIMIT, - PUSH_DOWN_HIGHLIGHT, - PUSH_DOWN_NESTED, - PUSH_DOWN_PROJECT)); + LogicalPlanOptimizer optimizer = + new LogicalPlanOptimizer( + List.of( + new CreateTableScanBuilder(), + new PushDownPageSize(), + PUSH_DOWN_FILTER, + PUSH_DOWN_AGGREGATION, + PUSH_DOWN_SORT, + PUSH_DOWN_LIMIT, + PUSH_DOWN_HIGHLIGHT, + PUSH_DOWN_NESTED, + PUSH_DOWN_PROJECT)); return optimizer.optimize(plan); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanPaginationTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanPaginationTest.java index 67f0869d6e..2085519b12 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanPaginationTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanPaginationTest.java @@ -44,34 +44,37 @@ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) public class OpenSearchIndexScanPaginationTest { - public static final OpenSearchRequest.IndexName INDEX_NAME - = new OpenSearchRequest.IndexName("test"); + public static final OpenSearchRequest.IndexName INDEX_NAME = + new OpenSearchRequest.IndexName("test"); public static final int MAX_RESULT_WINDOW = 3; public static final TimeValue SCROLL_TIMEOUT = TimeValue.timeValueMinutes(4); - @Mock - 
private Settings settings; + @Mock private Settings settings; @BeforeEach void setup() { lenient().when(settings.getSettingValue(Settings.Key.QUERY_SIZE_LIMIT)).thenReturn(QUERY_SIZE); - lenient().when(settings.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE)) - .thenReturn(TimeValue.timeValueMinutes(1)); + lenient() + .when(settings.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE)) + .thenReturn(TimeValue.timeValueMinutes(1)); } - @Mock - private OpenSearchClient client; + @Mock private OpenSearchClient client; - private final OpenSearchExprValueFactory exprValueFactory - = new OpenSearchExprValueFactory(Map.of( - "name", OpenSearchDataType.of(STRING), - "department", OpenSearchDataType.of(STRING))); + private final OpenSearchExprValueFactory exprValueFactory = + new OpenSearchExprValueFactory( + Map.of( + "name", OpenSearchDataType.of(STRING), + "department", OpenSearchDataType.of(STRING))); @Test void query_empty_result() { mockResponse(client); var builder = new OpenSearchRequestBuilder(QUERY_SIZE, exprValueFactory); - try (var indexScan = new OpenSearchIndexScan(client, MAX_RESULT_WINDOW, - builder.build(INDEX_NAME, MAX_RESULT_WINDOW, SCROLL_TIMEOUT))) { + try (var indexScan = + new OpenSearchIndexScan( + client, + MAX_RESULT_WINDOW, + builder.build(INDEX_NAME, MAX_RESULT_WINDOW, SCROLL_TIMEOUT))) { indexScan.open(); assertFalse(indexScan.hasNext()); } @@ -80,8 +83,11 @@ void query_empty_result() { @Test void explain_not_implemented() { - assertThrows(Throwable.class, () -> mock(OpenSearchIndexScan.class, - withSettings().defaultAnswer(CALLS_REAL_METHODS)).explain()); + assertThrows( + Throwable.class, + () -> + mock(OpenSearchIndexScan.class, withSettings().defaultAnswer(CALLS_REAL_METHODS)) + .explain()); } @Test @@ -92,9 +98,11 @@ void dont_serialize_if_no_cursor() { OpenSearchResponse response = mock(); when(builder.build(any(), anyInt(), any())).thenReturn(request); when(client.search(any())).thenReturn(response); - try (var indexScan - = new 
OpenSearchIndexScan(client, MAX_RESULT_WINDOW, - builder.build(INDEX_NAME, MAX_RESULT_WINDOW, SCROLL_TIMEOUT))) { + try (var indexScan = + new OpenSearchIndexScan( + client, + MAX_RESULT_WINDOW, + builder.build(INDEX_NAME, MAX_RESULT_WINDOW, SCROLL_TIMEOUT))) { indexScan.open(); when(request.hasAnotherBatch()).thenReturn(false); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanTest.java index 9a0957ce75..ac1e9038fb 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.scan; import static org.junit.jupiter.api.Assertions.assertAll; @@ -12,7 +11,6 @@ import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.lenient; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -67,20 +65,19 @@ class OpenSearchIndexScanTest { public static final int QUERY_SIZE = 200; - public static final OpenSearchRequest.IndexName INDEX_NAME - = new OpenSearchRequest.IndexName("employees"); + public static final OpenSearchRequest.IndexName INDEX_NAME = + new OpenSearchRequest.IndexName("employees"); public static final int MAX_RESULT_WINDOW = 10000; public static final TimeValue CURSOR_KEEP_ALIVE = TimeValue.timeValueMinutes(1); - @Mock - private OpenSearchClient client; + @Mock private OpenSearchClient client; - private final OpenSearchExprValueFactory exprValueFactory = new OpenSearchExprValueFactory( - Map.of("name", OpenSearchDataType.of(STRING), - "department", 
OpenSearchDataType.of(STRING))); + private final OpenSearchExprValueFactory exprValueFactory = + new OpenSearchExprValueFactory( + Map.of( + "name", OpenSearchDataType.of(STRING), "department", OpenSearchDataType.of(STRING))); @BeforeEach - void setup() { - } + void setup() {} @Test void explain() { @@ -97,8 +94,8 @@ void throws_no_cursor_exception() { var request = mock(OpenSearchRequest.class); when(request.hasAnotherBatch()).thenReturn(false); try (var indexScan = new OpenSearchIndexScan(client, QUERY_SIZE, request); - var byteStream = new ByteArrayOutputStream(); - var objectStream = new ObjectOutputStream(byteStream)) { + var byteStream = new ByteArrayOutputStream(); + var objectStream = new ObjectOutputStream(byteStream)) { assertThrows(NoCursorException.class, () -> objectStream.writeObject(indexScan)); } } @@ -113,8 +110,9 @@ void serialize() { var index = mock(OpenSearchIndex.class); when(engine.getClient()).thenReturn(client); when(engine.getTable(any(), any())).thenReturn(index); - var request = new OpenSearchScrollRequest( - INDEX_NAME, CURSOR_KEEP_ALIVE, searchSourceBuilder, factory, List.of()); + var request = + new OpenSearchScrollRequest( + INDEX_NAME, CURSOR_KEEP_ALIVE, searchSourceBuilder, factory, List.of()); request.setScrollId("valid-id"); // make a response, so OpenSearchResponse::isEmpty would return true and unset needClean var response = mock(SearchResponse.class); @@ -122,7 +120,7 @@ void serialize() { var hits = mock(SearchHits.class); when(response.getHits()).thenReturn(hits); when(response.getScrollId()).thenReturn("valid-id"); - when(hits.getHits()).thenReturn(new SearchHit[]{ mock() }); + when(hits.getHits()).thenReturn(new SearchHit[] {mock()}); request.search(null, (req) -> response); try (var indexScan = new OpenSearchIndexScan(client, QUERY_SIZE, request)) { @@ -146,8 +144,9 @@ void query_empty_result() { mockResponse(client); final var name = new OpenSearchRequest.IndexName("test"); final var requestBuilder = new 
OpenSearchRequestBuilder(QUERY_SIZE, exprValueFactory); - try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, - QUERY_SIZE, requestBuilder.build(name, MAX_RESULT_WINDOW, CURSOR_KEEP_ALIVE))) { + try (OpenSearchIndexScan indexScan = + new OpenSearchIndexScan( + client, QUERY_SIZE, requestBuilder.build(name, MAX_RESULT_WINDOW, CURSOR_KEEP_ALIVE))) { indexScan.open(); assertFalse(indexScan.hasNext()); } @@ -156,88 +155,84 @@ void query_empty_result() { @Test void query_all_results_with_query() { - mockResponse(client, new ExprValue[]{ - employee(1, "John", "IT"), - employee(2, "Smith", "HR"), - employee(3, "Allen", "IT")}); + mockResponse( + client, + new ExprValue[] { + employee(1, "John", "IT"), employee(2, "Smith", "HR"), employee(3, "Allen", "IT") + }); final var requestBuilder = new OpenSearchRequestBuilder(QUERY_SIZE, exprValueFactory); - try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, - 10, requestBuilder.build(INDEX_NAME, 10000, CURSOR_KEEP_ALIVE))) { + try (OpenSearchIndexScan indexScan = + new OpenSearchIndexScan( + client, 10, requestBuilder.build(INDEX_NAME, 10000, CURSOR_KEEP_ALIVE))) { indexScan.open(); assertAll( () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(1, "John", "IT"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(2, "Smith", "HR"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(3, "Allen", "IT"), indexScan.next()), - - () -> assertFalse(indexScan.hasNext()) - ); + () -> assertFalse(indexScan.hasNext())); } verify(client).cleanup(any()); } - static final OpenSearchRequest.IndexName EMPLOYEES_INDEX - = new OpenSearchRequest.IndexName("employees"); + static final OpenSearchRequest.IndexName EMPLOYEES_INDEX = + new OpenSearchRequest.IndexName("employees"); @Test void query_all_results_with_scroll() { - mockResponse(client, - new ExprValue[]{employee(1, "John", "IT"), employee(2, "Smith", "HR")}, - new 
ExprValue[]{employee(3, "Allen", "IT")}); + mockResponse( + client, + new ExprValue[] {employee(1, "John", "IT"), employee(2, "Smith", "HR")}, + new ExprValue[] {employee(3, "Allen", "IT")}); final var requestBuilder = new OpenSearchRequestBuilder(QUERY_SIZE, exprValueFactory); - try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, - 10, requestBuilder.build(INDEX_NAME, 10000, CURSOR_KEEP_ALIVE))) { + try (OpenSearchIndexScan indexScan = + new OpenSearchIndexScan( + client, 10, requestBuilder.build(INDEX_NAME, 10000, CURSOR_KEEP_ALIVE))) { indexScan.open(); assertAll( () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(1, "John", "IT"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(2, "Smith", "HR"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(3, "Allen", "IT"), indexScan.next()), - - () -> assertFalse(indexScan.hasNext()) - ); + () -> assertFalse(indexScan.hasNext())); } verify(client).cleanup(any()); } @Test void query_some_results_with_query() { - mockResponse(client, new ExprValue[]{ - employee(1, "John", "IT"), - employee(2, "Smith", "HR"), - employee(3, "Allen", "IT"), - employee(4, "Bob", "HR")}); + mockResponse( + client, + new ExprValue[] { + employee(1, "John", "IT"), + employee(2, "Smith", "HR"), + employee(3, "Allen", "IT"), + employee(4, "Bob", "HR") + }); final int limit = 3; OpenSearchRequestBuilder builder = new OpenSearchRequestBuilder(0, exprValueFactory); - try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, - limit, builder.build(INDEX_NAME, MAX_RESULT_WINDOW, CURSOR_KEEP_ALIVE))) { + try (OpenSearchIndexScan indexScan = + new OpenSearchIndexScan( + client, limit, builder.build(INDEX_NAME, MAX_RESULT_WINDOW, CURSOR_KEEP_ALIVE))) { indexScan.open(); assertAll( () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(1, "John", "IT"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> 
assertEquals(employee(2, "Smith", "HR"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(3, "Allen", "IT"), indexScan.next()), - - () -> assertFalse(indexScan.hasNext()) - ); + () -> assertFalse(indexScan.hasNext())); } verify(client).cleanup(any()); } @@ -246,55 +241,56 @@ void query_some_results_with_query() { void query_some_results_with_scroll() { mockTwoPageResponse(client); final var requestuilder = new OpenSearchRequestBuilder(10, exprValueFactory); - try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, - 3, requestuilder.build(INDEX_NAME, MAX_RESULT_WINDOW, CURSOR_KEEP_ALIVE))) { + try (OpenSearchIndexScan indexScan = + new OpenSearchIndexScan( + client, 3, requestuilder.build(INDEX_NAME, MAX_RESULT_WINDOW, CURSOR_KEEP_ALIVE))) { indexScan.open(); assertAll( () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(1, "John", "IT"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(2, "Smith", "HR"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(3, "Allen", "IT"), indexScan.next()), - - () -> assertFalse(indexScan.hasNext()) - ); + () -> assertFalse(indexScan.hasNext())); } verify(client).cleanup(any()); } static void mockTwoPageResponse(OpenSearchClient client) { - mockResponse(client, - new ExprValue[]{employee(1, "John", "IT"), employee(2, "Smith", "HR")}, - new ExprValue[]{employee(3, "Allen", "IT"), employee(4, "Bob", "HR")}); + mockResponse( + client, + new ExprValue[] {employee(1, "John", "IT"), employee(2, "Smith", "HR")}, + new ExprValue[] {employee(3, "Allen", "IT"), employee(4, "Bob", "HR")}); } @Test void query_results_limited_by_query_size() { - mockResponse(client, new ExprValue[]{ - employee(1, "John", "IT"), - employee(2, "Smith", "HR"), - employee(3, "Allen", "IT"), - employee(4, "Bob", "HR")}); + mockResponse( + client, + new ExprValue[] { + employee(1, "John", "IT"), + employee(2, "Smith", "HR"), 
+ employee(3, "Allen", "IT"), + employee(4, "Bob", "HR") + }); final int defaultQuerySize = 2; final var requestBuilder = new OpenSearchRequestBuilder(defaultQuerySize, exprValueFactory); - try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, - defaultQuerySize, requestBuilder.build(INDEX_NAME, QUERY_SIZE, CURSOR_KEEP_ALIVE))) { + try (OpenSearchIndexScan indexScan = + new OpenSearchIndexScan( + client, + defaultQuerySize, + requestBuilder.build(INDEX_NAME, QUERY_SIZE, CURSOR_KEEP_ALIVE))) { indexScan.open(); assertAll( () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(1, "John", "IT"), indexScan.next()), - () -> assertTrue(indexScan.hasNext()), () -> assertEquals(employee(2, "Smith", "HR"), indexScan.next()), - - () -> assertFalse(indexScan.hasNext()) - ); + () -> assertFalse(indexScan.hasNext())); } verify(client).cleanup(any()); } @@ -324,7 +320,8 @@ void push_down_highlight() { .pushDown(QueryBuilders.termQuery("name", "John")) .pushDownHighlight("Title", args) .pushDownHighlight("Body", args) - .shouldQueryHighlight(QueryBuilders.termQuery("name", "John"), + .shouldQueryHighlight( + QueryBuilders.termQuery("name", "John"), new HighlightBuilder().field("Title").field("Body")); } @@ -333,14 +330,12 @@ void push_down_highlight_with_arguments() { Map args = new HashMap<>(); args.put("pre_tags", new Literal("", DataType.STRING)); args.put("post_tags", new Literal("", DataType.STRING)); - HighlightBuilder highlightBuilder = new HighlightBuilder() - .field("Title"); + HighlightBuilder highlightBuilder = new HighlightBuilder().field("Title"); highlightBuilder.fields().get(0).preTags("").postTags(""); assertThat() .pushDown(QueryBuilders.termQuery("name", "John")) .pushDownHighlight("Title", args) - .shouldQueryHighlight(QueryBuilders.termQuery("name", "John"), - highlightBuilder); + .shouldQueryHighlight(QueryBuilders.termQuery("name", "John"), highlightBuilder); } private PushDownAssertion assertThat() { @@ -353,8 +348,7 @@ private 
static class PushDownAssertion { private final OpenSearchResponse response; private final OpenSearchExprValueFactory factory; - public PushDownAssertion(OpenSearchClient client, - OpenSearchExprValueFactory valueFactory) { + public PushDownAssertion(OpenSearchClient client, OpenSearchExprValueFactory valueFactory) { this.client = client; this.requestBuilder = new OpenSearchRequestBuilder(QUERY_SIZE, valueFactory); @@ -374,35 +368,39 @@ PushDownAssertion pushDownHighlight(String query, Map arguments } PushDownAssertion shouldQueryHighlight(QueryBuilder query, HighlightBuilder highlight) { - var sourceBuilder = new SearchSourceBuilder() - .from(0) - .timeout(CURSOR_KEEP_ALIVE) - .query(query) - .size(QUERY_SIZE) - .highlighter(highlight) - .sort(DOC_FIELD_NAME, ASC); + var sourceBuilder = + new SearchSourceBuilder() + .from(0) + .timeout(CURSOR_KEEP_ALIVE) + .query(query) + .size(QUERY_SIZE) + .highlighter(highlight) + .sort(DOC_FIELD_NAME, ASC); OpenSearchRequest request = new OpenSearchQueryRequest(EMPLOYEES_INDEX, sourceBuilder, factory, List.of()); when(client.search(request)).thenReturn(response); - var indexScan = new OpenSearchIndexScan(client, - QUERY_SIZE, requestBuilder.build(EMPLOYEES_INDEX, 10000, CURSOR_KEEP_ALIVE)); + var indexScan = + new OpenSearchIndexScan( + client, QUERY_SIZE, requestBuilder.build(EMPLOYEES_INDEX, 10000, CURSOR_KEEP_ALIVE)); indexScan.open(); return this; } PushDownAssertion shouldQuery(QueryBuilder expected) { - var builder = new SearchSourceBuilder() - .from(0) - .query(expected) - .size(QUERY_SIZE) - .timeout(CURSOR_KEEP_ALIVE) - .sort(DOC_FIELD_NAME, ASC); + var builder = + new SearchSourceBuilder() + .from(0) + .query(expected) + .size(QUERY_SIZE) + .timeout(CURSOR_KEEP_ALIVE) + .sort(DOC_FIELD_NAME, ASC); OpenSearchRequest request = new OpenSearchQueryRequest(EMPLOYEES_INDEX, builder, factory, List.of()); when(client.search(request)).thenReturn(response); - var indexScan = new OpenSearchIndexScan(client, - 10000, 
requestBuilder.build(EMPLOYEES_INDEX, 10000, CURSOR_KEEP_ALIVE)); + var indexScan = + new OpenSearchIndexScan( + client, 10000, requestBuilder.build(EMPLOYEES_INDEX, 10000, CURSOR_KEEP_ALIVE)); indexScan.open(); return this; } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilderTest.java index 0b0568a6b7..5f233d7f45 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/PushDownQueryBuilderTest.java @@ -1,6 +1,5 @@ package org.opensearch.sql.opensearch.storage.scan; - import static org.junit.jupiter.api.Assertions.assertAll; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.mockito.Mockito.mock; @@ -21,12 +20,13 @@ class PushDownQueryBuilderTest { @Test void default_implementations() { - var sample = new PushDownQueryBuilder() { - @Override - public OpenSearchRequestBuilder build() { - return null; - } - }; + var sample = + new PushDownQueryBuilder() { + @Override + public OpenSearchRequestBuilder build() { + return null; + } + }; assertAll( () -> assertFalse(sample.pushDownFilter(mock(LogicalFilter.class))), () -> assertFalse(sample.pushDownProject(mock(LogicalProject.class))), @@ -34,9 +34,6 @@ public OpenSearchRequestBuilder build() { () -> assertFalse(sample.pushDownSort(mock(LogicalSort.class))), () -> assertFalse(sample.pushDownNested(mock(LogicalNested.class))), () -> assertFalse(sample.pushDownLimit(mock(LogicalLimit.class))), - () -> assertFalse(sample.pushDownPageSize(mock(LogicalPaginate.class))) - - ); + () -> assertFalse(sample.pushDownPageSize(mock(LogicalPaginate.class)))); } - } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/ExpressionScriptEngineTest.java 
b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/ExpressionScriptEngineTest.java index 3d497c2f5b..63710e57aa 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/ExpressionScriptEngineTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/ExpressionScriptEngineTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script; import static java.util.Collections.emptyMap; @@ -34,8 +33,7 @@ @ExtendWith(MockitoExtension.class) class ExpressionScriptEngineTest { - @Mock - private ExpressionSerializer serializer; + @Mock private ExpressionSerializer serializer; private ScriptEngine scriptEngine; @@ -55,19 +53,20 @@ void should_return_custom_script_language_name() { void can_initialize_filter_script_factory_by_compiled_script() { when(serializer.deserialize("test code")).thenReturn(expression); - assertThat(scriptEngine.getSupportedContexts(), + assertThat( + scriptEngine.getSupportedContexts(), contains(FilterScript.CONTEXT, AggregationScript.CONTEXT)); - Object actualFactory = scriptEngine.compile( - "test", "test code", FilterScript.CONTEXT, emptyMap()); + Object actualFactory = + scriptEngine.compile("test", "test code", FilterScript.CONTEXT, emptyMap()); assertEquals(new ExpressionFilterScriptFactory(expression), actualFactory); } @Test void should_throw_exception_for_unsupported_script_context() { ScriptContext unknownCtx = mock(ScriptContext.class); - assertThrows(IllegalStateException.class, () -> - scriptEngine.compile("test", "test code", unknownCtx, emptyMap())); + assertThrows( + IllegalStateException.class, + () -> scriptEngine.compile("test", "test code", unknownCtx, emptyMap())); } - } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java 
b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java index 03f5cc8b52..6485dce124 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.aggregation; import static org.hamcrest.MatcherAssert.assertThat; @@ -52,7 +51,6 @@ import org.opensearch.sql.expression.NamedExpression; import org.opensearch.sql.expression.aggregation.AvgAggregator; import org.opensearch.sql.expression.aggregation.CountAggregator; -import org.opensearch.sql.expression.aggregation.MaxAggregator; import org.opensearch.sql.expression.aggregation.NamedAggregator; import org.opensearch.sql.opensearch.data.type.OpenSearchDataType; import org.opensearch.sql.opensearch.data.type.OpenSearchDateType; @@ -62,8 +60,7 @@ @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) @ExtendWith(MockitoExtension.class) class AggregationQueryBuilderTest { - @Mock - private ExpressionSerializer serializer; + @Mock private ExpressionSerializer serializer; private AggregationQueryBuilder queryBuilder; @@ -74,31 +71,32 @@ void set_up() { @Test void should_build_composite_aggregation_for_field_reference() { - assertEquals(format( - "{%n" - + " \"composite_buckets\" : {%n" - + " \"composite\" : {%n" - + " \"size\" : 1000,%n" - + " \"sources\" : [ {%n" - + " \"name\" : {%n" - + " \"terms\" : {%n" - + " \"field\" : \"name\",%n" - + " \"missing_bucket\" : true,%n" - + " \"missing_order\" : \"first\",%n" - + " \"order\" : \"asc\"%n" - + " }%n" - + " }%n" - + " } ]%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"avg(age)\" : {%n" - + " \"avg\" : {%n" - + " \"field\" : \"age\"%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), + 
assertEquals( + format( + "{%n" + + " \"composite_buckets\" : {%n" + + " \"composite\" : {%n" + + " \"size\" : 1000,%n" + + " \"sources\" : [ {%n" + + " \"name\" : {%n" + + " \"terms\" : {%n" + + " \"field\" : \"name\",%n" + + " \"missing_bucket\" : true,%n" + + " \"missing_order\" : \"first\",%n" + + " \"order\" : \"asc\"%n" + + " }%n" + + " }%n" + + " } ]%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"avg(age)\" : {%n" + + " \"avg\" : {%n" + + " \"field\" : \"age\"%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( named("avg(age)", new AvgAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))), @@ -107,388 +105,415 @@ void should_build_composite_aggregation_for_field_reference() { @Test void should_build_composite_aggregation_for_field_reference_with_order() { - assertEquals(format( - "{%n" - + " \"composite_buckets\" : {%n" - + " \"composite\" : {%n" - + " \"size\" : 1000,%n" - + " \"sources\" : [ {%n" - + " \"name\" : {%n" - + " \"terms\" : {%n" - + " \"field\" : \"name\",%n" - + " \"missing_bucket\" : true,%n" - + " \"missing_order\" : \"last\",%n" - + " \"order\" : \"desc\"%n" - + " }%n" - + " }%n" - + " } ]%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"avg(age)\" : {%n" - + " \"avg\" : {%n" - + " \"field\" : \"age\"%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"composite_buckets\" : {%n" + + " \"composite\" : {%n" + + " \"size\" : 1000,%n" + + " \"sources\" : [ {%n" + + " \"name\" : {%n" + + " \"terms\" : {%n" + + " \"field\" : \"name\",%n" + + " \"missing_bucket\" : true,%n" + + " \"missing_order\" : \"last\",%n" + + " \"order\" : \"desc\"%n" + + " }%n" + + " }%n" + + " } ]%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"avg(age)\" : {%n" + + " \"avg\" : {%n" + + " \"field\" : \"age\"%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( named("avg(age)", new AvgAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))), 
Arrays.asList(named("name", ref("name", STRING))), - sort(ref("name", STRING), Sort.SortOption.DEFAULT_DESC) - )); + sort(ref("name", STRING), Sort.SortOption.DEFAULT_DESC))); } @Test void should_build_type_mapping_for_field_reference() { assertThat( - buildTypeMapping(Arrays.asList( - named("avg(age)", new AvgAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))), + buildTypeMapping( + Arrays.asList( + named("avg(age)", new AvgAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))), Arrays.asList(named("name", ref("name", STRING)))), containsInAnyOrder( map("avg(age)", OpenSearchDataType.of(INTEGER)), - map("name", OpenSearchDataType.of(STRING)) - )); + map("name", OpenSearchDataType.of(STRING)))); } @Test void should_build_type_mapping_for_datetime_type() { assertThat( - buildTypeMapping(Arrays.asList( - named("avg(datetime)", + buildTypeMapping( + Arrays.asList( + named( + "avg(datetime)", new AvgAggregator(Arrays.asList(ref("datetime", DATETIME)), DATETIME))), Arrays.asList(named("datetime", ref("datetime", DATETIME)))), containsInAnyOrder( map("avg(datetime)", OpenSearchDateType.of(DATETIME)), - map("datetime", OpenSearchDateType.of(DATETIME)) - )); + map("datetime", OpenSearchDateType.of(DATETIME)))); } @Test void should_build_type_mapping_for_timestamp_type() { assertThat( - buildTypeMapping(Arrays.asList( - named("avg(timestamp)", + buildTypeMapping( + Arrays.asList( + named( + "avg(timestamp)", new AvgAggregator(Arrays.asList(ref("timestamp", TIMESTAMP)), TIMESTAMP))), Arrays.asList(named("timestamp", ref("timestamp", TIMESTAMP)))), containsInAnyOrder( map("avg(timestamp)", OpenSearchDateType.of()), - map("timestamp", OpenSearchDateType.of()) - )); + map("timestamp", OpenSearchDateType.of()))); } @Test void should_build_type_mapping_for_date_type() { assertThat( - buildTypeMapping(Arrays.asList( - named("avg(date)", - new AvgAggregator(Arrays.asList(ref("date", DATE)), DATE))), + buildTypeMapping( + Arrays.asList( + named("avg(date)", new 
AvgAggregator(Arrays.asList(ref("date", DATE)), DATE))), Arrays.asList(named("date", ref("date", DATE)))), containsInAnyOrder( map("avg(date)", OpenSearchDateType.of(DATE)), - map("date", OpenSearchDateType.of(DATE)) - )); + map("date", OpenSearchDateType.of(DATE)))); } @Test void should_build_type_mapping_for_time_type() { assertThat( - buildTypeMapping(Arrays.asList( - named("avg(time)", - new AvgAggregator(Arrays.asList(ref("time", TIME)), TIME))), + buildTypeMapping( + Arrays.asList( + named("avg(time)", new AvgAggregator(Arrays.asList(ref("time", TIME)), TIME))), Arrays.asList(named("time", ref("time", TIME)))), containsInAnyOrder( map("avg(time)", OpenSearchDateType.of(TIME)), - map("time", OpenSearchDateType.of(TIME)) - )); + map("time", OpenSearchDateType.of(TIME)))); } @Test void should_build_composite_aggregation_for_field_reference_of_keyword() { - assertEquals(format( - "{%n" - + " \"composite_buckets\" : {%n" - + " \"composite\" : {%n" - + " \"size\" : 1000,%n" - + " \"sources\" : [ {%n" - + " \"name\" : {%n" - + " \"terms\" : {%n" - + " \"field\" : \"name.keyword\",%n" - + " \"missing_bucket\" : true,%n" - + " \"missing_order\" : \"first\",%n" - + " \"order\" : \"asc\"%n" - + " }%n" - + " }%n" - + " } ]%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"avg(age)\" : {%n" - + " \"avg\" : {%n" - + " \"field\" : \"age\"%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"composite_buckets\" : {%n" + + " \"composite\" : {%n" + + " \"size\" : 1000,%n" + + " \"sources\" : [ {%n" + + " \"name\" : {%n" + + " \"terms\" : {%n" + + " \"field\" : \"name.keyword\",%n" + + " \"missing_bucket\" : true,%n" + + " \"missing_order\" : \"first\",%n" + + " \"order\" : \"asc\"%n" + + " }%n" + + " }%n" + + " } ]%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"avg(age)\" : {%n" + + " \"avg\" : {%n" + + " \"field\" : \"age\"%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( 
named("avg(age)", new AvgAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))), - Arrays.asList(named("name", ref("name", OpenSearchTextType.of(Map.of("words", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))))))); + Arrays.asList( + named( + "name", + ref( + "name", + OpenSearchTextType.of( + Map.of( + "words", + OpenSearchDataType.of( + OpenSearchDataType.MappingType.Keyword)))))))); } @Test void should_build_type_mapping_for_field_reference_of_keyword() { assertThat( - buildTypeMapping(Arrays.asList( - named("avg(age)", new AvgAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))), + buildTypeMapping( + Arrays.asList( + named("avg(age)", new AvgAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))), Arrays.asList(named("name", ref("name", STRING)))), containsInAnyOrder( map("avg(age)", OpenSearchDataType.of(INTEGER)), - map("name", OpenSearchDataType.of(STRING)) - )); + map("name", OpenSearchDataType.of(STRING)))); } @Test void should_build_composite_aggregation_for_expression() { - doAnswer(invocation -> { - Expression expr = invocation.getArgument(0); - return expr.toString(); - }).when(serializer).serialize(any()); - assertEquals(format( - "{%n" - + " \"composite_buckets\" : {%n" - + " \"composite\" : {%n" - + " \"size\" : 1000,%n" - + " \"sources\" : [ {%n" - + " \"age\" : {%n" - + " \"terms\" : {%n" - + " \"script\" : {%n" - + " \"source\" : \"asin(age)\",%n" - + " \"lang\" : \"opensearch_query_expression\"%n" - + " },%n" - + " \"missing_bucket\" : true,%n" - + " \"missing_order\" : \"first\",%n" - + " \"order\" : \"asc\"%n" - + " }%n" - + " }%n" - + " } ]%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"avg(balance)\" : {%n" - + " \"avg\" : {%n" - + " \"script\" : {%n" - + " \"source\" : \"abs(balance)\",%n" - + " \"lang\" : \"opensearch_query_expression\"%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), + doAnswer( + invocation -> { + Expression expr = invocation.getArgument(0); + return expr.toString(); + 
}) + .when(serializer) + .serialize(any()); + assertEquals( + format( + "{%n" + + " \"composite_buckets\" : {%n" + + " \"composite\" : {%n" + + " \"size\" : 1000,%n" + + " \"sources\" : [ {%n" + + " \"age\" : {%n" + + " \"terms\" : {%n" + + " \"script\" : {%n" + + " \"source\" : \"asin(age)\",%n" + + " \"lang\" : \"opensearch_query_expression\"%n" + + " },%n" + + " \"missing_bucket\" : true,%n" + + " \"missing_order\" : \"first\",%n" + + " \"order\" : \"asc\"%n" + + " }%n" + + " }%n" + + " } ]%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"avg(balance)\" : {%n" + + " \"avg\" : {%n" + + " \"script\" : {%n" + + " \"source\" : \"abs(balance)\",%n" + + " \"lang\" : \"opensearch_query_expression\"%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( - named("avg(balance)", new AvgAggregator( - Arrays.asList(DSL.abs(ref("balance", INTEGER))), INTEGER))), + named( + "avg(balance)", + new AvgAggregator(Arrays.asList(DSL.abs(ref("balance", INTEGER))), INTEGER))), Arrays.asList(named("age", DSL.asin(ref("age", INTEGER)))))); } @Test void should_build_composite_aggregation_follow_with_order_by_position() { - assertEquals(format( - "{%n" - + " \"composite_buckets\" : {%n" - + " \"composite\" : {%n" - + " \"size\" : 1000,%n" - + " \"sources\" : [ {%n" - + " \"name\" : {%n" - + " \"terms\" : {%n" - + " \"field\" : \"name\",%n" - + " \"missing_bucket\" : true,%n" - + " \"missing_order\" : \"last\",%n" - + " \"order\" : \"desc\"%n" - + " }%n" - + " }%n" - + " }, {%n" - + " \"age\" : {%n" - + " \"terms\" : {%n" - + " \"field\" : \"age\",%n" - + " \"missing_bucket\" : true,%n" - + " \"missing_order\" : \"first\",%n" - + " \"order\" : \"asc\"%n" - + " }%n" - + " }%n" - + " } ]%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"avg(balance)\" : {%n" - + " \"avg\" : {%n" - + " \"field\" : \"balance\"%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"composite_buckets\" : {%n" + + " 
\"composite\" : {%n" + + " \"size\" : 1000,%n" + + " \"sources\" : [ {%n" + + " \"name\" : {%n" + + " \"terms\" : {%n" + + " \"field\" : \"name\",%n" + + " \"missing_bucket\" : true,%n" + + " \"missing_order\" : \"last\",%n" + + " \"order\" : \"desc\"%n" + + " }%n" + + " }%n" + + " }, {%n" + + " \"age\" : {%n" + + " \"terms\" : {%n" + + " \"field\" : \"age\",%n" + + " \"missing_bucket\" : true,%n" + + " \"missing_order\" : \"first\",%n" + + " \"order\" : \"asc\"%n" + + " }%n" + + " }%n" + + " } ]%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"avg(balance)\" : {%n" + + " \"avg\" : {%n" + + " \"field\" : \"balance\"%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), buildQuery( agg(named("avg(balance)", avg(ref("balance", INTEGER), INTEGER))), group(named("age", ref("age", INTEGER)), named("name", ref("name", STRING))), - sort(ref("name", STRING), Sort.SortOption.DEFAULT_DESC, - ref("age", INTEGER), Sort.SortOption.DEFAULT_ASC) - )); + sort( + ref("name", STRING), + Sort.SortOption.DEFAULT_DESC, + ref("age", INTEGER), + Sort.SortOption.DEFAULT_ASC))); } @Test void should_build_type_mapping_for_expression() { assertThat( - buildTypeMapping(Arrays.asList( - named("avg(balance)", new AvgAggregator( - Arrays.asList(DSL.abs(ref("balance", INTEGER))), INTEGER))), + buildTypeMapping( + Arrays.asList( + named( + "avg(balance)", + new AvgAggregator(Arrays.asList(DSL.abs(ref("balance", INTEGER))), INTEGER))), Arrays.asList(named("age", DSL.asin(ref("age", INTEGER))))), containsInAnyOrder( map("avg(balance)", OpenSearchDataType.of(INTEGER)), - map("age", OpenSearchDataType.of(DOUBLE)) - )); + map("age", OpenSearchDataType.of(DOUBLE)))); } @Test void should_build_aggregation_without_bucket() { - assertEquals(format( - "{%n" - + " \"avg(balance)\" : {%n" - + " \"avg\" : {%n" - + " \"field\" : \"balance\"%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"avg(balance)\" : {%n" + + " \"avg\" : {%n" + + " \"field\" : \"balance\"%n" + + " }%n" + 
+ " }%n" + + "}"), buildQuery( Arrays.asList( - named("avg(balance)", new AvgAggregator( - Arrays.asList(ref("balance", INTEGER)), INTEGER))), + named( + "avg(balance)", + new AvgAggregator(Arrays.asList(ref("balance", INTEGER)), INTEGER))), Collections.emptyList())); } @Test void should_build_filter_aggregation() { - assertEquals(format( - "{%n" - + " \"avg(age) filter(where age > 34)\" : {%n" - + " \"filter\" : {%n" - + " \"range\" : {%n" - + " \"age\" : {%n" - + " \"from\" : 20,%n" - + " \"to\" : null,%n" - + " \"include_lower\" : false,%n" - + " \"include_upper\" : true,%n" - + " \"boost\" : 1.0%n" - + " }%n" - + " }%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"avg(age) filter(where age > 34)\" : {%n" - + " \"avg\" : {%n" - + " \"field\" : \"age\"%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"avg(age) filter(where age > 34)\" : {%n" + + " \"filter\" : {%n" + + " \"range\" : {%n" + + " \"age\" : {%n" + + " \"from\" : 20,%n" + + " \"to\" : null,%n" + + " \"include_lower\" : false,%n" + + " \"include_upper\" : true,%n" + + " \"boost\" : 1.0%n" + + " }%n" + + " }%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"avg(age) filter(where age > 34)\" : {%n" + + " \"avg\" : {%n" + + " \"field\" : \"age\"%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), buildQuery( - Arrays.asList(named("avg(age) filter(where age > 34)", - new AvgAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER) - .condition(DSL.greater(ref("age", INTEGER), literal(20))))), + Arrays.asList( + named( + "avg(age) filter(where age > 34)", + new AvgAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER) + .condition(DSL.greater(ref("age", INTEGER), literal(20))))), Collections.emptyList())); } @Test void should_build_filter_aggregation_group_by() { - assertEquals(format( - "{%n" - + " \"composite_buckets\" : {%n" - + " \"composite\" : {%n" - + " \"size\" : 1000,%n" - + " \"sources\" : [ {%n" - + " \"gender\" : {%n" - + " \"terms\" : 
{%n" - + " \"field\" : \"gender\",%n" - + " \"missing_bucket\" : true,%n" - + " \"missing_order\" : \"first\",%n" - + " \"order\" : \"asc\"%n" - + " }%n" - + " }%n" - + " } ]%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"avg(age) filter(where age > 34)\" : {%n" - + " \"filter\" : {%n" - + " \"range\" : {%n" - + " \"age\" : {%n" - + " \"from\" : 20,%n" - + " \"to\" : null,%n" - + " \"include_lower\" : false,%n" - + " \"include_upper\" : true,%n" - + " \"boost\" : 1.0%n" - + " }%n" - + " }%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"avg(age) filter(where age > 34)\" : {%n" - + " \"avg\" : {%n" - + " \"field\" : \"age\"%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"composite_buckets\" : {%n" + + " \"composite\" : {%n" + + " \"size\" : 1000,%n" + + " \"sources\" : [ {%n" + + " \"gender\" : {%n" + + " \"terms\" : {%n" + + " \"field\" : \"gender\",%n" + + " \"missing_bucket\" : true,%n" + + " \"missing_order\" : \"first\",%n" + + " \"order\" : \"asc\"%n" + + " }%n" + + " }%n" + + " } ]%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"avg(age) filter(where age > 34)\" : {%n" + + " \"filter\" : {%n" + + " \"range\" : {%n" + + " \"age\" : {%n" + + " \"from\" : 20,%n" + + " \"to\" : null,%n" + + " \"include_lower\" : false,%n" + + " \"include_upper\" : true,%n" + + " \"boost\" : 1.0%n" + + " }%n" + + " }%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"avg(age) filter(where age > 34)\" : {%n" + + " \"avg\" : {%n" + + " \"field\" : \"age\"%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), buildQuery( - Arrays.asList(named("avg(age) filter(where age > 34)", - new AvgAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER) - .condition(DSL.greater(ref("age", INTEGER), literal(20))))), + Arrays.asList( + named( + "avg(age) filter(where age > 34)", + new AvgAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER) + .condition(DSL.greater(ref("age", 
INTEGER), literal(20))))), Arrays.asList(named(ref("gender", OpenSearchDataType.of(STRING)))))); } @Test void should_build_type_mapping_without_bucket() { assertThat( - buildTypeMapping(Arrays.asList( - named("avg(balance)", new AvgAggregator( - Arrays.asList(ref("balance", INTEGER)), INTEGER))), + buildTypeMapping( + Arrays.asList( + named( + "avg(balance)", + new AvgAggregator(Arrays.asList(ref("balance", INTEGER)), INTEGER))), Collections.emptyList()), - containsInAnyOrder( - map("avg(balance)", OpenSearchDataType.of(INTEGER)) - )); + containsInAnyOrder(map("avg(balance)", OpenSearchDataType.of(INTEGER)))); } @Test void should_build_histogram() { - assertEquals(format( - "{%n" - + " \"composite_buckets\" : {%n" - + " \"composite\" : {%n" - + " \"size\" : 1000,%n" - + " \"sources\" : [ {%n" - + " \"SpanExpression(field=age, value=10, unit=NONE)\" : {%n" - + " \"histogram\" : {%n" - + " \"field\" : \"age\",%n" - + " \"missing_bucket\" : true,%n" - + " \"missing_order\" : \"first\",%n" - + " \"order\" : \"asc\",%n" - + " \"interval\" : 10.0%n" - + " }%n" - + " }%n" - + " } ]%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"count(a)\" : {%n" - + " \"value_count\" : {%n" - + " \"field\" : \"a\"%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"composite_buckets\" : {%n" + + " \"composite\" : {%n" + + " \"size\" : 1000,%n" + + " \"sources\" : [ {%n" + + " \"SpanExpression(field=age, value=10, unit=NONE)\" : {%n" + + " \"histogram\" : {%n" + + " \"field\" : \"age\",%n" + + " \"missing_bucket\" : true,%n" + + " \"missing_order\" : \"first\",%n" + + " \"order\" : \"asc\",%n" + + " \"interval\" : 10.0%n" + + " }%n" + + " }%n" + + " } ]%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"count(a)\" : {%n" + + " \"value_count\" : {%n" + + " \"field\" : \"a\"%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( named("count(a)", new CountAggregator(Arrays.asList(ref("a", INTEGER)), 
INTEGER))), @@ -497,37 +522,38 @@ void should_build_histogram() { @Test void should_build_histogram_two_metrics() { - assertEquals(format( - "{%n" - + " \"composite_buckets\" : {%n" - + " \"composite\" : {%n" - + " \"size\" : 1000,%n" - + " \"sources\" : [ {%n" - + " \"SpanExpression(field=age, value=10, unit=NONE)\" : {%n" - + " \"histogram\" : {%n" - + " \"field\" : \"age\",%n" - + " \"missing_bucket\" : true,%n" - + " \"missing_order\" : \"first\",%n" - + " \"order\" : \"asc\",%n" - + " \"interval\" : 10.0%n" - + " }%n" - + " }%n" - + " } ]%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"count(a)\" : {%n" - + " \"value_count\" : {%n" - + " \"field\" : \"a\"%n" - + " }%n" - + " },%n" - + " \"avg(b)\" : {%n" - + " \"avg\" : {%n" - + " \"field\" : \"b\"%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"composite_buckets\" : {%n" + + " \"composite\" : {%n" + + " \"size\" : 1000,%n" + + " \"sources\" : [ {%n" + + " \"SpanExpression(field=age, value=10, unit=NONE)\" : {%n" + + " \"histogram\" : {%n" + + " \"field\" : \"age\",%n" + + " \"missing_bucket\" : true,%n" + + " \"missing_order\" : \"first\",%n" + + " \"order\" : \"asc\",%n" + + " \"interval\" : 10.0%n" + + " }%n" + + " }%n" + + " } ]%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"count(a)\" : {%n" + + " \"value_count\" : {%n" + + " \"field\" : \"a\"%n" + + " }%n" + + " },%n" + + " \"avg(b)\" : {%n" + + " \"avg\" : {%n" + + " \"field\" : \"b\"%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( named("count(a)", new CountAggregator(Arrays.asList(ref("a", INTEGER)), INTEGER)), @@ -537,32 +563,33 @@ void should_build_histogram_two_metrics() { @Test void fixed_interval_time_span() { - assertEquals(format( - "{%n" - + " \"composite_buckets\" : {%n" - + " \"composite\" : {%n" - + " \"size\" : 1000,%n" - + " \"sources\" : [ {%n" - + " \"SpanExpression(field=timestamp, value=1, unit=H)\" : {%n" - + " \"date_histogram\" : {%n" 
- + " \"field\" : \"timestamp\",%n" - + " \"missing_bucket\" : true,%n" - + " \"missing_order\" : \"first\",%n" - + " \"order\" : \"asc\",%n" - + " \"fixed_interval\" : \"1h\"%n" - + " }%n" - + " }%n" - + " } ]%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"count(a)\" : {%n" - + " \"value_count\" : {%n" - + " \"field\" : \"a\"%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"composite_buckets\" : {%n" + + " \"composite\" : {%n" + + " \"size\" : 1000,%n" + + " \"sources\" : [ {%n" + + " \"SpanExpression(field=timestamp, value=1, unit=H)\" : {%n" + + " \"date_histogram\" : {%n" + + " \"field\" : \"timestamp\",%n" + + " \"missing_bucket\" : true,%n" + + " \"missing_order\" : \"first\",%n" + + " \"order\" : \"asc\",%n" + + " \"fixed_interval\" : \"1h\"%n" + + " }%n" + + " }%n" + + " } ]%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"count(a)\" : {%n" + + " \"value_count\" : {%n" + + " \"field\" : \"a\"%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( named("count(a)", new CountAggregator(Arrays.asList(ref("a", INTEGER)), INTEGER))), @@ -571,32 +598,33 @@ void fixed_interval_time_span() { @Test void calendar_interval_time_span() { - assertEquals(format( - "{%n" - + " \"composite_buckets\" : {%n" - + " \"composite\" : {%n" - + " \"size\" : 1000,%n" - + " \"sources\" : [ {%n" - + " \"SpanExpression(field=date, value=1, unit=W)\" : {%n" - + " \"date_histogram\" : {%n" - + " \"field\" : \"date\",%n" - + " \"missing_bucket\" : true,%n" - + " \"missing_order\" : \"first\",%n" - + " \"order\" : \"asc\",%n" - + " \"calendar_interval\" : \"1w\"%n" - + " }%n" - + " }%n" - + " } ]%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"count(a)\" : {%n" - + " \"value_count\" : {%n" - + " \"field\" : \"a\"%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"composite_buckets\" : {%n" + + " \"composite\" : {%n" + + " \"size\" : 1000,%n" + + " 
\"sources\" : [ {%n" + + " \"SpanExpression(field=date, value=1, unit=W)\" : {%n" + + " \"date_histogram\" : {%n" + + " \"field\" : \"date\",%n" + + " \"missing_bucket\" : true,%n" + + " \"missing_order\" : \"first\",%n" + + " \"order\" : \"asc\",%n" + + " \"calendar_interval\" : \"1w\"%n" + + " }%n" + + " }%n" + + " } ]%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"count(a)\" : {%n" + + " \"value_count\" : {%n" + + " \"field\" : \"a\"%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( named("count(a)", new CountAggregator(Arrays.asList(ref("a", INTEGER)), INTEGER))), @@ -605,32 +633,33 @@ void calendar_interval_time_span() { @Test void general_span() { - assertEquals(format( - "{%n" - + " \"composite_buckets\" : {%n" - + " \"composite\" : {%n" - + " \"size\" : 1000,%n" - + " \"sources\" : [ {%n" - + " \"SpanExpression(field=age, value=1, unit=NONE)\" : {%n" - + " \"histogram\" : {%n" - + " \"field\" : \"age\",%n" - + " \"missing_bucket\" : true,%n" - + " \"missing_order\" : \"first\",%n" - + " \"order\" : \"asc\",%n" - + " \"interval\" : 1.0%n" - + " }%n" - + " }%n" - + " } ]%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"count(a)\" : {%n" - + " \"value_count\" : {%n" - + " \"field\" : \"a\"%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"composite_buckets\" : {%n" + + " \"composite\" : {%n" + + " \"size\" : 1000,%n" + + " \"sources\" : [ {%n" + + " \"SpanExpression(field=age, value=1, unit=NONE)\" : {%n" + + " \"histogram\" : {%n" + + " \"field\" : \"age\",%n" + + " \"missing_bucket\" : true,%n" + + " \"missing_order\" : \"first\",%n" + + " \"order\" : \"asc\",%n" + + " \"interval\" : 1.0%n" + + " }%n" + + " }%n" + + " } ]%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"count(a)\" : {%n" + + " \"value_count\" : {%n" + + " \"field\" : \"a\"%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( named("count(a)", new 
CountAggregator(Arrays.asList(ref("a", INTEGER)), INTEGER))), @@ -639,15 +668,20 @@ void general_span() { @Test void invalid_unit() { - assertThrows(IllegalStateException.class, () -> buildQuery( - Arrays.asList( - named("count(a)", new CountAggregator(Arrays.asList(ref("a", INTEGER)), INTEGER))), - Arrays.asList(named(span(ref("age", INTEGER), literal(1), "invalid_unit"))))); + assertThrows( + IllegalStateException.class, + () -> + buildQuery( + Arrays.asList( + named( + "count(a)", + new CountAggregator(Arrays.asList(ref("a", INTEGER)), INTEGER))), + Arrays.asList(named(span(ref("age", INTEGER), literal(1), "invalid_unit"))))); } @SneakyThrows - private String buildQuery(List namedAggregatorList, - List groupByList) { + private String buildQuery( + List namedAggregatorList, List groupByList) { return buildQuery(namedAggregatorList, groupByList, null); } @@ -668,8 +702,7 @@ private String buildQuery( } private Set> buildTypeMapping( - List namedAggregatorList, - List groupByList) { + List namedAggregatorList, List groupByList) { return queryBuilder.buildTypeMapping(namedAggregatorList, groupByList).entrySet(); } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptFactoryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptFactoryTest.java index 38107934a0..618a9ca77a 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptFactoryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptFactoryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.aggregation; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -31,14 +30,11 @@ @ExtendWith(MockitoExtension.class) class ExpressionAggregationScriptFactoryTest { - @Mock - 
private SearchLookup searchLookup; + @Mock private SearchLookup searchLookup; - @Mock - private LeafSearchLookup leafSearchLookup; + @Mock private LeafSearchLookup leafSearchLookup; - @Mock - private LeafReaderContext leafReaderContext; + @Mock private LeafReaderContext leafReaderContext; private final Expression expression = DSL.literal(true); @@ -63,7 +59,6 @@ void can_initialize_expression_filter_script() throws IOException { assertEquals( new ExpressionAggregationScript(expression, searchLookup, leafReaderContext, params), - actualScript - ); + actualScript); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptTest.java index b98bc538ab..520e301301 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/ExpressionAggregationScriptTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.aggregation; import static java.time.temporal.ChronoUnit.MILLIS; @@ -46,21 +45,17 @@ @ExtendWith(MockitoExtension.class) class ExpressionAggregationScriptTest { - @Mock - private SearchLookup lookup; + @Mock private SearchLookup lookup; - @Mock - private LeafSearchLookup leafLookup; + @Mock private LeafSearchLookup leafLookup; - @Mock - private LeafReaderContext context; + @Mock private LeafReaderContext context; @Test void can_execute_expression_with_integer_field() { assertThat() .docValues("age", 30L) // DocValue only supports long - .evaluate( - DSL.abs(ref("age", INTEGER))) + .evaluate(DSL.abs(ref("age", INTEGER))) .shouldMatch(30); } @@ -68,8 +63,7 @@ void can_execute_expression_with_integer_field() { void 
can_execute_expression_with_integer_field_with_boolean_result() { assertThat() .docValues("age", 30L) // DocValue only supports long - .evaluate( - DSL.greater(ref("age", INTEGER), literal(20))) + .evaluate(DSL.greater(ref("age", INTEGER), literal(20))) .shouldMatch(true); } @@ -78,34 +72,36 @@ void can_execute_expression_with_text_keyword_field() { assertThat() .docValues("name.keyword", "John") .evaluate( - DSL.equal(ref("name", OpenSearchTextType.of(Map.of("words", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))), + DSL.equal( + ref( + "name", + OpenSearchTextType.of( + Map.of( + "words", + OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))), literal("John"))) .shouldMatch(true); } @Test void can_execute_expression_with_null_field() { - assertThat() - .docValues("age", null) - .evaluate(ref("age", INTEGER)) - .shouldMatch(null); + assertThat().docValues("age", null).evaluate(ref("age", INTEGER)).shouldMatch(null); } @Test void can_execute_expression_with_missing_field() { - assertThat() - .docValues("age", 30) - .evaluate(ref("name", STRING)) - .shouldMatch(null); + assertThat().docValues("age", 30).evaluate(ref("name", STRING)).shouldMatch(null); } @Test void can_execute_parse_expression() { assertThat() .docValues("age_string", "age: 30") - .evaluate(DSL.regex(DSL.ref("age_string", STRING), DSL.literal("age: (?\\d+)"), - DSL.literal("age"))) + .evaluate( + DSL.regex( + DSL.ref("age_string", STRING), + DSL.literal("age: (?\\d+)"), + DSL.literal("age"))) .shouldMatch("30"); } @@ -113,28 +109,23 @@ void can_execute_parse_expression() { void can_execute_expression_interpret_dates_for_aggregation() { assertThat() .docValues("date", "1961-04-12") - .evaluate( - DSL.date(ref("date", STRING))) - .shouldMatch(new ExprDateValue(LocalDate.of(1961, 4, 12)) - .timestampValue().toEpochMilli()); + .evaluate(DSL.date(ref("date", STRING))) + .shouldMatch(new ExprDateValue(LocalDate.of(1961, 4, 12)).timestampValue().toEpochMilli()); } @Test void 
can_execute_expression_interpret_datetimes_for_aggregation() { assertThat() .docValues("datetime", "1984-03-17 22:16:42") - .evaluate( - DSL.datetime(ref("datetime", STRING))) - .shouldMatch(new ExprDatetimeValue("1984-03-17 22:16:42") - .timestampValue().toEpochMilli()); + .evaluate(DSL.datetime(ref("datetime", STRING))) + .shouldMatch(new ExprDatetimeValue("1984-03-17 22:16:42").timestampValue().toEpochMilli()); } @Test void can_execute_expression_interpret_times_for_aggregation() { assertThat() .docValues("time", "22:13:42") - .evaluate( - DSL.time(ref("time", STRING))) + .evaluate(DSL.time(ref("time", STRING))) .shouldMatch(MILLIS.between(LocalTime.MIN, LocalTime.of(22, 13, 42))); } @@ -142,10 +133,8 @@ void can_execute_expression_interpret_times_for_aggregation() { void can_execute_expression_interpret_timestamps_for_aggregation() { assertThat() .docValues("timestamp", "1984-03-17 22:16:42") - .evaluate( - DSL.timestamp(ref("timestamp", STRING))) - .shouldMatch(new ExprTimestampValue("1984-03-17 22:16:42") - .timestampValue().toEpochMilli()); + .evaluate(DSL.timestamp(ref("timestamp", STRING))) + .shouldMatch(new ExprTimestampValue("1984-03-17 22:16:42").timestampValue().toEpochMilli()); } @Test @@ -172,20 +161,20 @@ ExprScriptAssertion docValues() { } ExprScriptAssertion docValues(String name, Object value) { - LeafDocLookup leafDocLookup = mockLeafDocLookup( - ImmutableMap.of(name, new FakeScriptDocValues<>(value))); + LeafDocLookup leafDocLookup = + mockLeafDocLookup(ImmutableMap.of(name, new FakeScriptDocValues<>(value))); when(lookup.getLeafSearchLookup(any())).thenReturn(leafLookup); when(leafLookup.doc()).thenReturn(leafDocLookup); return this; } - ExprScriptAssertion docValues(String name1, Object value1, - String name2, Object value2) { - LeafDocLookup leafDocLookup = mockLeafDocLookup( - ImmutableMap.of( - name1, new FakeScriptDocValues<>(value1), - name2, new FakeScriptDocValues<>(value2))); + ExprScriptAssertion docValues(String name1, Object 
value1, String name2, Object value2) { + LeafDocLookup leafDocLookup = + mockLeafDocLookup( + ImmutableMap.of( + name1, new FakeScriptDocValues<>(value1), + name2, new FakeScriptDocValues<>(value2))); when(lookup.getLeafSearchLookup(any())).thenReturn(leafLookup); when(leafLookup.doc()).thenReturn(leafDocLookup); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/GroupSortOrderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/GroupSortOrderTest.java index bff04604c1..2ab8a24d68 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/GroupSortOrderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/GroupSortOrderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.aggregation; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -28,10 +27,12 @@ class GroupSortOrderTest { private final AggregationQueryBuilder.GroupSortOrder groupSortOrder = new AggregationQueryBuilder.GroupSortOrder( - sort(ref("name", STRING), Sort.SortOption.DEFAULT_DESC, - ref("age", INTEGER), Sort.SortOption.DEFAULT_ASC)); - @Mock - private ReferenceExpression ref; + sort( + ref("name", STRING), + Sort.SortOption.DEFAULT_DESC, + ref("age", INTEGER), + Sort.SortOption.DEFAULT_ASC)); + @Mock private ReferenceExpression ref; @Test void both_expression_in_sort_list() { diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilderTest.java index 208904d9c3..d11d7da2fe 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilderTest.java +++ 
b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.aggregation.dsl; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -48,8 +47,7 @@ @ExtendWith(MockitoExtension.class) class BucketAggregationBuilderTest { - @Mock - private ExpressionSerializer serializer; + @Mock private ExpressionSerializer serializer; private BucketAggregationBuilder aggregationBuilder; @@ -69,9 +67,7 @@ void should_build_bucket_with_field() { + " \"order\" : \"asc\"\n" + " }\n" + "}", - buildQuery( - Arrays.asList( - asc(named("age", ref("age", INTEGER)))))); + buildQuery(Arrays.asList(asc(named("age", ref("age", INTEGER)))))); } @Test @@ -90,9 +86,7 @@ void should_build_bucket_with_literal() { + " \"order\" : \"asc\"\n" + " }\n" + "}", - buildQuery( - Arrays.asList( - asc(named(literal))))); + buildQuery(Arrays.asList(asc(named(literal))))); } @Test @@ -108,8 +102,16 @@ void should_build_bucket_with_keyword_field() { + "}", buildQuery( Arrays.asList( - asc(named("name", ref("name", OpenSearchTextType.of(Map.of("words", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))))))))); + asc( + named( + "name", + ref( + "name", + OpenSearchTextType.of( + Map.of( + "words", + OpenSearchDataType.of( + OpenSearchDataType.MappingType.Keyword))))))))); } @Test @@ -129,13 +131,13 @@ void should_build_bucket_with_parse_expression() { + " \"order\" : \"asc\"\n" + " }\n" + "}", - buildQuery( - Arrays.asList( - asc(named("name", parseExpression))))); + buildQuery(Arrays.asList(asc(named("name", parseExpression))))); } @ParameterizedTest(name = "{0}") - @EnumSource(value = ExprCoreType.class, names = {"TIMESTAMP", "TIME", "DATE", "DATETIME"}) + @EnumSource( + value = ExprCoreType.class, + names = {"TIMESTAMP", "TIME", "DATE", "DATETIME"}) void terms_bucket_for_datetime_types_uses_long(ExprType dataType) { 
assertEquals( "{\n" @@ -147,9 +149,7 @@ void terms_bucket_for_datetime_types_uses_long(ExprType dataType) { + " \"order\" : \"asc\"\n" + " }\n" + "}", - buildQuery( - Arrays.asList( - asc(named("date", ref("date", dataType)))))); + buildQuery(Arrays.asList(asc(named("date", ref("date", dataType)))))); } @SneakyThrows diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilderTest.java index 94f152f913..7f302c9c53 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.aggregation.dsl; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -49,11 +48,9 @@ @ExtendWith(MockitoExtension.class) class MetricAggregationBuilderTest { - @Mock - private ExpressionSerializer serializer; + @Mock private ExpressionSerializer serializer; - @Mock - private NamedAggregator aggregator; + @Mock private NamedAggregator aggregator; private MetricAggregationBuilder aggregationBuilder; @@ -64,299 +61,332 @@ void set_up() { @Test void should_build_avg_aggregation() { - assertEquals(format( - "{%n" - + " \"avg(age)\" : {%n" - + " \"avg\" : {%n" - + " \"field\" : \"age\"%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"avg(age)\" : {%n" + + " \"avg\" : {%n" + + " \"field\" : \"age\"%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( - named("avg(age)", - new AvgAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))))); + named( + "avg(age)", new AvgAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))))); } @Test void 
should_build_sum_aggregation() { - assertEquals(format( - "{%n" - + " \"sum(age)\" : {%n" - + " \"sum\" : {%n" - + " \"field\" : \"age\"%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"sum(age)\" : {%n" + + " \"sum\" : {%n" + + " \"field\" : \"age\"%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( - named("sum(age)", - new SumAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))))); + named( + "sum(age)", new SumAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))))); } @Test void should_build_count_aggregation() { - assertEquals(format( - "{%n" - + " \"count(age)\" : {%n" - + " \"value_count\" : {%n" - + " \"field\" : \"age\"%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"count(age)\" : {%n" + + " \"value_count\" : {%n" + + " \"field\" : \"age\"%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( - named("count(age)", + named( + "count(age)", new CountAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))))); } @Test void should_build_count_star_aggregation() { - assertEquals(format( - "{%n" - + " \"count(*)\" : {%n" - + " \"value_count\" : {%n" - + " \"field\" : \"_index\"%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"count(*)\" : {%n" + + " \"value_count\" : {%n" + + " \"field\" : \"_index\"%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( - named("count(*)", - new CountAggregator(Arrays.asList(literal("*")), INTEGER))))); + named("count(*)", new CountAggregator(Arrays.asList(literal("*")), INTEGER))))); } @Test void should_build_count_other_literal_aggregation() { - assertEquals(format( - "{%n" - + " \"count(1)\" : {%n" - + " \"value_count\" : {%n" - + " \"field\" : \"_index\"%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"count(1)\" : {%n" + + " \"value_count\" : {%n" + + " \"field\" : \"_index\"%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( - named("count(1)", - new 
CountAggregator(Arrays.asList(literal(1)), INTEGER))))); + named("count(1)", new CountAggregator(Arrays.asList(literal(1)), INTEGER))))); } @Test void should_build_min_aggregation() { - assertEquals(format( - "{%n" - + " \"min(age)\" : {%n" - + " \"min\" : {%n" - + " \"field\" : \"age\"%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"min(age)\" : {%n" + + " \"min\" : {%n" + + " \"field\" : \"age\"%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( - named("min(age)", - new MinAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))))); + named( + "min(age)", new MinAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))))); } @Test void should_build_max_aggregation() { - assertEquals(format( - "{%n" - + " \"max(age)\" : {%n" - + " \"max\" : {%n" - + " \"field\" : \"age\"%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"max(age)\" : {%n" + + " \"max\" : {%n" + + " \"field\" : \"age\"%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( - named("max(age)", - new MaxAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))))); + named( + "max(age)", new MaxAggregator(Arrays.asList(ref("age", INTEGER)), INTEGER))))); } @Test void should_build_varPop_aggregation() { - assertEquals(format( - "{%n" - + " \"var_pop(age)\" : {%n" - + " \"extended_stats\" : {%n" - + " \"field\" : \"age\",%n" - + " \"sigma\" : 2.0%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"var_pop(age)\" : {%n" + + " \"extended_stats\" : {%n" + + " \"field\" : \"age\",%n" + + " \"sigma\" : 2.0%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( - named("var_pop(age)", + named( + "var_pop(age)", variancePopulation(Arrays.asList(ref("age", INTEGER)), INTEGER))))); } @Test void should_build_varSamp_aggregation() { - assertEquals(format( - "{%n" - + " \"var_samp(age)\" : {%n" - + " \"extended_stats\" : {%n" - + " \"field\" : \"age\",%n" - + " \"sigma\" : 2.0%n" - + " }%n" - + " }%n" - + "}"), + 
assertEquals( + format( + "{%n" + + " \"var_samp(age)\" : {%n" + + " \"extended_stats\" : {%n" + + " \"field\" : \"age\",%n" + + " \"sigma\" : 2.0%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( - named("var_samp(age)", + named( + "var_samp(age)", varianceSample(Arrays.asList(ref("age", INTEGER)), INTEGER))))); } @Test void should_build_stddevPop_aggregation() { - assertEquals(format( - "{%n" - + " \"stddev_pop(age)\" : {%n" - + " \"extended_stats\" : {%n" - + " \"field\" : \"age\",%n" - + " \"sigma\" : 2.0%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"stddev_pop(age)\" : {%n" + + " \"extended_stats\" : {%n" + + " \"field\" : \"age\",%n" + + " \"sigma\" : 2.0%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( - named("stddev_pop(age)", + named( + "stddev_pop(age)", stddevPopulation(Arrays.asList(ref("age", INTEGER)), INTEGER))))); } @Test void should_build_stddevSamp_aggregation() { - assertEquals(format( - "{%n" - + " \"stddev_samp(age)\" : {%n" - + " \"extended_stats\" : {%n" - + " \"field\" : \"age\",%n" - + " \"sigma\" : 2.0%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"stddev_samp(age)\" : {%n" + + " \"extended_stats\" : {%n" + + " \"field\" : \"age\",%n" + + " \"sigma\" : 2.0%n" + + " }%n" + + " }%n" + + "}"), buildQuery( Arrays.asList( - named("stddev_samp(age)", + named( + "stddev_samp(age)", stddevSample(Arrays.asList(ref("age", INTEGER)), INTEGER))))); } @Test void should_build_cardinality_aggregation() { - assertEquals(format( - "{%n" - + " \"count(distinct name)\" : {%n" - + " \"cardinality\" : {%n" - + " \"field\" : \"name\"%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"count(distinct name)\" : {%n" + + " \"cardinality\" : {%n" + + " \"field\" : \"name\"%n" + + " }%n" + + " }%n" + + "}"), buildQuery( - Collections.singletonList(named("count(distinct name)", new CountAggregator( - Collections.singletonList(ref("name", STRING)), 
INTEGER).distinct(true))))); + Collections.singletonList( + named( + "count(distinct name)", + new CountAggregator(Collections.singletonList(ref("name", STRING)), INTEGER) + .distinct(true))))); } @Test void should_build_filtered_cardinality_aggregation() { - assertEquals(format( - "{%n" - + " \"count(distinct name) filter(where age > 30)\" : {%n" - + " \"filter\" : {%n" - + " \"range\" : {%n" - + " \"age\" : {%n" - + " \"from\" : 30,%n" - + " \"to\" : null,%n" - + " \"include_lower\" : false,%n" - + " \"include_upper\" : true,%n" - + " \"boost\" : 1.0%n" - + " }%n" - + " }%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"count(distinct name) filter(where age > 30)\" : {%n" - + " \"cardinality\" : {%n" - + " \"field\" : \"name\"%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), - buildQuery(Collections.singletonList(named( - "count(distinct name) filter(where age > 30)", - new CountAggregator(Collections.singletonList(ref("name", STRING)), INTEGER) - .condition(DSL.greater(ref("age", INTEGER), literal(30))) - .distinct(true))))); + assertEquals( + format( + "{%n" + + " \"count(distinct name) filter(where age > 30)\" : {%n" + + " \"filter\" : {%n" + + " \"range\" : {%n" + + " \"age\" : {%n" + + " \"from\" : 30,%n" + + " \"to\" : null,%n" + + " \"include_lower\" : false,%n" + + " \"include_upper\" : true,%n" + + " \"boost\" : 1.0%n" + + " }%n" + + " }%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"count(distinct name) filter(where age > 30)\" : {%n" + + " \"cardinality\" : {%n" + + " \"field\" : \"name\"%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), + buildQuery( + Collections.singletonList( + named( + "count(distinct name) filter(where age > 30)", + new CountAggregator(Collections.singletonList(ref("name", STRING)), INTEGER) + .condition(DSL.greater(ref("age", INTEGER), literal(30))) + .distinct(true))))); } @Test void should_build_top_hits_aggregation() { - assertEquals(format( - "{%n" - + " \"take(name, 10)\" : {%n" - + " \"top_hits\" : 
{%n" - + " \"from\" : 0,%n" - + " \"size\" : 10,%n" - + " \"version\" : false,%n" - + " \"seq_no_primary_term\" : false,%n" - + " \"explain\" : false,%n" - + " \"_source\" : {%n" - + " \"includes\" : [ \"name\" ],%n" - + " \"excludes\" : [ ]%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), + assertEquals( + format( + "{%n" + + " \"take(name, 10)\" : {%n" + + " \"top_hits\" : {%n" + + " \"from\" : 0,%n" + + " \"size\" : 10,%n" + + " \"version\" : false,%n" + + " \"seq_no_primary_term\" : false,%n" + + " \"explain\" : false,%n" + + " \"_source\" : {%n" + + " \"includes\" : [ \"name\" ],%n" + + " \"excludes\" : [ ]%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), buildQuery( - Collections.singletonList(named("take(name, 10)", new TakeAggregator( - ImmutableList.of(ref("name", STRING), literal(10)), ARRAY))))); + Collections.singletonList( + named( + "take(name, 10)", + new TakeAggregator( + ImmutableList.of(ref("name", STRING), literal(10)), ARRAY))))); } @Test void should_build_filtered_top_hits_aggregation() { - assertEquals(format( - "{%n" - + " \"take(name, 10) filter(where age > 30)\" : {%n" - + " \"filter\" : {%n" - + " \"range\" : {%n" - + " \"age\" : {%n" - + " \"from\" : 30,%n" - + " \"to\" : null,%n" - + " \"include_lower\" : false,%n" - + " \"include_upper\" : true,%n" - + " \"boost\" : 1.0%n" - + " }%n" - + " }%n" - + " },%n" - + " \"aggregations\" : {%n" - + " \"take(name, 10) filter(where age > 30)\" : {%n" - + " \"top_hits\" : {%n" - + " \"from\" : 0,%n" - + " \"size\" : 10,%n" - + " \"version\" : false,%n" - + " \"seq_no_primary_term\" : false,%n" - + " \"explain\" : false,%n" - + " \"_source\" : {%n" - + " \"includes\" : [ \"name\" ],%n" - + " \"excludes\" : [ ]%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + " }%n" - + "}"), - buildQuery(Collections.singletonList(named( - "take(name, 10) filter(where age > 30)", - new TakeAggregator( - ImmutableList.of(ref("name", STRING), literal(10)), ARRAY) - .condition(DSL.greater(ref("age", INTEGER), 
literal(30))))))); + assertEquals( + format( + "{%n" + + " \"take(name, 10) filter(where age > 30)\" : {%n" + + " \"filter\" : {%n" + + " \"range\" : {%n" + + " \"age\" : {%n" + + " \"from\" : 30,%n" + + " \"to\" : null,%n" + + " \"include_lower\" : false,%n" + + " \"include_upper\" : true,%n" + + " \"boost\" : 1.0%n" + + " }%n" + + " }%n" + + " },%n" + + " \"aggregations\" : {%n" + + " \"take(name, 10) filter(where age > 30)\" : {%n" + + " \"top_hits\" : {%n" + + " \"from\" : 0,%n" + + " \"size\" : 10,%n" + + " \"version\" : false,%n" + + " \"seq_no_primary_term\" : false,%n" + + " \"explain\" : false,%n" + + " \"_source\" : {%n" + + " \"includes\" : [ \"name\" ],%n" + + " \"excludes\" : [ ]%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + " }%n" + + "}"), + buildQuery( + Collections.singletonList( + named( + "take(name, 10) filter(where age > 30)", + new TakeAggregator(ImmutableList.of(ref("name", STRING), literal(10)), ARRAY) + .condition(DSL.greater(ref("age", INTEGER), literal(30))))))); } @Test void should_throw_exception_for_unsupported_distinct_aggregator() { - assertThrows(IllegalStateException.class, - () -> buildQuery(Collections.singletonList(named("avg(distinct age)", new AvgAggregator( - Collections.singletonList(ref("name", STRING)), STRING).distinct(true)))), + assertThrows( + IllegalStateException.class, + () -> + buildQuery( + Collections.singletonList( + named( + "avg(distinct age)", + new AvgAggregator(Collections.singletonList(ref("name", STRING)), STRING) + .distinct(true)))), "unsupported distinct aggregator avg"); } @@ -366,7 +396,8 @@ void should_throw_exception_for_unsupported_aggregator() { when(aggregator.getArguments()).thenReturn(Arrays.asList(ref("age", INTEGER))); IllegalStateException exception = - assertThrows(IllegalStateException.class, + assertThrows( + IllegalStateException.class, () -> buildQuery(Arrays.asList(named("unsupported_agg(age)", aggregator)))); assertEquals("unsupported aggregator unsupported_agg", 
exception.getMessage()); } @@ -374,19 +405,23 @@ void should_throw_exception_for_unsupported_aggregator() { @Test void should_throw_exception_for_unsupported_exception() { IllegalStateException exception = - assertThrows(IllegalStateException.class, () -> buildQuery(Arrays.asList( - named("count(age)", - new CountAggregator(Arrays.asList(named("age", ref("age", INTEGER))), INTEGER))))); - assertEquals( - "metric aggregation doesn't support expression age", - exception.getMessage()); + assertThrows( + IllegalStateException.class, + () -> + buildQuery( + Arrays.asList( + named( + "count(age)", + new CountAggregator( + Arrays.asList(named("age", ref("age", INTEGER))), INTEGER))))); + assertEquals("metric aggregation doesn't support expression age", exception.getMessage()); } @SneakyThrows private String buildQuery(List namedAggregatorList) { ObjectMapper objectMapper = new ObjectMapper(); - return objectMapper.readTree( - aggregationBuilder.build(namedAggregatorList).getLeft().toString()) + return objectMapper + .readTree(aggregationBuilder.build(namedAggregatorList).getLeft().toString()) .toPrettyString(); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptFactoryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptFactoryTest.java index 3c927c9a0b..d2d349c14b 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptFactoryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptFactoryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -30,14 +29,11 @@ @ExtendWith(MockitoExtension.class) class ExpressionFilterScriptFactoryTest { - @Mock - private SearchLookup searchLookup; + @Mock private 
SearchLookup searchLookup; - @Mock - private LeafSearchLookup leafSearchLookup; + @Mock private LeafSearchLookup leafSearchLookup; - @Mock - private LeafReaderContext leafReaderContext; + @Mock private LeafReaderContext leafReaderContext; private final Expression expression = DSL.literal(true); @@ -59,8 +55,6 @@ void can_initialize_expression_filter_script() throws IOException { assertEquals( new ExpressionFilterScript(expression, searchLookup, leafReaderContext, params), - actualFilterScript - ); + actualFilterScript); } - } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptTest.java index 61a3e9d35f..cca51c8f4a 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/ExpressionFilterScriptTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter; import static java.util.Collections.emptyList; @@ -55,37 +54,27 @@ @ExtendWith(MockitoExtension.class) class ExpressionFilterScriptTest { - @Mock - private SearchLookup lookup; + @Mock private SearchLookup lookup; - @Mock - private LeafSearchLookup leafLookup; + @Mock private LeafSearchLookup leafLookup; - @Mock - private LeafReaderContext context; + @Mock private LeafReaderContext context; @Test void should_match_if_true_literal() { - assertThat() - .docValues() - .filterBy(literal(true)) - .shouldMatch(); + assertThat().docValues().filterBy(literal(true)).shouldMatch(); } @Test void should_not_match_if_false_literal() { - assertThat() - .docValues() - .filterBy(literal(false)) - .shouldNotMatch(); + assertThat().docValues().filterBy(literal(false)).shouldNotMatch(); } @Test void can_execute_expression_with_integer_field() { 
assertThat() .docValues("age", 30L) // DocValue only supports long - .filterBy( - DSL.greater(ref("age", INTEGER), literal(20))) + .filterBy(DSL.greater(ref("age", INTEGER), literal(20))) .shouldMatch(); } @@ -94,8 +83,13 @@ void can_execute_expression_with_text_keyword_field() { assertThat() .docValues("name.keyword", "John") .filterBy( - DSL.equal(ref("name", OpenSearchTextType.of(Map.of("words", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))), + DSL.equal( + ref( + "name", + OpenSearchTextType.of( + Map.of( + "words", + OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))), literal("John"))) .shouldMatch(); } @@ -151,34 +145,31 @@ void can_execute_expression_with_time_field() { @Test void can_execute_expression_with_missing_field() { - assertThat() - .docValues("age", 30) - .filterBy(ref("name", STRING)) - .shouldNotMatch(); + assertThat().docValues("age", 30).filterBy(ref("name", STRING)).shouldNotMatch(); } @Test void can_execute_expression_with_empty_doc_value() { - assertThat() - .docValues("name", emptyList()) - .filterBy(ref("name", STRING)) - .shouldNotMatch(); + assertThat().docValues("name", emptyList()).filterBy(ref("name", STRING)).shouldNotMatch(); } @Test void can_execute_parse_expression() { assertThat() .docValues("age_string", "age: 30") - .filterBy(DSL.equal( - DSL.regex(DSL.ref("age_string", STRING), literal("age: (?\\d+)"), literal("age")), - literal("30"))) + .filterBy( + DSL.equal( + DSL.regex( + DSL.ref("age_string", STRING), literal("age: (?\\d+)"), literal("age")), + literal("30"))) .shouldMatch(); } @Test void cannot_execute_non_predicate_expression() { - assertThrow(IllegalStateException.class, - "Expression has wrong result type instead of boolean: expression [10], result [10]") + assertThrow( + IllegalStateException.class, + "Expression has wrong result type instead of boolean: expression [10], result [10]") .docValues() .filterBy(literal(10)); } @@ -187,8 +178,7 @@ private ExprScriptAssertion 
assertThat() { return new ExprScriptAssertion(lookup, leafLookup, context); } - private ExprScriptAssertion assertThrow(Class clazz, - String message) { + private ExprScriptAssertion assertThrow(Class clazz, String message) { return new ExprScriptAssertion(lookup, leafLookup, context) { @Override ExprScriptAssertion filterBy(Expression expr) { @@ -211,20 +201,20 @@ ExprScriptAssertion docValues() { } ExprScriptAssertion docValues(String name, Object value) { - LeafDocLookup leafDocLookup = mockLeafDocLookup( - ImmutableMap.of(name, new FakeScriptDocValues<>(value))); + LeafDocLookup leafDocLookup = + mockLeafDocLookup(ImmutableMap.of(name, new FakeScriptDocValues<>(value))); when(lookup.getLeafSearchLookup(any())).thenReturn(leafLookup); when(leafLookup.doc()).thenReturn(leafDocLookup); return this; } - ExprScriptAssertion docValues(String name1, Object value1, - String name2, Object value2) { - LeafDocLookup leafDocLookup = mockLeafDocLookup( - ImmutableMap.of( - name1, new FakeScriptDocValues<>(value1), - name2, new FakeScriptDocValues<>(value2))); + ExprScriptAssertion docValues(String name1, Object value1, String name2, Object value2) { + LeafDocLookup leafDocLookup = + mockLeafDocLookup( + ImmutableMap.of( + name1, new FakeScriptDocValues<>(value1), + name2, new FakeScriptDocValues<>(value2))); when(lookup.getLeafSearchLookup(any())).thenReturn(leafLookup); when(leafLookup.doc()).thenReturn(leafDocLookup); @@ -276,5 +266,4 @@ public int size() { return values.size(); } } - } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilderTest.java index eb07076257..1fc2d5ee29 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilderTest.java @@ -3,7 
+3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -63,22 +62,42 @@ class FilterQueryBuilderTest { private static Stream numericCastSource() { - return Stream.of(literal((byte) 1), literal((short) -1), literal( - 1), literal(21L), literal(3.14F), literal(3.1415D), literal(true), literal("1")); + return Stream.of( + literal((byte) 1), + literal((short) -1), + literal(1), + literal(21L), + literal(3.14F), + literal(3.1415D), + literal(true), + literal("1")); } private static Stream booleanTrueCastSource() { - return Stream.of(literal((byte) 1), literal((short) -1), literal( - 1), literal(42L), literal(3.14F), literal(3.1415D), literal(true), literal("true")); + return Stream.of( + literal((byte) 1), + literal((short) -1), + literal(1), + literal(42L), + literal(3.14F), + literal(3.1415D), + literal(true), + literal("true")); } private static Stream booleanFalseCastSource() { - return Stream.of(literal((byte) 0), literal((short) 0), literal( - 0), literal(0L), literal(0.0F), literal(0.0D), literal(false), literal("false")); + return Stream.of( + literal((byte) 0), + literal((short) 0), + literal(0), + literal(0L), + literal(0.0F), + literal(0.0D), + literal(false), + literal("false")); } - @Mock - private ExpressionSerializer serializer; + @Mock private ExpressionSerializer serializer; private FilterQueryBuilder filterQueryBuilder; @@ -98,34 +117,42 @@ void should_build_term_query_for_equality_expression() { + " }\n" + " }\n" + "}", - buildQuery( - DSL.equal( - ref("name", STRING), literal("John")))); + buildQuery(DSL.equal(ref("name", STRING), literal("John")))); } @Test void should_build_range_query_for_comparison_expression() { Expression[] params = {ref("age", INTEGER), literal(30)}; - Map ranges = ImmutableMap.of( - DSL.less(params), new Object[]{null, 30, true, false}, - DSL.greater(params), new Object[]{30, null, false, true}, - 
DSL.lte(params), new Object[]{null, 30, true, true}, - DSL.gte(params), new Object[]{30, null, true, true}); - - ranges.forEach((expr, range) -> - assertJsonEquals( - "{\n" - + " \"range\" : {\n" - + " \"age\" : {\n" - + " \"from\" : " + range[0] + ",\n" - + " \"to\" : " + range[1] + ",\n" - + " \"include_lower\" : " + range[2] + ",\n" - + " \"include_upper\" : " + range[3] + ",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}", - buildQuery(expr))); + Map ranges = + ImmutableMap.of( + DSL.less(params), new Object[] {null, 30, true, false}, + DSL.greater(params), new Object[] {30, null, false, true}, + DSL.lte(params), new Object[] {null, 30, true, true}, + DSL.gte(params), new Object[] {30, null, true, true}); + + ranges.forEach( + (expr, range) -> + assertJsonEquals( + "{\n" + + " \"range\" : {\n" + + " \"age\" : {\n" + + " \"from\" : " + + range[0] + + ",\n" + + " \"to\" : " + + range[1] + + ",\n" + + " \"include_lower\" : " + + range[2] + + ",\n" + + " \"include_upper\" : " + + range[3] + + ",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + buildQuery(expr))); } @Test @@ -140,9 +167,7 @@ void should_build_wildcard_query_for_like_expression() { + " }\n" + " }\n" + "}", - buildQuery( - DSL.like( - ref("name", STRING), literal("%John_")))); + buildQuery(DSL.like(ref("name", STRING), literal("%John_")))); } @Test @@ -158,8 +183,7 @@ void should_build_script_query_for_unsupported_lucene_query() { + " \"boost\" : 1.0\n" + " }\n" + "}", - buildQuery( - DSL.isnotnull(ref("age", INTEGER)))); + buildQuery(DSL.isnotnull(ref("age", INTEGER)))); } @Test @@ -175,9 +199,7 @@ void should_build_script_query_for_function_expression() { + " \"boost\" : 1.0\n" + " }\n" + "}", - buildQuery( - DSL.equal( - DSL.abs(ref("age", INTEGER)), literal(30)))); + buildQuery(DSL.equal(DSL.abs(ref("age", INTEGER)), literal(30)))); } @Test @@ -193,26 +215,23 @@ void should_build_script_query_for_comparison_between_fields() { + " \"boost\" : 1.0\n" + " }\n" + "}", - 
buildQuery( - DSL.equal( - ref("age1", INTEGER), ref("age2", INTEGER)))); + buildQuery(DSL.equal(ref("age1", INTEGER), ref("age2", INTEGER)))); } @Test void should_build_bool_query_for_and_or_expression() { - String[] names = { "filter", "should" }; + String[] names = {"filter", "should"}; FunctionExpression expr1 = DSL.equal(ref("name", STRING), literal("John")); FunctionExpression expr2 = DSL.equal(ref("age", INTEGER), literal(30)); - Expression[] exprs = { - DSL.and(expr1, expr2), - DSL.or(expr1, expr2) - }; + Expression[] exprs = {DSL.and(expr1, expr2), DSL.or(expr1, expr2)}; for (int i = 0; i < names.length; i++) { assertJsonEquals( "{\n" + " \"bool\" : {\n" - + " \"" + names[i] + "\" : [\n" + + " \"" + + names[i] + + "\" : [\n" + " {\n" + " \"term\" : {\n" + " \"name\" : {\n" @@ -257,10 +276,7 @@ void should_build_bool_query_for_not_expression() { + " \"boost\" : 1.0\n" + " }\n" + "}", - buildQuery( - DSL.not( - DSL.equal( - ref("age", INTEGER), literal(30))))); + buildQuery(DSL.not(DSL.equal(ref("age", INTEGER), literal(30))))); } @Test @@ -276,8 +292,12 @@ void should_use_keyword_for_multi_field_in_equality_expression() { + "}", buildQuery( DSL.equal( - ref("name", OpenSearchTextType.of(Map.of("words", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))), + ref( + "name", + OpenSearchTextType.of( + Map.of( + "words", + OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))), literal("John")))); } @@ -295,8 +315,12 @@ void should_use_keyword_for_multi_field_in_like_expression() { + "}", buildQuery( DSL.like( - ref("name", OpenSearchTextType.of(Map.of("words", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))), + ref( + "name", + OpenSearchTextType.of( + Map.of( + "words", + OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))), literal("John%")))); } @@ -320,13 +344,9 @@ void should_build_term_query_predicate_expression_with_nested_function() { + " }\n" + "}", buildQuery( - DSL.equal(DSL.nested( - 
DSL.ref("message.info", STRING), - DSL.ref("message", STRING)), - literal("string_value") - ) - ) - ); + DSL.equal( + DSL.nested(DSL.ref("message.info", STRING), DSL.ref("message", STRING)), + literal("string_value")))); } @Test @@ -351,78 +371,67 @@ void should_build_range_query_predicate_expression_with_nested_function() { + " \"boost\" : 1.0\n" + " }\n" + "}", - buildQuery( - DSL.greater(DSL.nested( - DSL.ref("lottery.number.id", INTEGER)), literal(1234) - ) - ) - ); + buildQuery(DSL.greater(DSL.nested(DSL.ref("lottery.number.id", INTEGER)), literal(1234)))); } // TODO remove this test when alternate syntax is implemented for nested // function in WHERE clause: nested(path, condition) @Test void ensure_alternate_syntax_falls_back_to_legacy_engine() { - assertThrows(SyntaxCheckException.class, () -> - buildQuery( - DSL.nested( - DSL.ref("message", STRING), - DSL.equal(DSL.literal("message.info"), literal("a")) - ) - ) - ); + assertThrows( + SyntaxCheckException.class, + () -> + buildQuery( + DSL.nested( + DSL.ref("message", STRING), + DSL.equal(DSL.literal("message.info"), literal("a"))))); } @Test void nested_filter_wrong_right_side_type_in_predicate_throws_exception() { - assertThrows(IllegalArgumentException.class, () -> - buildQuery( - DSL.equal(DSL.nested( - DSL.ref("message.info", STRING), - DSL.ref("message", STRING)), - DSL.ref("string_value", STRING) - ) - ) - ); + assertThrows( + IllegalArgumentException.class, + () -> + buildQuery( + DSL.equal( + DSL.nested(DSL.ref("message.info", STRING), DSL.ref("message", STRING)), + DSL.ref("string_value", STRING)))); } @Test void nested_filter_wrong_first_param_type_throws_exception() { - assertThrows(IllegalArgumentException.class, () -> - buildQuery( - DSL.equal(DSL.nested( - DSL.namedArgument("field", literal("message"))), - literal("string_value") - ) - ) - ); + assertThrows( + IllegalArgumentException.class, + () -> + buildQuery( + DSL.equal( + DSL.nested(DSL.namedArgument("field", literal("message"))), + 
literal("string_value")))); } @Test void nested_filter_wrong_second_param_type_throws_exception() { - assertThrows(IllegalArgumentException.class, () -> - buildQuery( - DSL.equal(DSL.nested( - DSL.ref("message.info", STRING), - DSL.literal(2)), - literal("string_value") - ) - ) - ); + assertThrows( + IllegalArgumentException.class, + () -> + buildQuery( + DSL.equal( + DSL.nested(DSL.ref("message.info", STRING), DSL.literal(2)), + literal("string_value")))); } @Test void nested_filter_too_many_params_throws_exception() { - assertThrows(IllegalArgumentException.class, () -> - buildQuery( - DSL.equal(DSL.nested( - DSL.ref("message.info", STRING), - DSL.ref("message", STRING), - DSL.ref("message", STRING)), - literal("string_value") - ) - ) - ); + assertThrows( + IllegalArgumentException.class, + () -> + buildQuery( + DSL.equal( + DSL.nested( + DSL.ref("message.info", STRING), + DSL.ref("message", STRING), + DSL.ref("message", STRING)), + literal("string_value")))); } @Test @@ -445,8 +454,8 @@ void should_build_match_query_with_default_parameters() { + "}", buildQuery( DSL.match( - DSL.namedArgument("field", - new ReferenceExpression("message", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("message", OpenSearchTextType.of())), DSL.namedArgument("query", literal("search query"))))); } @@ -474,8 +483,8 @@ void should_build_match_query_with_custom_parameters() { + "}", buildQuery( DSL.match( - DSL.namedArgument("field", - new ReferenceExpression("message", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("message", OpenSearchTextType.of())), DSL.namedArgument("query", literal("search query")), DSL.namedArgument("operator", literal("AND")), DSL.namedArgument("analyzer", literal("keyword")), @@ -493,60 +502,65 @@ void should_build_match_query_with_custom_parameters() { @Test void match_invalid_parameter() { - FunctionExpression expr = DSL.match( - DSL.namedArgument("field", - new 
ReferenceExpression("message", OpenSearchTextType.of())), - DSL.namedArgument("query", literal("search query")), - DSL.namedArgument("invalid_parameter", literal("invalid_value"))); + FunctionExpression expr = + DSL.match( + DSL.namedArgument("field", new ReferenceExpression("message", OpenSearchTextType.of())), + DSL.namedArgument("query", literal("search query")), + DSL.namedArgument("invalid_parameter", literal("invalid_value"))); var msg = assertThrows(SemanticCheckException.class, () -> buildQuery(expr)).getMessage(); assertTrue(msg.startsWith("Parameter invalid_parameter is invalid for match function.")); } @Test void match_disallow_duplicate_parameter() { - FunctionExpression expr = DSL.match( - DSL.namedArgument("field", literal("message")), - DSL.namedArgument("query", literal("search query")), - DSL.namedArgument("analyzer", literal("keyword")), - DSL.namedArgument("AnalYzer", literal("english"))); + FunctionExpression expr = + DSL.match( + DSL.namedArgument("field", literal("message")), + DSL.namedArgument("query", literal("search query")), + DSL.namedArgument("analyzer", literal("keyword")), + DSL.namedArgument("AnalYzer", literal("english"))); var msg = assertThrows(SemanticCheckException.class, () -> buildQuery(expr)).getMessage(); assertEquals("Parameter 'analyzer' can only be specified once.", msg); } @Test void match_disallow_duplicate_query() { - FunctionExpression expr = DSL.match( - DSL.namedArgument("field", literal("message")), - DSL.namedArgument("query", literal("search query")), - DSL.namedArgument("analyzer", literal("keyword")), - DSL.namedArgument("QUERY", literal("something"))); + FunctionExpression expr = + DSL.match( + DSL.namedArgument("field", literal("message")), + DSL.namedArgument("query", literal("search query")), + DSL.namedArgument("analyzer", literal("keyword")), + DSL.namedArgument("QUERY", literal("something"))); var msg = assertThrows(SemanticCheckException.class, () -> buildQuery(expr)).getMessage(); 
assertEquals("Parameter 'query' can only be specified once.", msg); } @Test void match_disallow_duplicate_field() { - FunctionExpression expr = DSL.match( - DSL.namedArgument("field", literal("message")), - DSL.namedArgument("query", literal("search query")), - DSL.namedArgument("analyzer", literal("keyword")), - DSL.namedArgument("Field", literal("something"))); + FunctionExpression expr = + DSL.match( + DSL.namedArgument("field", literal("message")), + DSL.namedArgument("query", literal("search query")), + DSL.namedArgument("analyzer", literal("keyword")), + DSL.namedArgument("Field", literal("something"))); var msg = assertThrows(SemanticCheckException.class, () -> buildQuery(expr)).getMessage(); assertEquals("Parameter 'field' can only be specified once.", msg); } @Test void match_missing_field() { - FunctionExpression expr = DSL.match( - DSL.namedArgument("query", literal("search query")), - DSL.namedArgument("analyzer", literal("keyword"))); + FunctionExpression expr = + DSL.match( + DSL.namedArgument("query", literal("search query")), + DSL.namedArgument("analyzer", literal("keyword"))); var msg = assertThrows(SemanticCheckException.class, () -> buildQuery(expr)).getMessage(); assertEquals("'field' parameter is missing.", msg); } @Test void match_missing_query() { - FunctionExpression expr = DSL.match( + FunctionExpression expr = + DSL.match( DSL.namedArgument("field", literal("field1")), DSL.namedArgument("analyzer", literal("keyword"))); var msg = assertThrows(SemanticCheckException.class, () -> buildQuery(expr)).getMessage(); @@ -556,7 +570,7 @@ void match_missing_query() { @Test void should_build_match_phrase_query_with_default_parameters() { assertJsonEquals( - "{\n" + "{\n" + " \"match_phrase\" : {\n" + " \"message\" : {\n" + " \"query\" : \"search query\",\n" @@ -568,14 +582,15 @@ void should_build_match_phrase_query_with_default_parameters() { + "}", buildQuery( DSL.match_phrase( - DSL.namedArgument("field", - new ReferenceExpression("message", 
OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("message", OpenSearchTextType.of())), DSL.namedArgument("query", literal("search query"))))); } @Test void should_build_multi_match_query_with_default_parameters_single_field() { - assertJsonEquals("{\n" + assertJsonEquals( + "{\n" + " \"multi_match\" : {\n" + " \"query\" : \"search query\",\n" + " \"fields\" : [\n" @@ -592,16 +607,21 @@ void should_build_multi_match_query_with_default_parameters_single_field() { + " \"boost\" : 1.0,\n" + " }\n" + "}", - buildQuery(DSL.multi_match( - DSL.namedArgument("fields", DSL.literal(new ExprTupleValue( - new LinkedHashMap<>(ImmutableMap.of( - "field1", ExprValueUtils.floatValue(1.F)))))), - DSL.namedArgument("query", literal("search query"))))); + buildQuery( + DSL.multi_match( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("field1", ExprValueUtils.floatValue(1.F)))))), + DSL.namedArgument("query", literal("search query"))))); } @Test void should_build_multi_match_query_with_default_parameters_all_fields() { - assertJsonEquals("{\n" + assertJsonEquals( + "{\n" + " \"multi_match\" : {\n" + " \"query\" : \"search query\",\n" + " \"fields\" : [\n" @@ -618,16 +638,21 @@ void should_build_multi_match_query_with_default_parameters_all_fields() { + " \"boost\" : 1.0,\n" + " }\n" + "}", - buildQuery(DSL.multi_match( - DSL.namedArgument("fields", DSL.literal(new ExprTupleValue( - new LinkedHashMap<>(ImmutableMap.of( - "*", ExprValueUtils.floatValue(1.F)))))), - DSL.namedArgument("query", literal("search query"))))); + buildQuery( + DSL.multi_match( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("*", ExprValueUtils.floatValue(1.F)))))), + DSL.namedArgument("query", literal("search query"))))); } @Test void should_build_multi_match_query_with_default_parameters_no_fields() { - assertJsonEquals("{\n" + assertJsonEquals( + "{\n" + 
" \"multi_match\" : {\n" + " \"query\" : \"search query\",\n" + " \"fields\" : [],\n" @@ -642,17 +667,20 @@ void should_build_multi_match_query_with_default_parameters_no_fields() { + " \"boost\" : 1.0,\n" + " }\n" + "}", - buildQuery(DSL.multi_match( - DSL.namedArgument("fields", DSL.literal(new ExprTupleValue( - new LinkedHashMap<>(ImmutableMap.of())))), - DSL.namedArgument("query", literal("search query"))))); + buildQuery( + DSL.multi_match( + DSL.namedArgument( + "fields", + DSL.literal(new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of())))), + DSL.namedArgument("query", literal("search query"))))); } // Note: we can't test `multi_match` and `simple_query_string` without weight(s) @Test void should_build_multi_match_query_with_default_parameters_multiple_fields() { - var expected = "{\n" + var expected = + "{\n" + " \"multi_match\" : {\n" + " \"query\" : \"search query\",\n" + " \"fields\" : [%s],\n" @@ -667,23 +695,31 @@ void should_build_multi_match_query_with_default_parameters_multiple_fields() { + " \"boost\" : 1.0,\n" + " }\n" + "}"; - var actual = buildQuery(DSL.multi_match( - DSL.namedArgument("fields", DSL.literal(new ExprTupleValue( - new LinkedHashMap<>(ImmutableMap.of( - "field1", ExprValueUtils.floatValue(1.F), - "field2", ExprValueUtils.floatValue(.3F)))))), - DSL.namedArgument("query", literal("search query")))); + var actual = + buildQuery( + DSL.multi_match( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "field1", ExprValueUtils.floatValue(1.F), + "field2", ExprValueUtils.floatValue(.3F)))))), + DSL.namedArgument("query", literal("search query")))); var ex1 = String.format(expected, "\"field1^1.0\", \"field2^0.3\""); var ex2 = String.format(expected, "\"field2^0.3\", \"field1^1.0\""); - assertTrue(new JSONObject(ex1).similar(new JSONObject(actual)) - || new JSONObject(ex2).similar(new JSONObject(actual)), + assertTrue( + new JSONObject(ex1).similar(new JSONObject(actual)) 
+ || new JSONObject(ex2).similar(new JSONObject(actual)), StringUtils.format("Actual %s doesn't match neither expected %s nor %s", actual, ex1, ex2)); } @Test void should_build_multi_match_query_with_custom_parameters() { - var expected = "{\n" + var expected = + "{\n" + " \"multi_match\" : {\n" + " \"query\" : \"search query\",\n" + " \"fields\" : [%s],\n" @@ -704,10 +740,13 @@ void should_build_multi_match_query_with_custom_parameters() { + " \"boost\" : 2.0\n" + " }\n" + "}"; - var actual = buildQuery( + var actual = + buildQuery( DSL.multi_match( - DSL.namedArgument("fields", DSL.literal( - ExprValueUtils.tupleValue(ImmutableMap.of("field1", 1.F, "field2", .3F)))), + DSL.namedArgument( + "fields", + DSL.literal( + ExprValueUtils.tupleValue(ImmutableMap.of("field1", 1.F, "field2", .3F)))), DSL.namedArgument("query", literal("search query")), DSL.namedArgument("analyzer", literal("keyword")), DSL.namedArgument("auto_generate_synonyms_phrase_query", literal("false")), @@ -727,28 +766,36 @@ void should_build_multi_match_query_with_custom_parameters() { var ex1 = String.format(expected, "\"field1^1.0\", \"field2^0.3\""); var ex2 = String.format(expected, "\"field2^0.3\", \"field1^1.0\""); - assertTrue(new JSONObject(ex1).similar(new JSONObject(actual)) - || new JSONObject(ex2).similar(new JSONObject(actual)), + assertTrue( + new JSONObject(ex1).similar(new JSONObject(actual)) + || new JSONObject(ex2).similar(new JSONObject(actual)), StringUtils.format("Actual %s doesn't match neither expected %s nor %s", actual, ex1, ex2)); } @Test void multi_match_invalid_parameter() { - FunctionExpression expr = DSL.multi_match( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "field1", ExprValueUtils.floatValue(1.F), - "field2", ExprValueUtils.floatValue(.3F)))))), - DSL.namedArgument("query", literal("search query")), - DSL.namedArgument("invalid_parameter", literal("invalid_value"))); - 
assertThrows(SemanticCheckException.class, () -> buildQuery(expr), + FunctionExpression expr = + DSL.multi_match( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "field1", ExprValueUtils.floatValue(1.F), + "field2", ExprValueUtils.floatValue(.3F)))))), + DSL.namedArgument("query", literal("search query")), + DSL.namedArgument("invalid_parameter", literal("invalid_value"))); + assertThrows( + SemanticCheckException.class, + () -> buildQuery(expr), "Parameter invalid_parameter is invalid for match function."); } @Test void should_build_match_phrase_query_with_custom_parameters() { assertJsonEquals( - "{\n" + "{\n" + " \"match_phrase\" : {\n" + " \"message\" : {\n" + " \"query\" : \"search query\",\n" @@ -761,8 +808,8 @@ void should_build_match_phrase_query_with_custom_parameters() { + "}", buildQuery( DSL.match_phrase( - DSL.namedArgument("field", - new ReferenceExpression("message", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("message", OpenSearchTextType.of())), DSL.namedArgument("boost", literal("1.2")), DSL.namedArgument("query", literal("search query")), DSL.namedArgument("analyzer", literal("keyword")), @@ -772,150 +819,171 @@ void should_build_match_phrase_query_with_custom_parameters() { @Test void wildcard_query_invalid_parameter() { - FunctionExpression expr = DSL.wildcard_query( - DSL.namedArgument("field", - new ReferenceExpression("field", OpenSearchTextType.of())), - DSL.namedArgument("query", literal("search query*")), - DSL.namedArgument("invalid_parameter", literal("invalid_value"))); - assertThrows(SemanticCheckException.class, () -> buildQuery(expr), + FunctionExpression expr = + DSL.wildcard_query( + DSL.namedArgument("field", new ReferenceExpression("field", OpenSearchTextType.of())), + DSL.namedArgument("query", literal("search query*")), + DSL.namedArgument("invalid_parameter", literal("invalid_value"))); + assertThrows( + 
SemanticCheckException.class, + () -> buildQuery(expr), "Parameter invalid_parameter is invalid for wildcard_query function."); } @Test void wildcard_query_convert_sql_wildcard_to_lucene() { // Test conversion of % wildcard to * - assertJsonEquals("{\n" - + " \"wildcard\" : {\n" - + " \"field\" : {\n" - + " \"wildcard\" : \"search query*\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}", - buildQuery(DSL.wildcard_query( - DSL.namedArgument("field", - new ReferenceExpression("field", OpenSearchTextType.of())), - DSL.namedArgument("query", literal("search query%"))))); - - assertJsonEquals("{\n" - + " \"wildcard\" : {\n" - + " \"field\" : {\n" - + " \"wildcard\" : \"search query?\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}", - buildQuery(DSL.wildcard_query( - DSL.namedArgument("field", - new ReferenceExpression("field", OpenSearchTextType.of())), - DSL.namedArgument("query", literal("search query_"))))); + assertJsonEquals( + "{\n" + + " \"wildcard\" : {\n" + + " \"field\" : {\n" + + " \"wildcard\" : \"search query*\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + buildQuery( + DSL.wildcard_query( + DSL.namedArgument( + "field", new ReferenceExpression("field", OpenSearchTextType.of())), + DSL.namedArgument("query", literal("search query%"))))); + + assertJsonEquals( + "{\n" + + " \"wildcard\" : {\n" + + " \"field\" : {\n" + + " \"wildcard\" : \"search query?\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + buildQuery( + DSL.wildcard_query( + DSL.namedArgument( + "field", new ReferenceExpression("field", OpenSearchTextType.of())), + DSL.namedArgument("query", literal("search query_"))))); } @Test void wildcard_query_escape_wildcards_characters() { - assertJsonEquals("{\n" - + " \"wildcard\" : {\n" - + " \"field\" : {\n" - + " \"wildcard\" : \"search query%\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}", - buildQuery(DSL.wildcard_query( - DSL.namedArgument("field", - new 
ReferenceExpression("field", OpenSearchTextType.of())), - DSL.namedArgument("query", literal("search query\\%"))))); - - assertJsonEquals("{\n" - + " \"wildcard\" : {\n" - + " \"field\" : {\n" - + " \"wildcard\" : \"search query_\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}", - buildQuery(DSL.wildcard_query( - DSL.namedArgument("field", - new ReferenceExpression("field", OpenSearchTextType.of())), - DSL.namedArgument("query", literal("search query\\_"))))); - - assertJsonEquals("{\n" - + " \"wildcard\" : {\n" - + " \"field\" : {\n" - + " \"wildcard\" : \"search query\\\\*\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}", - buildQuery(DSL.wildcard_query( - DSL.namedArgument("field", - new ReferenceExpression("field", OpenSearchTextType.of())), - DSL.namedArgument("query", literal("search query\\*"))))); - - assertJsonEquals("{\n" - + " \"wildcard\" : {\n" - + " \"field\" : {\n" - + " \"wildcard\" : \"search query\\\\?\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}", - buildQuery(DSL.wildcard_query( - DSL.namedArgument("field", - new ReferenceExpression("field", OpenSearchTextType.of())), - DSL.namedArgument("query", literal("search query\\?"))))); + assertJsonEquals( + "{\n" + + " \"wildcard\" : {\n" + + " \"field\" : {\n" + + " \"wildcard\" : \"search query%\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + buildQuery( + DSL.wildcard_query( + DSL.namedArgument( + "field", new ReferenceExpression("field", OpenSearchTextType.of())), + DSL.namedArgument("query", literal("search query\\%"))))); + + assertJsonEquals( + "{\n" + + " \"wildcard\" : {\n" + + " \"field\" : {\n" + + " \"wildcard\" : \"search query_\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + buildQuery( + DSL.wildcard_query( + DSL.namedArgument( + "field", new ReferenceExpression("field", OpenSearchTextType.of())), + DSL.namedArgument("query", literal("search query\\_"))))); + + assertJsonEquals( + "{\n" + + " \"wildcard\" : {\n" + + " 
\"field\" : {\n" + + " \"wildcard\" : \"search query\\\\*\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + buildQuery( + DSL.wildcard_query( + DSL.namedArgument( + "field", new ReferenceExpression("field", OpenSearchTextType.of())), + DSL.namedArgument("query", literal("search query\\*"))))); + + assertJsonEquals( + "{\n" + + " \"wildcard\" : {\n" + + " \"field\" : {\n" + + " \"wildcard\" : \"search query\\\\?\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + buildQuery( + DSL.wildcard_query( + DSL.namedArgument( + "field", new ReferenceExpression("field", OpenSearchTextType.of())), + DSL.namedArgument("query", literal("search query\\?"))))); } @Test void should_build_wildcard_query_with_default_parameters() { - assertJsonEquals("{\n" - + " \"wildcard\" : {\n" - + " \"field\" : {\n" - + " \"wildcard\" : \"search query*\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}", - buildQuery(DSL.wildcard_query( - DSL.namedArgument("field", - new ReferenceExpression("field", OpenSearchTextType.of())), - DSL.namedArgument("query", literal("search query*"))))); + assertJsonEquals( + "{\n" + + " \"wildcard\" : {\n" + + " \"field\" : {\n" + + " \"wildcard\" : \"search query*\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + buildQuery( + DSL.wildcard_query( + DSL.namedArgument( + "field", new ReferenceExpression("field", OpenSearchTextType.of())), + DSL.namedArgument("query", literal("search query*"))))); } @Test void should_build_wildcard_query_query_with_custom_parameters() { - assertJsonEquals("{\n" - + " \"wildcard\" : {\n" - + " \"field\" : {\n" - + " \"wildcard\" : \"search query*\",\n" - + " \"boost\" : 0.6,\n" - + " \"case_insensitive\" : true,\n" - + " \"rewrite\" : \"constant_score_boolean\"\n" - + " }\n" - + " }\n" - + "}", - buildQuery(DSL.wildcard_query( - DSL.namedArgument("field", - new ReferenceExpression("field", OpenSearchTextType.of())), - DSL.namedArgument("query", literal("search query*")), - 
DSL.namedArgument("boost", literal("0.6")), - DSL.namedArgument("case_insensitive", literal("true")), - DSL.namedArgument("rewrite", literal("constant_score_boolean"))))); + assertJsonEquals( + "{\n" + + " \"wildcard\" : {\n" + + " \"field\" : {\n" + + " \"wildcard\" : \"search query*\",\n" + + " \"boost\" : 0.6,\n" + + " \"case_insensitive\" : true,\n" + + " \"rewrite\" : \"constant_score_boolean\"\n" + + " }\n" + + " }\n" + + "}", + buildQuery( + DSL.wildcard_query( + DSL.namedArgument( + "field", new ReferenceExpression("field", OpenSearchTextType.of())), + DSL.namedArgument("query", literal("search query*")), + DSL.namedArgument("boost", literal("0.6")), + DSL.namedArgument("case_insensitive", literal("true")), + DSL.namedArgument("rewrite", literal("constant_score_boolean"))))); } @Test void query_invalid_parameter() { - FunctionExpression expr = DSL.query( - DSL.namedArgument("invalid_parameter", literal("invalid_value"))); - assertThrows(SemanticCheckException.class, () -> buildQuery(expr), - "Parameter invalid_parameter is invalid for query function."); + FunctionExpression expr = + DSL.query(DSL.namedArgument("invalid_parameter", literal("invalid_value"))); + assertThrows( + SemanticCheckException.class, + () -> buildQuery(expr), + "Parameter invalid_parameter is invalid for query function."); } @Test void query_invalid_fields_parameter_exception_message() { - FunctionExpression expr = DSL.query( - DSL.namedArgument("fields", literal("field1")), - DSL.namedArgument("query", literal("search query"))); + FunctionExpression expr = + DSL.query( + DSL.namedArgument("fields", literal("field1")), + DSL.namedArgument("query", literal("search query"))); var exception = assertThrows(SemanticCheckException.class, () -> buildQuery(expr)); assertEquals("Parameter fields is invalid for query function.", exception.getMessage()); @@ -923,7 +991,8 @@ void query_invalid_fields_parameter_exception_message() { @Test void should_build_query_query_with_default_parameters() { - 
var expected = "{\n" + var expected = + "{\n" + " \"query_string\" : {\n" + " \"query\" : \"field1:query_value\",\n" + " \"fields\" : [],\n" @@ -942,13 +1011,14 @@ void should_build_query_query_with_default_parameters() { + " }\n" + "}"; - assertJsonEquals(expected, buildQuery(DSL.query( - DSL.namedArgument("query", literal("field1:query_value"))))); + assertJsonEquals( + expected, buildQuery(DSL.query(DSL.namedArgument("query", literal("field1:query_value"))))); } @Test void should_build_query_query_with_custom_parameters() { - var expected = "{\n" + var expected = + "{\n" + " \"query_string\" : {\n" + " \"query\" : \"field1:query_value\",\n" + " \"fields\" : [],\n" @@ -971,125 +1041,147 @@ void should_build_query_query_with_custom_parameters() { + " \"boost\" : 2.0,\n" + " }\n" + "}"; - var actual = buildQuery( + var actual = + buildQuery( DSL.query( - DSL.namedArgument("query", literal("field1:query_value")), - DSL.namedArgument("analyze_wildcard", literal("true")), - DSL.namedArgument("analyzer", literal("keyword")), - DSL.namedArgument("auto_generate_synonyms_phrase_query", literal("false")), - DSL.namedArgument("default_operator", literal("AND")), - DSL.namedArgument("fuzzy_max_expansions", literal("10")), - DSL.namedArgument("fuzzy_prefix_length", literal("2")), - DSL.namedArgument("fuzzy_transpositions", literal("false")), - DSL.namedArgument("lenient", literal("false")), - DSL.namedArgument("minimum_should_match", literal("3")), - DSL.namedArgument("tie_breaker", literal("1.3")), - DSL.namedArgument("type", literal("cross_fields")), - DSL.namedArgument("boost", literal("2.0")))); + DSL.namedArgument("query", literal("field1:query_value")), + DSL.namedArgument("analyze_wildcard", literal("true")), + DSL.namedArgument("analyzer", literal("keyword")), + DSL.namedArgument("auto_generate_synonyms_phrase_query", literal("false")), + DSL.namedArgument("default_operator", literal("AND")), + DSL.namedArgument("fuzzy_max_expansions", literal("10")), + 
DSL.namedArgument("fuzzy_prefix_length", literal("2")), + DSL.namedArgument("fuzzy_transpositions", literal("false")), + DSL.namedArgument("lenient", literal("false")), + DSL.namedArgument("minimum_should_match", literal("3")), + DSL.namedArgument("tie_breaker", literal("1.3")), + DSL.namedArgument("type", literal("cross_fields")), + DSL.namedArgument("boost", literal("2.0")))); assertJsonEquals(expected, actual); } @Test void query_string_invalid_parameter() { - FunctionExpression expr = DSL.query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "field1", ExprValueUtils.floatValue(1.F), - "field2", ExprValueUtils.floatValue(.3F)))))), - DSL.namedArgument("query", literal("search query")), - DSL.namedArgument("invalid_parameter", literal("invalid_value"))); - assertThrows(SemanticCheckException.class, () -> buildQuery(expr), + FunctionExpression expr = + DSL.query_string( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "field1", ExprValueUtils.floatValue(1.F), + "field2", ExprValueUtils.floatValue(.3F)))))), + DSL.namedArgument("query", literal("search query")), + DSL.namedArgument("invalid_parameter", literal("invalid_value"))); + assertThrows( + SemanticCheckException.class, + () -> buildQuery(expr), "Parameter invalid_parameter is invalid for match function."); } @Test void should_build_query_string_query_with_default_parameters_multiple_fields() { - var expected = "{\n" - + " \"query_string\" : {\n" - + " \"query\" : \"query_value\",\n" - + " \"fields\" : [%s],\n" - + " \"type\" : \"best_fields\",\n" - + " \"default_operator\" : \"or\",\n" - + " \"max_determinized_states\" : 10000,\n" - + " \"enable_position_increments\" : true,\n" - + " \"fuzziness\" : \"AUTO\",\n" - + " \"fuzzy_prefix_length\" : 0,\n" - + " \"fuzzy_max_expansions\" : 50,\n" - + " \"phrase_slop\" : 0,\n" - + " \"escape\" : false,\n" - + " 
\"auto_generate_synonyms_phrase_query\" : true,\n" - + " \"fuzzy_transpositions\" : true,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + "}"; - var actual = buildQuery(DSL.query_string( - DSL.namedArgument("fields", DSL.literal(new ExprTupleValue( - new LinkedHashMap<>(ImmutableMap.of( - "field1", ExprValueUtils.floatValue(1.F), - "field2", ExprValueUtils.floatValue(.3F)))))), - DSL.namedArgument("query", literal("query_value")))); + var expected = + "{\n" + + " \"query_string\" : {\n" + + " \"query\" : \"query_value\",\n" + + " \"fields\" : [%s],\n" + + " \"type\" : \"best_fields\",\n" + + " \"default_operator\" : \"or\",\n" + + " \"max_determinized_states\" : 10000,\n" + + " \"enable_position_increments\" : true,\n" + + " \"fuzziness\" : \"AUTO\",\n" + + " \"fuzzy_prefix_length\" : 0,\n" + + " \"fuzzy_max_expansions\" : 50,\n" + + " \"phrase_slop\" : 0,\n" + + " \"escape\" : false,\n" + + " \"auto_generate_synonyms_phrase_query\" : true,\n" + + " \"fuzzy_transpositions\" : true,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + "}"; + var actual = + buildQuery( + DSL.query_string( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "field1", ExprValueUtils.floatValue(1.F), + "field2", ExprValueUtils.floatValue(.3F)))))), + DSL.namedArgument("query", literal("query_value")))); var ex1 = String.format(expected, "\"field1^1.0\", \"field2^0.3\""); var ex2 = String.format(expected, "\"field2^0.3\", \"field1^1.0\""); - assertTrue(new JSONObject(ex1).similar(new JSONObject(actual)) + assertTrue( + new JSONObject(ex1).similar(new JSONObject(actual)) || new JSONObject(ex2).similar(new JSONObject(actual)), StringUtils.format("Actual %s doesn't match neither expected %s nor %s", actual, ex1, ex2)); } @Test void should_build_query_string_query_with_custom_parameters() { - var expected = "{\n" - + " \"query_string\" : {\n" - + " \"query\" : \"query_value\",\n" - + " \"fields\" : [%s],\n" - + " \"type\" : \"cross_fields\",\n" 
- + " \"tie_breaker\" : 1.3,\n" - + " \"default_operator\" : \"and\",\n" - + " \"analyzer\" : \"keyword\",\n" - + " \"max_determinized_states\" : 10000,\n" - + " \"enable_position_increments\" : true,\n" - + " \"fuzziness\" : \"AUTO\",\n" - + " \"fuzzy_prefix_length\" : 2,\n" - + " \"fuzzy_max_expansions\" : 10,\n" - + " \"phrase_slop\" : 0,\n" - + " \"analyze_wildcard\" : true,\n" - + " \"minimum_should_match\" : \"3\",\n" - + " \"lenient\" : false,\n" - + " \"escape\" : false,\n" - + " \"auto_generate_synonyms_phrase_query\" : false,\n" - + " \"fuzzy_transpositions\" : false,\n" - + " \"boost\" : 2.0,\n" - + " }\n" - + "}"; - var actual = buildQuery( - DSL.query_string( - DSL.namedArgument("fields", DSL.literal( - ExprValueUtils.tupleValue(ImmutableMap.of("field1", 1.F, "field2", .3F)))), - DSL.namedArgument("query", literal("query_value")), - DSL.namedArgument("analyze_wildcard", literal("true")), - DSL.namedArgument("analyzer", literal("keyword")), - DSL.namedArgument("auto_generate_synonyms_phrase_query", literal("false")), - DSL.namedArgument("default_operator", literal("AND")), - DSL.namedArgument("fuzzy_max_expansions", literal("10")), - DSL.namedArgument("fuzzy_prefix_length", literal("2")), - DSL.namedArgument("fuzzy_transpositions", literal("false")), - DSL.namedArgument("lenient", literal("false")), - DSL.namedArgument("minimum_should_match", literal("3")), - DSL.namedArgument("tie_breaker", literal("1.3")), - DSL.namedArgument("type", literal("cross_fields")), - DSL.namedArgument("boost", literal("2.0")))); + var expected = + "{\n" + + " \"query_string\" : {\n" + + " \"query\" : \"query_value\",\n" + + " \"fields\" : [%s],\n" + + " \"type\" : \"cross_fields\",\n" + + " \"tie_breaker\" : 1.3,\n" + + " \"default_operator\" : \"and\",\n" + + " \"analyzer\" : \"keyword\",\n" + + " \"max_determinized_states\" : 10000,\n" + + " \"enable_position_increments\" : true,\n" + + " \"fuzziness\" : \"AUTO\",\n" + + " \"fuzzy_prefix_length\" : 2,\n" + + " 
\"fuzzy_max_expansions\" : 10,\n" + + " \"phrase_slop\" : 0,\n" + + " \"analyze_wildcard\" : true,\n" + + " \"minimum_should_match\" : \"3\",\n" + + " \"lenient\" : false,\n" + + " \"escape\" : false,\n" + + " \"auto_generate_synonyms_phrase_query\" : false,\n" + + " \"fuzzy_transpositions\" : false,\n" + + " \"boost\" : 2.0,\n" + + " }\n" + + "}"; + var actual = + buildQuery( + DSL.query_string( + DSL.namedArgument( + "fields", + DSL.literal( + ExprValueUtils.tupleValue(ImmutableMap.of("field1", 1.F, "field2", .3F)))), + DSL.namedArgument("query", literal("query_value")), + DSL.namedArgument("analyze_wildcard", literal("true")), + DSL.namedArgument("analyzer", literal("keyword")), + DSL.namedArgument("auto_generate_synonyms_phrase_query", literal("false")), + DSL.namedArgument("default_operator", literal("AND")), + DSL.namedArgument("fuzzy_max_expansions", literal("10")), + DSL.namedArgument("fuzzy_prefix_length", literal("2")), + DSL.namedArgument("fuzzy_transpositions", literal("false")), + DSL.namedArgument("lenient", literal("false")), + DSL.namedArgument("minimum_should_match", literal("3")), + DSL.namedArgument("tie_breaker", literal("1.3")), + DSL.namedArgument("type", literal("cross_fields")), + DSL.namedArgument("boost", literal("2.0")))); var ex1 = String.format(expected, "\"field1^1.0\", \"field2^0.3\""); var ex2 = String.format(expected, "\"field2^0.3\", \"field1^1.0\""); - assertTrue(new JSONObject(ex1).similar(new JSONObject(actual)) + assertTrue( + new JSONObject(ex1).similar(new JSONObject(actual)) || new JSONObject(ex2).similar(new JSONObject(actual)), StringUtils.format("Actual %s doesn't match neither expected %s nor %s", actual, ex1, ex2)); } @Test void should_build_query_string_query_with_default_parameters_single_field() { - assertJsonEquals("{\n" + assertJsonEquals( + "{\n" + " \"query_string\" : {\n" + " \"query\" : \"query_value\",\n" + " \"fields\" : [\n" @@ -1109,11 +1201,15 @@ void 
should_build_query_string_query_with_default_parameters_single_field() { + " \"boost\" : 1.0,\n" + " }\n" + "}", - buildQuery(DSL.query_string( - DSL.namedArgument("fields", DSL.literal(new ExprTupleValue( - new LinkedHashMap<>(ImmutableMap.of( - "field1", ExprValueUtils.floatValue(1.F)))))), - DSL.namedArgument("query", literal("query_value"))))); + buildQuery( + DSL.query_string( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("field1", ExprValueUtils.floatValue(1.F)))))), + DSL.namedArgument("query", literal("query_value"))))); } @Test @@ -1122,7 +1218,8 @@ void should_build_query_string_query_with_default_parameters_single_field() { // 2) `flags` are printed by OpenSearch as an integer // 3) `minimum_should_match` printed as a string void should_build_simple_query_string_query_with_default_parameters_single_field() { - assertJsonEquals("{\n" + assertJsonEquals( + "{\n" + " \"simple_query_string\" : {\n" + " \"query\" : \"search query\",\n" + " \"fields\" : [\n" @@ -1138,16 +1235,21 @@ void should_build_simple_query_string_query_with_default_parameters_single_field + " \"boost\" : 1.0\n" + " }\n" + "}", - buildQuery(DSL.simple_query_string( - DSL.namedArgument("fields", DSL.literal(new ExprTupleValue( - new LinkedHashMap<>(ImmutableMap.of( - "field1", ExprValueUtils.floatValue(1.F)))))), - DSL.namedArgument("query", literal("search query"))))); + buildQuery( + DSL.simple_query_string( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("field1", ExprValueUtils.floatValue(1.F)))))), + DSL.namedArgument("query", literal("search query"))))); } @Test void should_build_simple_query_string_query_with_default_parameters_multiple_fields() { - var expected = "{\n" + var expected = + "{\n" + " \"simple_query_string\" : {\n" + " \"query\" : \"search query\",\n" + " \"fields\" : [%s],\n" @@ -1161,23 +1263,31 @@ void 
should_build_simple_query_string_query_with_default_parameters_multiple_fie + " \"boost\" : 1.0\n" + " }\n" + "}"; - var actual = buildQuery(DSL.simple_query_string( - DSL.namedArgument("fields", DSL.literal(new ExprTupleValue( - new LinkedHashMap<>(ImmutableMap.of( - "field1", ExprValueUtils.floatValue(1.F), - "field2", ExprValueUtils.floatValue(.3F)))))), - DSL.namedArgument("query", literal("search query")))); + var actual = + buildQuery( + DSL.simple_query_string( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "field1", ExprValueUtils.floatValue(1.F), + "field2", ExprValueUtils.floatValue(.3F)))))), + DSL.namedArgument("query", literal("search query")))); var ex1 = String.format(expected, "\"field1^1.0\", \"field2^0.3\""); var ex2 = String.format(expected, "\"field2^0.3\", \"field1^1.0\""); - assertTrue(new JSONObject(ex1).similar(new JSONObject(actual)) - || new JSONObject(ex2).similar(new JSONObject(actual)), + assertTrue( + new JSONObject(ex1).similar(new JSONObject(actual)) + || new JSONObject(ex2).similar(new JSONObject(actual)), StringUtils.format("Actual %s doesn't match neither expected %s nor %s", actual, ex1, ex2)); } @Test void should_build_simple_query_string_query_with_custom_parameters() { - var expected = "{\n" + var expected = + "{\n" + " \"simple_query_string\" : {\n" + " \"query\" : \"search query\",\n" + " \"fields\" : [%s],\n" @@ -1194,10 +1304,13 @@ void should_build_simple_query_string_query_with_custom_parameters() { + " \"boost\" : 2.0\n" + " }\n" + "}"; - var actual = buildQuery( + var actual = + buildQuery( DSL.simple_query_string( - DSL.namedArgument("fields", DSL.literal( - ExprValueUtils.tupleValue(ImmutableMap.of("field1", 1.F, "field2", .3F)))), + DSL.namedArgument( + "fields", + DSL.literal( + ExprValueUtils.tupleValue(ImmutableMap.of("field1", 1.F, "field2", .3F)))), DSL.namedArgument("query", literal("search query")), DSL.namedArgument("analyze_wildcard", 
literal("true")), DSL.namedArgument("analyzer", literal("keyword")), @@ -1213,95 +1326,105 @@ void should_build_simple_query_string_query_with_custom_parameters() { var ex1 = String.format(expected, "\"field1^1.0\", \"field2^0.3\""); var ex2 = String.format(expected, "\"field2^0.3\", \"field1^1.0\""); - assertTrue(new JSONObject(ex1).similar(new JSONObject(actual)) - || new JSONObject(ex2).similar(new JSONObject(actual)), + assertTrue( + new JSONObject(ex1).similar(new JSONObject(actual)) + || new JSONObject(ex2).similar(new JSONObject(actual)), StringUtils.format("Actual %s doesn't match neither expected %s nor %s", actual, ex1, ex2)); } @Test void simple_query_string_invalid_parameter() { - FunctionExpression expr = DSL.simple_query_string( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "field1", ExprValueUtils.floatValue(1.F), - "field2", ExprValueUtils.floatValue(.3F)))))), - DSL.namedArgument("query", literal("search query")), - DSL.namedArgument("invalid_parameter", literal("invalid_value"))); - assertThrows(SemanticCheckException.class, () -> buildQuery(expr), + FunctionExpression expr = + DSL.simple_query_string( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "field1", ExprValueUtils.floatValue(1.F), + "field2", ExprValueUtils.floatValue(.3F)))))), + DSL.namedArgument("query", literal("search query")), + DSL.namedArgument("invalid_parameter", literal("invalid_value"))); + assertThrows( + SemanticCheckException.class, + () -> buildQuery(expr), "Parameter invalid_parameter is invalid for match function."); } @Test void match_phrase_invalid_parameter() { - FunctionExpression expr = DSL.match_phrase( - DSL.namedArgument("field", - new ReferenceExpression("message", OpenSearchTextType.of())), - DSL.namedArgument("query", literal("search query")), - DSL.namedArgument("invalid_parameter", literal("invalid_value"))); + FunctionExpression expr = 
+ DSL.match_phrase( + DSL.namedArgument("field", new ReferenceExpression("message", OpenSearchTextType.of())), + DSL.namedArgument("query", literal("search query")), + DSL.namedArgument("invalid_parameter", literal("invalid_value"))); var msg = assertThrows(SemanticCheckException.class, () -> buildQuery(expr)).getMessage(); assertTrue(msg.startsWith("Parameter invalid_parameter is invalid for match_phrase function.")); } @Test void relevancy_func_invalid_arg_values() { - final var field = DSL.namedArgument("field", - new ReferenceExpression("message", OpenSearchTextType.of())); - final var fields = DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "field1", ExprValueUtils.floatValue(1.F), - "field2", ExprValueUtils.floatValue(.3F)))))); + final var field = + DSL.namedArgument("field", new ReferenceExpression("message", OpenSearchTextType.of())); + final var fields = + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "field1", ExprValueUtils.floatValue(1.F), + "field2", ExprValueUtils.floatValue(.3F)))))); final var query = DSL.namedArgument("query", literal("search query")); - var slopTest = DSL.match_phrase(field, query, - DSL.namedArgument("slop", literal("1.5"))); + var slopTest = DSL.match_phrase(field, query, DSL.namedArgument("slop", literal("1.5"))); var msg = assertThrows(RuntimeException.class, () -> buildQuery(slopTest)).getMessage(); assertEquals("Invalid slop value: '1.5'. Accepts only integer values.", msg); - var ztqTest = DSL.match_phrase(field, query, - DSL.namedArgument("zero_terms_query", literal("meow"))); + var ztqTest = + DSL.match_phrase(field, query, DSL.namedArgument("zero_terms_query", literal("meow"))); msg = assertThrows(RuntimeException.class, () -> buildQuery(ztqTest)).getMessage(); assertEquals( "Invalid zero_terms_query value: 'meow'. 
Available values are: NONE, ALL, NULL.", msg); - var boostTest = DSL.match(field, query, - DSL.namedArgument("boost", literal("pewpew"))); + var boostTest = DSL.match(field, query, DSL.namedArgument("boost", literal("pewpew"))); msg = assertThrows(RuntimeException.class, () -> buildQuery(boostTest)).getMessage(); assertEquals( "Invalid boost value: 'pewpew'. Accepts only floating point values greater than 0.", msg); - var boolTest = DSL.query_string(fields, query, - DSL.namedArgument("escape", literal("42"))); + var boolTest = DSL.query_string(fields, query, DSL.namedArgument("escape", literal("42"))); msg = assertThrows(RuntimeException.class, () -> buildQuery(boolTest)).getMessage(); assertEquals( "Invalid escape value: '42'. Accepts only boolean values: 'true' or 'false'.", msg); - var typeTest = DSL.multi_match(fields, query, - DSL.namedArgument("type", literal("42"))); + var typeTest = DSL.multi_match(fields, query, DSL.namedArgument("type", literal("42"))); msg = assertThrows(RuntimeException.class, () -> buildQuery(typeTest)).getMessage(); assertTrue(msg.startsWith("Invalid type value: '42'. Available values are:")); - var operatorTest = DSL.simple_query_string(fields, query, - DSL.namedArgument("default_operator", literal("42"))); + var operatorTest = + DSL.simple_query_string( + fields, query, DSL.namedArgument("default_operator", literal("42"))); msg = assertThrows(RuntimeException.class, () -> buildQuery(operatorTest)).getMessage(); assertTrue(msg.startsWith("Invalid default_operator value: '42'. Available values are:")); - var flagsTest = DSL.simple_query_string(fields, query, - DSL.namedArgument("flags", literal("42"))); + var flagsTest = + DSL.simple_query_string(fields, query, DSL.namedArgument("flags", literal("42"))); msg = assertThrows(RuntimeException.class, () -> buildQuery(flagsTest)).getMessage(); assertTrue(msg.startsWith("Invalid flags value: '42'. 
Available values are:")); - var fuzzinessTest = DSL.match_bool_prefix(field, query, - DSL.namedArgument("fuzziness", literal("AUTO:"))); + var fuzzinessTest = + DSL.match_bool_prefix(field, query, DSL.namedArgument("fuzziness", literal("AUTO:"))); msg = assertThrows(RuntimeException.class, () -> buildQuery(fuzzinessTest)).getMessage(); assertTrue(msg.startsWith("Invalid fuzziness value: 'AUTO:'. Available values are:")); - var rewriteTest = DSL.match_bool_prefix(field, query, - DSL.namedArgument("fuzzy_rewrite", literal("42"))); + var rewriteTest = + DSL.match_bool_prefix(field, query, DSL.namedArgument("fuzzy_rewrite", literal("42"))); msg = assertThrows(RuntimeException.class, () -> buildQuery(rewriteTest)).getMessage(); assertTrue(msg.startsWith("Invalid fuzzy_rewrite value: '42'. Available values are:")); - var timezoneTest = DSL.query_string(fields, query, - DSL.namedArgument("time_zone", literal("42"))); + var timezoneTest = + DSL.query_string(fields, query, DSL.namedArgument("time_zone", literal("42"))); msg = assertThrows(RuntimeException.class, () -> buildQuery(timezoneTest)).getMessage(); assertTrue(msg.startsWith("Invalid time_zone value: '42'.")); } @@ -1323,30 +1446,39 @@ void should_build_match_bool_prefix_query_with_default_parameters() { + "}", buildQuery( DSL.match_bool_prefix( - DSL.namedArgument("field", - new ReferenceExpression("message", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("message", OpenSearchTextType.of())), DSL.namedArgument("query", literal("search query"))))); } @Test void multi_match_missing_fields_even_with_struct() { - FunctionExpression expr = DSL.multi_match( - DSL.namedArgument("something-but-not-fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "pewpew", ExprValueUtils.integerValue(42)))))), - DSL.namedArgument("query", literal("search query")), - DSL.namedArgument("analyzer", literal("keyword"))); + FunctionExpression expr = + DSL.multi_match( + 
DSL.namedArgument( + "something-but-not-fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of("pewpew", ExprValueUtils.integerValue(42)))))), + DSL.namedArgument("query", literal("search query")), + DSL.namedArgument("analyzer", literal("keyword"))); var msg = assertThrows(SemanticCheckException.class, () -> buildQuery(expr)).getMessage(); assertEquals("'fields' parameter is missing.", msg); } @Test void multi_match_missing_query_even_with_struct() { - FunctionExpression expr = DSL.multi_match( - DSL.namedArgument("fields", DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "field1", ExprValueUtils.floatValue(1.F), - "field2", ExprValueUtils.floatValue(.3F)))))), + FunctionExpression expr = + DSL.multi_match( + DSL.namedArgument( + "fields", + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "field1", ExprValueUtils.floatValue(1.F), + "field2", ExprValueUtils.floatValue(.3F)))))), DSL.namedArgument("analyzer", literal("keyword"))); var msg = assertThrows(SemanticCheckException.class, () -> buildQuery(expr)).getMessage(); assertEquals("'query' parameter is missing", msg); @@ -1368,8 +1500,8 @@ void should_build_match_phrase_prefix_query_with_default_parameters() { + "}", buildQuery( DSL.match_phrase_prefix( - DSL.namedArgument("field", - new ReferenceExpression("message", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("message", OpenSearchTextType.of())), DSL.namedArgument("query", literal("search query"))))); } @@ -1390,8 +1522,8 @@ void should_build_match_phrase_prefix_query_with_non_default_parameters() { + "}", buildQuery( DSL.match_phrase_prefix( - DSL.namedArgument("field", - new ReferenceExpression("message", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("message", OpenSearchTextType.of())), DSL.namedArgument("query", literal("search query")), DSL.namedArgument("boost", literal("1.2")), 
DSL.namedArgument("max_expansions", literal("42")), @@ -1400,30 +1532,31 @@ void should_build_match_phrase_prefix_query_with_non_default_parameters() { @Test void cast_to_string_in_filter() { - String json = "{\n" - + " \"term\" : {\n" - + " \"string_value\" : {\n" - + " \"value\" : \"1\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}"; + String json = + "{\n" + + " \"term\" : {\n" + + " \"string_value\" : {\n" + + " \"value\" : \"1\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}"; - assertJsonEquals(json, buildQuery( - DSL.equal(ref("string_value", STRING), DSL.castString(literal(1))))); - assertJsonEquals(json, buildQuery( - DSL.equal(ref("string_value", STRING), DSL.castString(literal("1"))))); + assertJsonEquals( + json, buildQuery(DSL.equal(ref("string_value", STRING), DSL.castString(literal(1))))); + assertJsonEquals( + json, buildQuery(DSL.equal(ref("string_value", STRING), DSL.castString(literal("1"))))); } private Float castToFloat(Object o) { if (o instanceof Number) { - return ((Number)o).floatValue(); + return ((Number) o).floatValue(); } if (o instanceof String) { return Float.parseFloat((String) o); } if (o instanceof Boolean) { - return ((Boolean)o) ? 1F : 0F; + return ((Boolean) o) ? 1F : 0F; } // unreachable code throw new IllegalArgumentException(); @@ -1431,13 +1564,13 @@ private Float castToFloat(Object o) { private Integer castToInteger(Object o) { if (o instanceof Number) { - return ((Number)o).intValue(); + return ((Number) o).intValue(); } if (o instanceof String) { return Integer.parseInt((String) o); } if (o instanceof Boolean) { - return ((Boolean)o) ? 1 : 0; + return ((Boolean) o) ? 
1 : 0; } // unreachable code throw new IllegalArgumentException(); @@ -1446,75 +1579,85 @@ private Integer castToInteger(Object o) { @ParameterizedTest(name = "castByte({0})") @MethodSource({"numericCastSource"}) void cast_to_byte_in_filter(LiteralExpression expr) { - assertJsonEquals(String.format( - "{\n" - + " \"term\" : {\n" - + " \"byte_value\" : {\n" - + " \"value\" : %d,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}", castToInteger(expr.valueOf().value())), + assertJsonEquals( + String.format( + "{\n" + + " \"term\" : {\n" + + " \"byte_value\" : {\n" + + " \"value\" : %d,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + castToInteger(expr.valueOf().value())), buildQuery(DSL.equal(ref("byte_value", BYTE), DSL.castByte(expr)))); } @ParameterizedTest(name = "castShort({0})") @MethodSource({"numericCastSource"}) void cast_to_short_in_filter(LiteralExpression expr) { - assertJsonEquals(String.format( - "{\n" - + " \"term\" : {\n" - + " \"short_value\" : {\n" - + " \"value\" : %d,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}", castToInteger(expr.valueOf().value())), + assertJsonEquals( + String.format( + "{\n" + + " \"term\" : {\n" + + " \"short_value\" : {\n" + + " \"value\" : %d,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + castToInteger(expr.valueOf().value())), buildQuery(DSL.equal(ref("short_value", SHORT), DSL.castShort(expr)))); } @ParameterizedTest(name = "castInt({0})") @MethodSource({"numericCastSource"}) void cast_to_int_in_filter(LiteralExpression expr) { - assertJsonEquals(String.format( - "{\n" - + " \"term\" : {\n" - + " \"integer_value\" : {\n" - + " \"value\" : %d,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}", castToInteger(expr.valueOf().value())), + assertJsonEquals( + String.format( + "{\n" + + " \"term\" : {\n" + + " \"integer_value\" : {\n" + + " \"value\" : %d,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + castToInteger(expr.valueOf().value())), 
buildQuery(DSL.equal(ref("integer_value", INTEGER), DSL.castInt(expr)))); } @ParameterizedTest(name = "castLong({0})") @MethodSource({"numericCastSource"}) void cast_to_long_in_filter(LiteralExpression expr) { - assertJsonEquals(String.format( - "{\n" - + " \"term\" : {\n" - + " \"long_value\" : {\n" - + " \"value\" : %d,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}", castToInteger(expr.valueOf().value())), + assertJsonEquals( + String.format( + "{\n" + + " \"term\" : {\n" + + " \"long_value\" : {\n" + + " \"value\" : %d,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + castToInteger(expr.valueOf().value())), buildQuery(DSL.equal(ref("long_value", LONG), DSL.castLong(expr)))); } @ParameterizedTest(name = "castFloat({0})") @MethodSource({"numericCastSource"}) void cast_to_float_in_filter(LiteralExpression expr) { - assertJsonEquals(String.format( - "{\n" - + " \"term\" : {\n" - + " \"float_value\" : {\n" - + " \"value\" : %f,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}", castToFloat(expr.valueOf().value())), + assertJsonEquals( + String.format( + "{\n" + + " \"term\" : {\n" + + " \"float_value\" : {\n" + + " \"value\" : %f,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + castToFloat(expr.valueOf().value())), buildQuery(DSL.equal(ref("float_value", FLOAT), DSL.castFloat(expr)))); } @@ -1523,32 +1666,35 @@ void cast_to_float_in_filter(LiteralExpression expr) { void cast_to_double_in_filter(LiteralExpression expr) { // double values affected by floating point imprecision, so we can't compare them in json // (Double)(Float)3.14 -> 3.14000010490417 - assertEquals(castToFloat(expr.valueOf().value()), - DSL.castDouble(expr).valueOf().doubleValue(), 0.00001); + assertEquals( + castToFloat(expr.valueOf().value()), DSL.castDouble(expr).valueOf().doubleValue(), 0.00001); - assertJsonEquals(String.format( - "{\n" - + " \"term\" : {\n" - + " \"double_value\" : {\n" - + " \"value\" : %2.20f,\n" - + " \"boost\" : 1.0\n" - + " }\n" 
- + " }\n" - + "}", DSL.castDouble(expr).valueOf().doubleValue()), + assertJsonEquals( + String.format( + "{\n" + + " \"term\" : {\n" + + " \"double_value\" : {\n" + + " \"value\" : %2.20f,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}", + DSL.castDouble(expr).valueOf().doubleValue()), buildQuery(DSL.equal(ref("double_value", DOUBLE), DSL.castDouble(expr)))); } @ParameterizedTest(name = "castBooleanTrue({0})") @MethodSource({"booleanTrueCastSource"}) void cast_to_boolean_true_in_filter(LiteralExpression expr) { - String json = "{\n" - + " \"term\" : {\n" - + " \"boolean_value\" : {\n" - + " \"value\" : true,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}"; + String json = + "{\n" + + " \"term\" : {\n" + + " \"boolean_value\" : {\n" + + " \"value\" : true,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}"; assertJsonEquals( json, buildQuery(DSL.equal(ref("boolean_value", BOOLEAN), DSL.castBoolean(expr)))); @@ -1557,14 +1703,15 @@ void cast_to_boolean_true_in_filter(LiteralExpression expr) { @ParameterizedTest(name = "castBooleanFalse({0})") @MethodSource({"booleanFalseCastSource"}) void cast_to_boolean_false_in_filter(LiteralExpression expr) { - String json = "{\n" - + " \"term\" : {\n" - + " \"boolean_value\" : {\n" - + " \"value\" : false,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}"; + String json = + "{\n" + + " \"term\" : {\n" + + " \"boolean_value\" : {\n" + + " \"value\" : false,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}"; assertJsonEquals( json, buildQuery(DSL.equal(ref("boolean_value", BOOLEAN), DSL.castBoolean(expr)))); @@ -1573,118 +1720,153 @@ void cast_to_boolean_false_in_filter(LiteralExpression expr) { @Test void cast_from_boolean() { Expression booleanExpr = literal(false); - String json = "{\n" - + " \"term\" : {\n" - + " \"my_value\" : {\n" - + " \"value\" : 0,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}"; - assertJsonEquals(json, buildQuery( - DSL.equal(ref("my_value", BYTE), 
DSL.castByte(booleanExpr)))); - assertJsonEquals(json, buildQuery( - DSL.equal(ref("my_value", SHORT), DSL.castShort(booleanExpr)))); - assertJsonEquals(json, buildQuery( - DSL.equal(ref("my_value", INTEGER), DSL.castInt(booleanExpr)))); - assertJsonEquals(json, buildQuery( - DSL.equal(ref("my_value", LONG), DSL.castLong(booleanExpr)))); - - json = "{\n" - + " \"term\" : {\n" - + " \"my_value\" : {\n" - + " \"value\" : 0.0,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}"; - assertJsonEquals(json, buildQuery( - DSL.equal(ref("my_value", FLOAT), DSL.castFloat(booleanExpr)))); - assertJsonEquals(json, buildQuery( - DSL.equal(ref("my_value", DOUBLE), DSL.castDouble(booleanExpr)))); - - json = "{\n" - + " \"term\" : {\n" - + " \"my_value\" : {\n" - + " \"value\" : \"false\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}"; - assertJsonEquals(json, buildQuery( - DSL.equal(ref("my_value", STRING), DSL.castString(booleanExpr)))); + String json = + "{\n" + + " \"term\" : {\n" + + " \"my_value\" : {\n" + + " \"value\" : 0,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}"; + assertJsonEquals(json, buildQuery(DSL.equal(ref("my_value", BYTE), DSL.castByte(booleanExpr)))); + assertJsonEquals( + json, buildQuery(DSL.equal(ref("my_value", SHORT), DSL.castShort(booleanExpr)))); + assertJsonEquals( + json, buildQuery(DSL.equal(ref("my_value", INTEGER), DSL.castInt(booleanExpr)))); + assertJsonEquals(json, buildQuery(DSL.equal(ref("my_value", LONG), DSL.castLong(booleanExpr)))); + + json = + "{\n" + + " \"term\" : {\n" + + " \"my_value\" : {\n" + + " \"value\" : 0.0,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}"; + assertJsonEquals( + json, buildQuery(DSL.equal(ref("my_value", FLOAT), DSL.castFloat(booleanExpr)))); + assertJsonEquals( + json, buildQuery(DSL.equal(ref("my_value", DOUBLE), DSL.castDouble(booleanExpr)))); + + json = + "{\n" + + " \"term\" : {\n" + + " \"my_value\" : {\n" + + " \"value\" : \"false\",\n" + + " \"boost\" : 1.0\n" + 
+ " }\n" + + " }\n" + + "}"; + assertJsonEquals( + json, buildQuery(DSL.equal(ref("my_value", STRING), DSL.castString(booleanExpr)))); } @Test void cast_to_date_in_filter() { - String json = "{\n" - + " \"term\" : {\n" - + " \"date_value\" : {\n" - + " \"value\" : \"2021-11-08\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}"; + String json = + "{\n" + + " \"term\" : {\n" + + " \"date_value\" : {\n" + + " \"value\" : \"2021-11-08\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}"; - assertJsonEquals(json, buildQuery(DSL.equal( - ref("date_value", DATE), DSL.castDate(literal("2021-11-08"))))); - assertJsonEquals(json, buildQuery(DSL.equal( - ref("date_value", DATE), DSL.castDate(literal(new ExprDateValue("2021-11-08")))))); - assertJsonEquals(json, buildQuery(DSL.equal(ref( - "date_value", DATE), DSL.castDate(literal(new ExprDatetimeValue("2021-11-08 17:00:00")))))); + assertJsonEquals( + json, buildQuery(DSL.equal(ref("date_value", DATE), DSL.castDate(literal("2021-11-08"))))); + assertJsonEquals( + json, + buildQuery( + DSL.equal( + ref("date_value", DATE), DSL.castDate(literal(new ExprDateValue("2021-11-08")))))); + assertJsonEquals( + json, + buildQuery( + DSL.equal( + ref("date_value", DATE), + DSL.castDate(literal(new ExprDatetimeValue("2021-11-08 17:00:00")))))); } @Test void cast_to_time_in_filter() { - String json = "{\n" - + " \"term\" : {\n" - + " \"time_value\" : {\n" - + " \"value\" : \"17:00:00\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}"; + String json = + "{\n" + + " \"term\" : {\n" + + " \"time_value\" : {\n" + + " \"value\" : \"17:00:00\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}"; - assertJsonEquals(json, buildQuery(DSL.equal( - ref("time_value", TIME), DSL.castTime(literal("17:00:00"))))); - assertJsonEquals(json, buildQuery(DSL.equal( - ref("time_value", TIME), DSL.castTime(literal(new ExprTimeValue("17:00:00")))))); - assertJsonEquals(json, buildQuery(DSL.equal(ref("time_value", TIME), DSL - 
.castTime(literal(new ExprTimestampValue("2021-11-08 17:00:00")))))); + assertJsonEquals( + json, buildQuery(DSL.equal(ref("time_value", TIME), DSL.castTime(literal("17:00:00"))))); + assertJsonEquals( + json, + buildQuery( + DSL.equal( + ref("time_value", TIME), DSL.castTime(literal(new ExprTimeValue("17:00:00")))))); + assertJsonEquals( + json, + buildQuery( + DSL.equal( + ref("time_value", TIME), + DSL.castTime(literal(new ExprTimestampValue("2021-11-08 17:00:00")))))); } @Test void cast_to_datetime_in_filter() { - String json = "{\n" - + " \"term\" : {\n" - + " \"datetime_value\" : {\n" - + " \"value\" : \"2021-11-08 17:00:00\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}"; + String json = + "{\n" + + " \"term\" : {\n" + + " \"datetime_value\" : {\n" + + " \"value\" : \"2021-11-08 17:00:00\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}"; - assertJsonEquals(json, buildQuery(DSL.equal(ref("datetime_value", DATETIME), DSL - .castDatetime(literal("2021-11-08 17:00:00"))))); - assertJsonEquals(json, buildQuery(DSL.equal(ref("datetime_value", DATETIME), DSL - .castDatetime(literal(new ExprTimestampValue("2021-11-08 17:00:00")))))); + assertJsonEquals( + json, + buildQuery( + DSL.equal( + ref("datetime_value", DATETIME), + DSL.castDatetime(literal("2021-11-08 17:00:00"))))); + assertJsonEquals( + json, + buildQuery( + DSL.equal( + ref("datetime_value", DATETIME), + DSL.castDatetime(literal(new ExprTimestampValue("2021-11-08 17:00:00")))))); } @Test void cast_to_timestamp_in_filter() { - String json = "{\n" - + " \"term\" : {\n" - + " \"timestamp_value\" : {\n" - + " \"value\" : 1636390800000,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + "}"; + String json = + "{\n" + + " \"term\" : {\n" + + " \"timestamp_value\" : {\n" + + " \"value\" : 1636390800000,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}"; - assertJsonEquals(json, buildQuery(DSL.equal(ref("timestamp_value", TIMESTAMP), DSL - .castTimestamp(literal("2021-11-08 
17:00:00"))))); - assertJsonEquals(json, buildQuery(DSL.equal(ref("timestamp_value", TIMESTAMP), DSL - .castTimestamp(literal(new ExprTimestampValue("2021-11-08 17:00:00")))))); + assertJsonEquals( + json, + buildQuery( + DSL.equal( + ref("timestamp_value", TIMESTAMP), + DSL.castTimestamp(literal("2021-11-08 17:00:00"))))); + assertJsonEquals( + json, + buildQuery( + DSL.equal( + ref("timestamp_value", TIMESTAMP), + DSL.castTimestamp(literal(new ExprTimestampValue("2021-11-08 17:00:00")))))); } @Test @@ -1701,8 +1883,10 @@ void cast_in_range_query() { + " }\n" + " }\n" + "}", - buildQuery(DSL.greater(ref("timestamp_value", TIMESTAMP), DSL - .castTimestamp(literal("2021-11-08 17:00:00"))))); + buildQuery( + DSL.greater( + ref("timestamp_value", TIMESTAMP), + DSL.castTimestamp(literal("2021-11-08 17:00:00"))))); } @Test @@ -1718,9 +1902,9 @@ void non_literal_in_cast_should_build_script() { + " \"boost\" : 1.0\n" + " }\n" + "}", - buildQuery(DSL.equal(ref("string_value", STRING), DSL.castString(DSL - .add(literal(1), literal(0))))) - ); + buildQuery( + DSL.equal( + ref("string_value", STRING), DSL.castString(DSL.add(literal(1), literal(0)))))); } @Test @@ -1736,13 +1920,13 @@ void non_cast_nested_function_should_build_script() { + " \"boost\" : 1.0\n" + " }\n" + "}", - buildQuery(DSL.equal(ref("integer_value", INTEGER), DSL.abs(DSL - .add(literal(1), literal(0))))) - ); + buildQuery( + DSL.equal(ref("integer_value", INTEGER), DSL.abs(DSL.add(literal(1), literal(0)))))); } private static void assertJsonEquals(String expected, String actual) { - assertTrue(new JSONObject(expected).similar(new JSONObject(actual)), + assertTrue( + new JSONObject(expected).similar(new JSONObject(actual)), StringUtils.format("Expected: %s, actual: %s", expected, actual)); } @@ -1751,10 +1935,12 @@ private String buildQuery(Expression expr) { } private void mockToStringSerializer() { - doAnswer(invocation -> { - Expression expr = invocation.getArgument(0); - return expr.toString(); - 
}).when(serializer).serialize(any()); + doAnswer( + invocation -> { + Expression expr = invocation.getArgument(0); + return expr.toString(); + }) + .when(serializer) + .serialize(any()); } - } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQueryTest.java index 37b8326ef4..df3a730bad 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQueryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/LuceneQueryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter.lucene; import static org.junit.jupiter.api.Assertions.assertFalse; @@ -20,13 +19,12 @@ class LuceneQueryTest { @Test void should_not_support_single_argument_by_default() { - assertFalse(new LuceneQuery(){}.canSupport(DSL.abs(DSL.ref("age", INTEGER)))); + assertFalse(new LuceneQuery() {}.canSupport(DSL.abs(DSL.ref("age", INTEGER)))); } @Test void should_throw_exception_if_not_implemented() { - assertThrows(UnsupportedOperationException.class, () -> - new LuceneQuery(){}.doBuild(null, null, null)); + assertThrows( + UnsupportedOperationException.class, () -> new LuceneQuery() {}.doBuild(null, null, null)); } - } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchBoolPrefixQueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchBoolPrefixQueryTest.java index 6906619065..7465bfc5a4 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchBoolPrefixQueryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchBoolPrefixQueryTest.java @@ -35,8 +35,8 @@ public class MatchBoolPrefixQueryTest { private 
final FunctionName matchBoolPrefix = FunctionName.of("match_bool_prefix"); static Stream> generateValidData() { - NamedArgumentExpression field = DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())); + NamedArgumentExpression field = + DSL.namedArgument("field", new ReferenceExpression("field_value", OpenSearchTextType.of())); NamedArgumentExpression query = DSL.namedArgument("query", DSL.literal("query_value")); return List.of( DSL.namedArgument("fuzziness", DSL.literal("AUTO")), @@ -48,8 +48,9 @@ static Stream> generateValidData() { DSL.namedArgument("boost", DSL.literal("1")), DSL.namedArgument("analyzer", DSL.literal("simple")), DSL.namedArgument("operator", DSL.literal("Or")), - DSL.namedArgument("operator", DSL.literal("and")) - ).stream().map(arg -> List.of(field, query, arg)); + DSL.namedArgument("operator", DSL.literal("and"))) + .stream() + .map(arg -> List.of(field, query, arg)); } @ParameterizedTest @@ -60,35 +61,40 @@ public void test_valid_arguments(List validArgs) { @Test public void test_valid_when_two_arguments() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), - DSL.namedArgument("query", "query_value")); + List arguments = + List.of( + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument("query", "query_value")); Assertions.assertNotNull(matchBoolPrefixQuery.build(new MatchExpression(arguments))); } @Test public void test_SyntaxCheckException_when_no_arguments() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> matchBoolPrefixQuery.build(new MatchExpression(arguments))); } @Test public void test_SyntaxCheckException_when_one_argument() { List arguments = List.of(DSL.namedArgument("field", "field_value")); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, 
() -> matchBoolPrefixQuery.build(new MatchExpression(arguments))); } @Test public void test_SemanticCheckException_when_invalid_argument() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), - DSL.namedArgument("query", "query_value"), - DSL.namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, + List arguments = + List.of( + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument("query", "query_value"), + DSL.namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, () -> matchBoolPrefixQuery.build(new MatchExpression(arguments))); } @@ -99,14 +105,16 @@ public MatchExpression(List arguments) { @Override public ExprValue valueOf(Environment valueEnv) { - throw new UnsupportedOperationException("Invalid function call, " - + "valueOf function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "valueOf function need implementation only to support Expression interface"); } @Override public ExprType type() { - throw new UnsupportedOperationException("Invalid function call, " - + "type function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "type function need implementation only to support Expression interface"); } } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchPhrasePrefixQueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchPhrasePrefixQueryTest.java index 0defee0008..a3cf54bc5f 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchPhrasePrefixQueryTest.java +++ 
b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchPhrasePrefixQueryTest.java @@ -5,7 +5,6 @@ package org.opensearch.sql.opensearch.storage.script.filter.lucene; - import static org.junit.jupiter.api.Assertions.assertThrows; import java.util.List; @@ -27,7 +26,7 @@ import org.opensearch.sql.opensearch.storage.script.filter.lucene.relevance.MatchPhrasePrefixQuery; @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) -public class MatchPhrasePrefixQueryTest { +public class MatchPhrasePrefixQueryTest { private final MatchPhrasePrefixQuery matchPhrasePrefixQuery = new MatchPhrasePrefixQuery(); private final FunctionName matchPhrasePrefix = FunctionName.of("match_phrase_prefix"); @@ -35,90 +34,89 @@ public class MatchPhrasePrefixQueryTest { @Test public void test_SyntaxCheckException_when_no_arguments() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> matchPhrasePrefixQuery.build(new MatchPhraseExpression(arguments))); } @Test public void test_SyntaxCheckException_when_one_argument() { - List arguments = List.of(DSL.namedArgument("field", - new ReferenceExpression("test", OpenSearchTextType.of()))); - assertThrows(SyntaxCheckException.class, + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("test", OpenSearchTextType.of()))); + assertThrows( + SyntaxCheckException.class, () -> matchPhrasePrefixQuery.build(new MatchPhraseExpression(arguments))); } @Test public void test_SyntaxCheckException_when_invalid_parameter() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("test", OpenSearchTextType.of())), - DSL.namedArgument("query", "test2"), - DSL.namedArgument("unsupported", "3")); - Assertions.assertThrows(SemanticCheckException.class, + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("test", OpenSearchTextType.of())), + 
DSL.namedArgument("query", "test2"), + DSL.namedArgument("unsupported", "3")); + Assertions.assertThrows( + SemanticCheckException.class, () -> matchPhrasePrefixQuery.build(new MatchPhraseExpression(arguments))); } @Test public void test_analyzer_parameter() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("analyzer", "standard") - ); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("analyzer", "standard")); Assertions.assertNotNull(matchPhrasePrefixQuery.build(new MatchPhraseExpression(arguments))); } @Test public void build_succeeds_with_two_arguments() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("test", OpenSearchTextType.of())), - DSL.namedArgument("query", "test2")); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("test", OpenSearchTextType.of())), + DSL.namedArgument("query", "test2")); Assertions.assertNotNull(matchPhrasePrefixQuery.build(new MatchPhraseExpression(arguments))); } @Test public void test_slop_parameter() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("slop", "2") - ); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("slop", "2")); Assertions.assertNotNull(matchPhrasePrefixQuery.build(new MatchPhraseExpression(arguments))); } @Test public void test_zero_terms_query_parameter() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("zero_terms_query", "ALL") - ); + 
List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("zero_terms_query", "ALL")); Assertions.assertNotNull(matchPhrasePrefixQuery.build(new MatchPhraseExpression(arguments))); } @Test public void test_zero_terms_query_parameter_lower_case() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("zero_terms_query", "all") - ); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("zero_terms_query", "all")); Assertions.assertNotNull(matchPhrasePrefixQuery.build(new MatchPhraseExpression(arguments))); } @Test public void test_boost_parameter() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("test", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("boost", "0.1") - ); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("test", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("boost", "0.1")); Assertions.assertNotNull(matchPhrasePrefixQuery.build(new MatchPhraseExpression(arguments))); } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchPhraseQueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchPhraseQueryTest.java index 20ecb869ba..66c4c00059 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchPhraseQueryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchPhraseQueryTest.java @@ -5,7 +5,6 @@ package org.opensearch.sql.opensearch.storage.script.filter.lucene; - import static 
org.junit.jupiter.api.Assertions.assertThrows; import java.util.List; @@ -37,256 +36,259 @@ public class MatchPhraseQueryTest { @Test public void test_SyntaxCheckException_when_no_arguments() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> matchPhraseQuery.build(new MatchPhraseExpression(arguments))); } @Test public void test_SyntaxCheckException_when_one_argument() { - List arguments = List.of(DSL.namedArgument("field", - new ReferenceExpression("test", OpenSearchTextType.of()))); - assertThrows(SyntaxCheckException.class, + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("test", OpenSearchTextType.of()))); + assertThrows( + SyntaxCheckException.class, () -> matchPhraseQuery.build(new MatchPhraseExpression(arguments))); } @Test public void test_SyntaxCheckException_when_invalid_parameter() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("test", OpenSearchTextType.of())), - DSL.namedArgument("query", "test2"), - DSL.namedArgument("unsupported", "3")); - Assertions.assertThrows(SemanticCheckException.class, + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("test", OpenSearchTextType.of())), + DSL.namedArgument("query", "test2"), + DSL.namedArgument("unsupported", "3")); + Assertions.assertThrows( + SemanticCheckException.class, () -> matchPhraseQuery.build(new MatchPhraseExpression(arguments))); } @Test public void test_analyzer_parameter() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("analyzer", "standard") - ); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("analyzer", "standard")); Assertions.assertNotNull(matchPhraseQuery.build(new 
MatchPhraseExpression(arguments))); } @Test public void build_succeeds_with_two_arguments() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("test", OpenSearchTextType.of())), - DSL.namedArgument("query", "test2")); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("test", OpenSearchTextType.of())), + DSL.namedArgument("query", "test2")); Assertions.assertNotNull(matchPhraseQuery.build(new MatchPhraseExpression(arguments))); } @Test public void test_slop_parameter() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("slop", "2") - ); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("slop", "2")); Assertions.assertNotNull(matchPhraseQuery.build(new MatchPhraseExpression(arguments))); } @Test public void test_zero_terms_query_parameter() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("zero_terms_query", "ALL") - ); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("zero_terms_query", "ALL")); Assertions.assertNotNull(matchPhraseQuery.build(new MatchPhraseExpression(arguments))); } @Test public void test_zero_terms_query_parameter_lower_case() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("zero_terms_query", "all") - ); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + 
DSL.namedArgument("zero_terms_query", "all")); Assertions.assertNotNull(matchPhraseQuery.build(new MatchPhraseExpression(arguments))); } @Test public void test_SyntaxCheckException_when_no_arguments_match_phrase_syntax() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, - () -> matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseWithUnderscoreName))); + assertThrows( + SyntaxCheckException.class, + () -> + matchPhraseQuery.build( + new MatchPhraseExpression(arguments, matchPhraseWithUnderscoreName))); } @Test public void test_SyntaxCheckException_when_one_argument_match_phrase_syntax() { - List arguments = List.of(DSL.namedArgument("field", - new ReferenceExpression("test", OpenSearchTextType.of()))); - assertThrows(SyntaxCheckException.class, - () -> matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseWithUnderscoreName))); - + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("test", OpenSearchTextType.of()))); + assertThrows( + SyntaxCheckException.class, + () -> + matchPhraseQuery.build( + new MatchPhraseExpression(arguments, matchPhraseWithUnderscoreName))); } @Test public void test_SyntaxCheckException_when_invalid_parameter_match_phrase_syntax() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("test", OpenSearchTextType.of())), - DSL.namedArgument("query", "test2"), - DSL.namedArgument("unsupported", "3")); - Assertions.assertThrows(SemanticCheckException.class, - () -> matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseWithUnderscoreName))); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("test", OpenSearchTextType.of())), + DSL.namedArgument("query", "test2"), + DSL.namedArgument("unsupported", "3")); + Assertions.assertThrows( + SemanticCheckException.class, + () -> + matchPhraseQuery.build( + new MatchPhraseExpression(arguments, matchPhraseWithUnderscoreName))); } 
@Test public void test_analyzer_parameter_match_phrase_syntax() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("analyzer", "standard") - ); - Assertions.assertNotNull(matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseWithUnderscoreName))); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("analyzer", "standard")); + Assertions.assertNotNull( + matchPhraseQuery.build( + new MatchPhraseExpression(arguments, matchPhraseWithUnderscoreName))); } @Test public void build_succeeds_with_two_arguments_match_phrase_syntax() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("test", OpenSearchTextType.of())), - DSL.namedArgument("query", "test2")); - Assertions.assertNotNull(matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseWithUnderscoreName))); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("test", OpenSearchTextType.of())), + DSL.namedArgument("query", "test2")); + Assertions.assertNotNull( + matchPhraseQuery.build( + new MatchPhraseExpression(arguments, matchPhraseWithUnderscoreName))); } @Test public void test_slop_parameter_match_phrase_syntax() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("slop", "2") - ); - Assertions.assertNotNull(matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseWithUnderscoreName))); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("slop", "2")); + Assertions.assertNotNull( + matchPhraseQuery.build( + new 
MatchPhraseExpression(arguments, matchPhraseWithUnderscoreName))); } @Test public void test_zero_terms_query_parameter_match_phrase_syntax() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("zero_terms_query", "ALL") - ); - Assertions.assertNotNull(matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseWithUnderscoreName))); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("zero_terms_query", "ALL")); + Assertions.assertNotNull( + matchPhraseQuery.build( + new MatchPhraseExpression(arguments, matchPhraseWithUnderscoreName))); } @Test public void test_zero_terms_query_parameter_lower_case_match_phrase_syntax() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("zero_terms_query", "all") - ); - Assertions.assertNotNull(matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseWithUnderscoreName))); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("zero_terms_query", "all")); + Assertions.assertNotNull( + matchPhraseQuery.build( + new MatchPhraseExpression(arguments, matchPhraseWithUnderscoreName))); } @Test public void test_SyntaxCheckException_when_no_arguments_matchphrase_syntax() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, - () -> matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseQueryName))); + assertThrows( + SyntaxCheckException.class, + () -> matchPhraseQuery.build(new MatchPhraseExpression(arguments, matchPhraseQueryName))); } @Test public void 
test_SyntaxCheckException_when_one_argument_matchphrase_syntax() { - List arguments = List.of(DSL.namedArgument("field", - new ReferenceExpression("test", OpenSearchTextType.of()))); - assertThrows(SyntaxCheckException.class, - () -> matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseQueryName))); - + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("test", OpenSearchTextType.of()))); + assertThrows( + SyntaxCheckException.class, + () -> matchPhraseQuery.build(new MatchPhraseExpression(arguments, matchPhraseQueryName))); } @Test public void test_SyntaxCheckException_when_invalid_parameter_matchphrase_syntax() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("test", OpenSearchTextType.of())), - DSL.namedArgument("query", "test2"), - DSL.namedArgument("unsupported", "3")); - Assertions.assertThrows(SemanticCheckException.class, - () -> matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseQueryName))); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("test", OpenSearchTextType.of())), + DSL.namedArgument("query", "test2"), + DSL.namedArgument("unsupported", "3")); + Assertions.assertThrows( + SemanticCheckException.class, + () -> matchPhraseQuery.build(new MatchPhraseExpression(arguments, matchPhraseQueryName))); } @Test public void test_analyzer_parameter_matchphrase_syntax() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("analyzer", "standard") - ); - Assertions.assertNotNull(matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseQueryName))); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("analyzer", "standard")); + Assertions.assertNotNull( + 
matchPhraseQuery.build(new MatchPhraseExpression(arguments, matchPhraseQueryName))); } @Test public void build_succeeds_with_two_arguments_matchphrase_syntax() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("test", OpenSearchTextType.of())), - DSL.namedArgument("query", "test2")); - Assertions.assertNotNull(matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseQueryName))); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("test", OpenSearchTextType.of())), + DSL.namedArgument("query", "test2")); + Assertions.assertNotNull( + matchPhraseQuery.build(new MatchPhraseExpression(arguments, matchPhraseQueryName))); } @Test public void test_slop_parameter_matchphrase_syntax() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("slop", "2") - ); - Assertions.assertNotNull(matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseQueryName))); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("slop", "2")); + Assertions.assertNotNull( + matchPhraseQuery.build(new MatchPhraseExpression(arguments, matchPhraseQueryName))); } @Test public void test_zero_terms_query_parameter_matchphrase_syntax() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("zero_terms_query", "ALL") - ); - Assertions.assertNotNull(matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseQueryName))); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("zero_terms_query", "ALL")); + 
Assertions.assertNotNull( + matchPhraseQuery.build(new MatchPhraseExpression(arguments, matchPhraseQueryName))); } @Test public void test_zero_terms_query_parameter_lower_case_matchphrase_syntax() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("t1", OpenSearchTextType.of())), - DSL.namedArgument("query", "t2"), - DSL.namedArgument("zero_terms_query", "all") - ); - Assertions.assertNotNull(matchPhraseQuery.build(new MatchPhraseExpression( - arguments, matchPhraseQueryName))); + List arguments = + List.of( + DSL.namedArgument("field", new ReferenceExpression("t1", OpenSearchTextType.of())), + DSL.namedArgument("query", "t2"), + DSL.namedArgument("zero_terms_query", "all")); + Assertions.assertNotNull( + matchPhraseQuery.build(new MatchPhraseExpression(arguments, matchPhraseQueryName))); } private class MatchPhraseExpression extends FunctionExpression { diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchQueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchQueryTest.java index ddabb3820e..28b7878d63 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchQueryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MatchQueryTest.java @@ -35,95 +35,81 @@ public class MatchQueryTest { private final FunctionName matchName = FunctionName.of("match"); private final FunctionName matchQueryName = FunctionName.of("matchquery"); private final FunctionName matchQueryWithUnderscoreName = FunctionName.of("match_query"); - private final FunctionName[] functionNames = - {matchName,matchQueryName, matchQueryWithUnderscoreName}; + private final FunctionName[] functionNames = { + matchName, matchQueryName, matchQueryWithUnderscoreName + }; static Stream> generateValidData() { return Stream.of( List.of( - DSL.namedArgument("field", - new 
ReferenceExpression("field_value", OpenSearchTextType.of())), - DSL.namedArgument("query", DSL.literal("query_value")) - ), + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument("query", DSL.literal("query_value"))), List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), DSL.namedArgument("query", DSL.literal("query_value")), - DSL.namedArgument("analyzer", DSL.literal("standard")) - ), + DSL.namedArgument("analyzer", DSL.literal("standard"))), List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), DSL.namedArgument("query", DSL.literal("query_value")), - DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true")) - ), + DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true"))), List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), DSL.namedArgument("query", DSL.literal("query_value")), - DSL.namedArgument("fuzziness", DSL.literal("AUTO")) - ), + DSL.namedArgument("fuzziness", DSL.literal("AUTO"))), List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), DSL.namedArgument("query", DSL.literal("query_value")), - DSL.namedArgument("max_expansions", DSL.literal("50")) - ), + DSL.namedArgument("max_expansions", DSL.literal("50"))), List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new 
ReferenceExpression("field_value", OpenSearchTextType.of())), DSL.namedArgument("query", DSL.literal("query_value")), - DSL.namedArgument("prefix_length", DSL.literal("0")) - ), + DSL.namedArgument("prefix_length", DSL.literal("0"))), List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), DSL.namedArgument("query", DSL.literal("query_value")), - DSL.namedArgument("fuzzy_transpositions", DSL.literal("true")) - ), + DSL.namedArgument("fuzzy_transpositions", DSL.literal("true"))), List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), DSL.namedArgument("query", DSL.literal("query_value")), - DSL.namedArgument("fuzzy_rewrite", DSL.literal("constant_score")) - ), + DSL.namedArgument("fuzzy_rewrite", DSL.literal("constant_score"))), List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), DSL.namedArgument("query", DSL.literal("query_value")), - DSL.namedArgument("lenient", DSL.literal("false")) - ), + DSL.namedArgument("lenient", DSL.literal("false"))), List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), DSL.namedArgument("query", DSL.literal("query_value")), - DSL.namedArgument("operator", DSL.literal("OR")) - ), + DSL.namedArgument("operator", DSL.literal("OR"))), List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), 
DSL.namedArgument("query", DSL.literal("query_value")), - DSL.namedArgument("minimum_should_match", DSL.literal("3")) - ), + DSL.namedArgument("minimum_should_match", DSL.literal("3"))), List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), DSL.namedArgument("query", DSL.literal("query_value")), - DSL.namedArgument("zero_terms_query", DSL.literal("NONE")) - ), + DSL.namedArgument("zero_terms_query", DSL.literal("NONE"))), List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), DSL.namedArgument("query", DSL.literal("query_value")), - DSL.namedArgument("zero_terms_query", DSL.literal("none")) - ), + DSL.namedArgument("zero_terms_query", DSL.literal("none"))), List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), DSL.namedArgument("query", DSL.literal("query_value")), - DSL.namedArgument("boost", DSL.literal("1")) - ) - ); + DSL.namedArgument("boost", DSL.literal("1")))); } @ParameterizedTest @@ -135,99 +121,108 @@ public void test_valid_parameters(List validArgs) { @Test public void test_SyntaxCheckException_when_no_arguments() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, - () -> matchQuery.build(new MatchExpression(arguments))); + assertThrows( + SyntaxCheckException.class, () -> matchQuery.build(new MatchExpression(arguments))); } @Test public void test_SyntaxCheckException_when_one_argument() { List arguments = List.of(namedArgument("field", "field_value")); - assertThrows(SyntaxCheckException.class, - () -> matchQuery.build(new MatchExpression(arguments))); + assertThrows( + 
SyntaxCheckException.class, () -> matchQuery.build(new MatchExpression(arguments))); } @Test public void test_SemanticCheckException_when_invalid_parameter() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), - namedArgument("query", "query_value"), - namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, - () -> matchQuery.build(new MatchExpression(arguments))); + List arguments = + List.of( + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), + namedArgument("query", "query_value"), + namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, () -> matchQuery.build(new MatchExpression(arguments))); } @ParameterizedTest @MethodSource("generateValidData") public void test_valid_parameters_matchquery_syntax(List validArgs) { - Assertions.assertNotNull(matchQuery.build( - new MatchExpression(validArgs, MatchQueryTest.this.matchQueryName))); + Assertions.assertNotNull( + matchQuery.build(new MatchExpression(validArgs, MatchQueryTest.this.matchQueryName))); } @Test public void test_SyntaxCheckException_when_no_arguments_matchquery_syntax() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, - () -> matchQuery.build( - new MatchExpression(arguments, MatchQueryTest.this.matchQueryName))); + assertThrows( + SyntaxCheckException.class, + () -> matchQuery.build(new MatchExpression(arguments, MatchQueryTest.this.matchQueryName))); } @Test public void test_SyntaxCheckException_when_one_argument_matchquery_syntax() { List arguments = List.of(namedArgument("field", "field_value")); - assertThrows(SyntaxCheckException.class, - () -> matchQuery.build( - new MatchExpression(arguments, MatchQueryTest.this.matchQueryName))); + assertThrows( + SyntaxCheckException.class, + () -> matchQuery.build(new MatchExpression(arguments, 
MatchQueryTest.this.matchQueryName))); } @Test public void test_SemanticCheckException_when_invalid_parameter_matchquery_syntax() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), - namedArgument("query", "query_value"), - namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, - () -> matchQuery.build( - new MatchExpression(arguments, MatchQueryTest.this.matchQueryName))); + List arguments = + List.of( + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), + namedArgument("query", "query_value"), + namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, + () -> matchQuery.build(new MatchExpression(arguments, MatchQueryTest.this.matchQueryName))); } @ParameterizedTest @MethodSource("generateValidData") public void test_valid_parameters_match_query_syntax(List validArgs) { - Assertions.assertNotNull(matchQuery.build( - new MatchExpression(validArgs, MatchQueryTest.this.matchQueryWithUnderscoreName))); + Assertions.assertNotNull( + matchQuery.build( + new MatchExpression(validArgs, MatchQueryTest.this.matchQueryWithUnderscoreName))); } @Test public void test_SyntaxCheckException_when_no_arguments_match_query_syntax() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, - () -> matchQuery.build( - new MatchExpression(arguments, MatchQueryTest.this.matchQueryWithUnderscoreName))); + assertThrows( + SyntaxCheckException.class, + () -> + matchQuery.build( + new MatchExpression(arguments, MatchQueryTest.this.matchQueryWithUnderscoreName))); } @Test public void test_SyntaxCheckException_when_one_argument_match_query_syntax() { List arguments = List.of(namedArgument("field", "field_value")); - assertThrows(SyntaxCheckException.class, - () -> matchQuery.build( - new MatchExpression(arguments, 
MatchQueryTest.this.matchQueryWithUnderscoreName))); + assertThrows( + SyntaxCheckException.class, + () -> + matchQuery.build( + new MatchExpression(arguments, MatchQueryTest.this.matchQueryWithUnderscoreName))); } @Test public void test_SemanticCheckException_when_invalid_parameter_match_query_syntax() { - List arguments = List.of( - DSL.namedArgument("field", - new ReferenceExpression("field_value", OpenSearchTextType.of())), - namedArgument("query", "query_value"), - namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, - () -> matchQuery.build( - new MatchExpression(arguments, MatchQueryTest.this.matchQueryWithUnderscoreName))); + List arguments = + List.of( + DSL.namedArgument( + "field", new ReferenceExpression("field_value", OpenSearchTextType.of())), + namedArgument("query", "query_value"), + namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, + () -> + matchQuery.build( + new MatchExpression(arguments, MatchQueryTest.this.matchQueryWithUnderscoreName))); } - private NamedArgumentExpression namedArgument(String name, String value) { return DSL.namedArgument(name, DSL.literal(value)); } @@ -244,14 +239,16 @@ public MatchExpression(List arguments, FunctionName funcName) { @Override public ExprValue valueOf(Environment valueEnv) { - throw new UnsupportedOperationException("Invalid function call, " - + "valueOf function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "valueOf function need implementation only to support Expression interface"); } @Override public ExprType type() { - throw new UnsupportedOperationException("Invalid function call, " - + "type function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "type function need implementation only to support Expression 
interface"); } } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MultiMatchTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MultiMatchTest.java index 93b0cdbc93..7fcc4a6430 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MultiMatchTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/MultiMatchTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter.lucene; import static org.junit.jupiter.api.Assertions.assertThrows; @@ -39,186 +38,181 @@ class MultiMatchTest { private final FunctionName multiMatchName = FunctionName.of("multimatch"); private final FunctionName snakeCaseMultiMatchName = FunctionName.of("multi_match"); private final FunctionName multiMatchQueryName = FunctionName.of("multimatchquery"); - private static final LiteralExpression fields_value = DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "title", ExprValueUtils.floatValue(1.F), - "body", ExprValueUtils.floatValue(.3F))))); + private static final LiteralExpression fields_value = + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "title", ExprValueUtils.floatValue(1.F), + "body", ExprValueUtils.floatValue(.3F))))); private static final LiteralExpression query_value = DSL.literal("query_value"); static Stream> generateValidData() { return Stream.of( - List.of( - DSL.namedArgument("fields", fields_value), - DSL.namedArgument("query", query_value) - ), + List.of(DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value)), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("analyzer", DSL.literal("simple")) - ), + DSL.namedArgument("analyzer", DSL.literal("simple"))), List.of( DSL.namedArgument("fields", 
fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true")) - ), + DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("boost", DSL.literal("1.3")) - ), + DSL.namedArgument("boost", DSL.literal("1.3"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("cutoff_frequency", DSL.literal("4.2")) - ), + DSL.namedArgument("cutoff_frequency", DSL.literal("4.2"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("fuzziness", DSL.literal("AUTO:2,4")) - ), + DSL.namedArgument("fuzziness", DSL.literal("AUTO:2,4"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("fuzzy_transpositions", DSL.literal("true")) - ), + DSL.namedArgument("fuzzy_transpositions", DSL.literal("true"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("lenient", DSL.literal("true")) - ), + DSL.namedArgument("lenient", DSL.literal("true"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("max_expansions", DSL.literal("7")) - ), + DSL.namedArgument("max_expansions", DSL.literal("7"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("minimum_should_match", DSL.literal("4")) - ), + DSL.namedArgument("minimum_should_match", DSL.literal("4"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("operator", DSL.literal("AND")) - ), + DSL.namedArgument("operator", DSL.literal("AND"))), List.of( DSL.namedArgument("fields", fields_value), 
DSL.namedArgument("query", query_value), - DSL.namedArgument("prefix_length", DSL.literal("7")) - ), + DSL.namedArgument("prefix_length", DSL.literal("7"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("tie_breaker", DSL.literal("0.3")) - ), + DSL.namedArgument("tie_breaker", DSL.literal("0.3"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("type", DSL.literal("cross_fields")) - ), + DSL.namedArgument("type", DSL.literal("cross_fields"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("zero_terms_query", DSL.literal("ALL")) - ), + DSL.namedArgument("zero_terms_query", DSL.literal("ALL"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("zero_terms_query", DSL.literal("all")) - ) - ); + DSL.namedArgument("zero_terms_query", DSL.literal("all")))); } @ParameterizedTest @MethodSource("generateValidData") public void test_valid_parameters_multiMatch(List validArgs) { - Assertions.assertNotNull(multiMatchQuery.build( - new MultiMatchExpression(validArgs))); + Assertions.assertNotNull(multiMatchQuery.build(new MultiMatchExpression(validArgs))); } @ParameterizedTest @MethodSource("generateValidData") public void test_valid_parameters_multi_match(List validArgs) { - Assertions.assertNotNull(multiMatchQuery.build( - new MultiMatchExpression(validArgs, snakeCaseMultiMatchName))); + Assertions.assertNotNull( + multiMatchQuery.build(new MultiMatchExpression(validArgs, snakeCaseMultiMatchName))); } @ParameterizedTest @MethodSource("generateValidData") public void test_valid_parameters_multiMatchQuery(List validArgs) { - Assertions.assertNotNull(multiMatchQuery.build( - new MultiMatchExpression(validArgs, multiMatchQueryName))); + Assertions.assertNotNull( + multiMatchQuery.build(new 
MultiMatchExpression(validArgs, multiMatchQueryName))); } @Test public void test_SyntaxCheckException_when_no_arguments_multiMatch() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> multiMatchQuery.build(new MultiMatchExpression(arguments))); } @Test public void test_SyntaxCheckException_when_no_arguments_multi_match() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> multiMatchQuery.build(new MultiMatchExpression(arguments, multiMatchName))); } @Test public void test_SyntaxCheckException_when_no_arguments_multiMatchQuery() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> multiMatchQuery.build(new MultiMatchExpression(arguments, multiMatchQueryName))); } @Test public void test_SyntaxCheckException_when_one_argument_multiMatch() { List arguments = List.of(namedArgument("fields", fields_value)); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> multiMatchQuery.build(new MultiMatchExpression(arguments))); } @Test public void test_SyntaxCheckException_when_one_argument_multi_match() { List arguments = List.of(namedArgument("fields", fields_value)); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> multiMatchQuery.build(new MultiMatchExpression(arguments, snakeCaseMultiMatchName))); } @Test public void test_SyntaxCheckException_when_one_argument_multiMatchQuery() { List arguments = List.of(namedArgument("fields", fields_value)); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> multiMatchQuery.build(new MultiMatchExpression(arguments, multiMatchQueryName))); } @Test public void test_SemanticCheckException_when_invalid_parameter_multiMatch() { - List arguments = List.of( - namedArgument("fields", fields_value), - 
namedArgument("query", query_value), - DSL.namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, + List arguments = + List.of( + namedArgument("fields", fields_value), + namedArgument("query", query_value), + DSL.namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, () -> multiMatchQuery.build(new MultiMatchExpression(arguments))); } @Test public void test_SemanticCheckException_when_invalid_parameter_multi_match() { - List arguments = List.of( - namedArgument("fields", fields_value), - namedArgument("query", query_value), - DSL.namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, + List arguments = + List.of( + namedArgument("fields", fields_value), + namedArgument("query", query_value), + DSL.namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, () -> multiMatchQuery.build(new MultiMatchExpression(arguments, snakeCaseMultiMatchName))); } @Test public void test_SemanticCheckException_when_invalid_parameter_multiMatchQuery() { - List arguments = List.of( - namedArgument("fields", fields_value), - namedArgument("query", query_value), - DSL.namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, + List arguments = + List.of( + namedArgument("fields", fields_value), + namedArgument("query", query_value), + DSL.namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, () -> multiMatchQuery.build(new MultiMatchExpression(arguments, multiMatchQueryName))); } @@ -235,17 +229,18 @@ public MultiMatchExpression(List arguments, FunctionName funcName) { super(funcName, arguments); } - @Override public ExprValue valueOf(Environment valueEnv) { - throw new UnsupportedOperationException("Invalid function call, " - + "valueOf function need implementation only 
to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "valueOf function need implementation only to support Expression interface"); } @Override public ExprType type() { - throw new UnsupportedOperationException("Invalid function call, " - + "type function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "type function need implementation only to support Expression interface"); } } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryStringTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryStringTest.java index 32c02959b8..781e27d71a 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryStringTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryStringTest.java @@ -36,76 +36,82 @@ class QueryStringTest { private final QueryStringQuery queryStringQuery = new QueryStringQuery(); private final FunctionName queryStringFunc = FunctionName.of("query_string"); - private static final LiteralExpression fields_value = DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "title", ExprValueUtils.floatValue(1.F), - "body", ExprValueUtils.floatValue(.3F))))); + private static final LiteralExpression fields_value = + DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "title", ExprValueUtils.floatValue(1.F), + "body", ExprValueUtils.floatValue(.3F))))); private static final LiteralExpression query_value = DSL.literal("query_value"); static Stream> generateValidData() { Expression field = DSL.namedArgument("fields", fields_value); Expression query = DSL.namedArgument("query", query_value); return Stream.of( - DSL.namedArgument("analyzer", DSL.literal("standard")), - 
DSL.namedArgument("analyze_wildcard", DSL.literal("true")), - DSL.namedArgument("allow_leading_wildcard", DSL.literal("true")), - DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true")), - DSL.namedArgument("boost", DSL.literal("1")), - DSL.namedArgument("default_operator", DSL.literal("AND")), - DSL.namedArgument("default_operator", DSL.literal("and")), - DSL.namedArgument("enable_position_increments", DSL.literal("true")), - DSL.namedArgument("escape", DSL.literal("false")), - DSL.namedArgument("fuzziness", DSL.literal("1")), - DSL.namedArgument("fuzzy_rewrite", DSL.literal("constant_score")), - DSL.namedArgument("fuzzy_max_expansions", DSL.literal("42")), - DSL.namedArgument("fuzzy_prefix_length", DSL.literal("42")), - DSL.namedArgument("fuzzy_transpositions", DSL.literal("true")), - DSL.namedArgument("lenient", DSL.literal("true")), - DSL.namedArgument("max_determinized_states", DSL.literal("10000")), - DSL.namedArgument("minimum_should_match", DSL.literal("4")), - DSL.namedArgument("quote_analyzer", DSL.literal("standard")), - DSL.namedArgument("phrase_slop", DSL.literal("0")), - DSL.namedArgument("quote_field_suffix", DSL.literal(".exact")), - DSL.namedArgument("rewrite", DSL.literal("constant_score")), - DSL.namedArgument("type", DSL.literal("best_fields")), - DSL.namedArgument("tie_breaker", DSL.literal("0.3")), - DSL.namedArgument("time_zone", DSL.literal("Canada/Pacific")), - DSL.namedArgument("ANALYZER", DSL.literal("standard")), - DSL.namedArgument("ANALYZE_wildcard", DSL.literal("true")), - DSL.namedArgument("Allow_Leading_wildcard", DSL.literal("true")), - DSL.namedArgument("Auto_Generate_Synonyms_Phrase_Query", DSL.literal("true")), - DSL.namedArgument("Boost", DSL.literal("1")) - ).map(arg -> List.of(field, query, arg)); + DSL.namedArgument("analyzer", DSL.literal("standard")), + DSL.namedArgument("analyze_wildcard", DSL.literal("true")), + DSL.namedArgument("allow_leading_wildcard", DSL.literal("true")), + 
DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true")), + DSL.namedArgument("boost", DSL.literal("1")), + DSL.namedArgument("default_operator", DSL.literal("AND")), + DSL.namedArgument("default_operator", DSL.literal("and")), + DSL.namedArgument("enable_position_increments", DSL.literal("true")), + DSL.namedArgument("escape", DSL.literal("false")), + DSL.namedArgument("fuzziness", DSL.literal("1")), + DSL.namedArgument("fuzzy_rewrite", DSL.literal("constant_score")), + DSL.namedArgument("fuzzy_max_expansions", DSL.literal("42")), + DSL.namedArgument("fuzzy_prefix_length", DSL.literal("42")), + DSL.namedArgument("fuzzy_transpositions", DSL.literal("true")), + DSL.namedArgument("lenient", DSL.literal("true")), + DSL.namedArgument("max_determinized_states", DSL.literal("10000")), + DSL.namedArgument("minimum_should_match", DSL.literal("4")), + DSL.namedArgument("quote_analyzer", DSL.literal("standard")), + DSL.namedArgument("phrase_slop", DSL.literal("0")), + DSL.namedArgument("quote_field_suffix", DSL.literal(".exact")), + DSL.namedArgument("rewrite", DSL.literal("constant_score")), + DSL.namedArgument("type", DSL.literal("best_fields")), + DSL.namedArgument("tie_breaker", DSL.literal("0.3")), + DSL.namedArgument("time_zone", DSL.literal("Canada/Pacific")), + DSL.namedArgument("ANALYZER", DSL.literal("standard")), + DSL.namedArgument("ANALYZE_wildcard", DSL.literal("true")), + DSL.namedArgument("Allow_Leading_wildcard", DSL.literal("true")), + DSL.namedArgument("Auto_Generate_Synonyms_Phrase_Query", DSL.literal("true")), + DSL.namedArgument("Boost", DSL.literal("1"))) + .map(arg -> List.of(field, query, arg)); } @ParameterizedTest @MethodSource("generateValidData") void test_valid_parameters(List validArgs) { - Assertions.assertNotNull(queryStringQuery.build( - new QueryStringExpression(validArgs))); + Assertions.assertNotNull(queryStringQuery.build(new QueryStringExpression(validArgs))); } @Test void 
test_SyntaxCheckException_when_no_arguments() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> queryStringQuery.build(new QueryStringExpression(arguments))); } @Test void test_SyntaxCheckException_when_one_argument() { List arguments = List.of(namedArgument("fields", fields_value)); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> queryStringQuery.build(new QueryStringExpression(arguments))); } @Test void test_SemanticCheckException_when_invalid_parameter() { - List arguments = List.of( - namedArgument("fields", fields_value), - namedArgument("query", query_value), - namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, + List arguments = + List.of( + namedArgument("fields", fields_value), + namedArgument("query", query_value), + namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, () -> queryStringQuery.build(new QueryStringExpression(arguments))); } @@ -124,14 +130,16 @@ public QueryStringExpression(List arguments) { @Override public ExprValue valueOf(Environment valueEnv) { - throw new UnsupportedOperationException("Invalid function call, " - + "valueOf function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "valueOf function need implementation only to support Expression interface"); } @Override public ExprType type() { - throw new UnsupportedOperationException("Invalid function call, " - + "type function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "type function need implementation only to support Expression interface"); } } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryTest.java 
b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryTest.java index a61b47b7b1..d81218c0c3 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/QueryTest.java @@ -37,78 +37,78 @@ class QueryTest { static Stream> generateValidData() { Expression query = DSL.namedArgument("query", query_value); return List.of( - DSL.namedArgument("analyzer", DSL.literal("standard")), - DSL.namedArgument("analyze_wildcard", DSL.literal("true")), - DSL.namedArgument("allow_leading_wildcard", DSL.literal("true")), - DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true")), - DSL.namedArgument("boost", DSL.literal("1")), - DSL.namedArgument("default_operator", DSL.literal("AND")), - DSL.namedArgument("default_operator", DSL.literal("and")), - DSL.namedArgument("enable_position_increments", DSL.literal("true")), - DSL.namedArgument("escape", DSL.literal("false")), - DSL.namedArgument("fuzziness", DSL.literal("1")), - DSL.namedArgument("fuzzy_rewrite", DSL.literal("constant_score")), - DSL.namedArgument("fuzzy_max_expansions", DSL.literal("42")), - DSL.namedArgument("fuzzy_prefix_length", DSL.literal("42")), - DSL.namedArgument("fuzzy_transpositions", DSL.literal("true")), - DSL.namedArgument("lenient", DSL.literal("true")), - DSL.namedArgument("max_determinized_states", DSL.literal("10000")), - DSL.namedArgument("minimum_should_match", DSL.literal("4")), - DSL.namedArgument("quote_analyzer", DSL.literal("standard")), - DSL.namedArgument("phrase_slop", DSL.literal("0")), - DSL.namedArgument("quote_field_suffix", DSL.literal(".exact")), - DSL.namedArgument("rewrite", DSL.literal("constant_score")), - DSL.namedArgument("type", DSL.literal("best_fields")), - DSL.namedArgument("tie_breaker", DSL.literal("0.3")), - DSL.namedArgument("time_zone", DSL.literal("Canada/Pacific")), - 
DSL.namedArgument("ANALYZER", DSL.literal("standard")), - DSL.namedArgument("ANALYZE_wildcard", DSL.literal("true")), - DSL.namedArgument("Allow_Leading_wildcard", DSL.literal("true")), - DSL.namedArgument("Auto_Generate_Synonyms_Phrase_Query", DSL.literal("true")), - DSL.namedArgument("Boost", DSL.literal("1")) - ).stream().map(arg -> List.of(query, arg)); + DSL.namedArgument("analyzer", DSL.literal("standard")), + DSL.namedArgument("analyze_wildcard", DSL.literal("true")), + DSL.namedArgument("allow_leading_wildcard", DSL.literal("true")), + DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true")), + DSL.namedArgument("boost", DSL.literal("1")), + DSL.namedArgument("default_operator", DSL.literal("AND")), + DSL.namedArgument("default_operator", DSL.literal("and")), + DSL.namedArgument("enable_position_increments", DSL.literal("true")), + DSL.namedArgument("escape", DSL.literal("false")), + DSL.namedArgument("fuzziness", DSL.literal("1")), + DSL.namedArgument("fuzzy_rewrite", DSL.literal("constant_score")), + DSL.namedArgument("fuzzy_max_expansions", DSL.literal("42")), + DSL.namedArgument("fuzzy_prefix_length", DSL.literal("42")), + DSL.namedArgument("fuzzy_transpositions", DSL.literal("true")), + DSL.namedArgument("lenient", DSL.literal("true")), + DSL.namedArgument("max_determinized_states", DSL.literal("10000")), + DSL.namedArgument("minimum_should_match", DSL.literal("4")), + DSL.namedArgument("quote_analyzer", DSL.literal("standard")), + DSL.namedArgument("phrase_slop", DSL.literal("0")), + DSL.namedArgument("quote_field_suffix", DSL.literal(".exact")), + DSL.namedArgument("rewrite", DSL.literal("constant_score")), + DSL.namedArgument("type", DSL.literal("best_fields")), + DSL.namedArgument("tie_breaker", DSL.literal("0.3")), + DSL.namedArgument("time_zone", DSL.literal("Canada/Pacific")), + DSL.namedArgument("ANALYZER", DSL.literal("standard")), + DSL.namedArgument("ANALYZE_wildcard", DSL.literal("true")), + 
DSL.namedArgument("Allow_Leading_wildcard", DSL.literal("true")), + DSL.namedArgument("Auto_Generate_Synonyms_Phrase_Query", DSL.literal("true")), + DSL.namedArgument("Boost", DSL.literal("1"))) + .stream() + .map(arg -> List.of(query, arg)); } @ParameterizedTest @MethodSource("generateValidData") public void test_valid_parameters(List validArgs) { - Assertions.assertNotNull(queryQuery.build( - new QueryExpression(validArgs))); + Assertions.assertNotNull(queryQuery.build(new QueryExpression(validArgs))); } @Test public void test_SyntaxCheckException_when_no_arguments() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, - () -> queryQuery.build(new QueryExpression(arguments))); + assertThrows( + SyntaxCheckException.class, () -> queryQuery.build(new QueryExpression(arguments))); } @Test public void test_SyntaxCheckException_when_field_argument() { - List arguments = List.of( - namedArgument("fields", "invalid argument"), - namedArgument("query", query_value)); - assertThrows(SemanticCheckException.class, - () -> queryQuery.build(new QueryExpression(arguments))); + List arguments = + List.of(namedArgument("fields", "invalid argument"), namedArgument("query", query_value)); + assertThrows( + SemanticCheckException.class, () -> queryQuery.build(new QueryExpression(arguments))); } @Test public void test_SemanticCheckException_when_invalid_parameter() { - List arguments = List.of( - namedArgument("query", query_value), - namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, - () -> queryQuery.build(new QueryExpression(arguments))); + List arguments = + List.of( + namedArgument("query", query_value), namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, () -> queryQuery.build(new QueryExpression(arguments))); } @Test public void test_SemanticCheckException_when_sending_parameter_multiple_times() { - List arguments = List.of( + List arguments 
= + List.of( namedArgument("query", query_value), namedArgument("allow_leading_wildcard", DSL.literal("true")), namedArgument("allow_leading_wildcard", DSL.literal("true"))); - Assertions.assertThrows(SemanticCheckException.class, - () -> queryQuery.build(new QueryExpression(arguments))); + Assertions.assertThrows( + SemanticCheckException.class, () -> queryQuery.build(new QueryExpression(arguments))); } private NamedArgumentExpression namedArgument(String name, String value) { @@ -126,14 +126,16 @@ public QueryExpression(List arguments) { @Override public ExprValue valueOf(Environment valueEnv) { - throw new UnsupportedOperationException("Invalid function call, " - + "valueOf function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "valueOf function need implementation only to support Expression interface"); } @Override public ExprType type() { - throw new UnsupportedOperationException("Invalid function call, " - + "type function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "type function need implementation only to support Expression interface"); } } @@ -141,7 +143,6 @@ public ExprType type() { public void test_can_get_query_name() { List arguments = List.of(namedArgument("query", query_value)); queryQuery.build(new QueryExpression(arguments)); - Assertions.assertEquals("query", - queryQuery.getQueryName()); + Assertions.assertEquals("query", queryQuery.getQueryName()); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQueryTest.java index 208c782593..ca87f42900 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQueryTest.java +++ 
b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/RangeQueryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter.lucene; import static org.junit.jupiter.api.Assertions.assertThrows; @@ -21,9 +20,10 @@ class RangeQueryTest { @Test void should_throw_exception_for_unsupported_comparison() { // Note that since we do switch check on enum comparison, this should be impossible - assertThrows(IllegalStateException.class, () -> - new RangeQuery(Comparison.BETWEEN) - .doBuild("name", STRING, ExprValueUtils.stringValue("John"))); + assertThrows( + IllegalStateException.class, + () -> + new RangeQuery(Comparison.BETWEEN) + .doBuild("name", STRING, ExprValueUtils.stringValue("John"))); } - } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/SimpleQueryStringTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/SimpleQueryStringTest.java index f7129117a1..ea14461521 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/SimpleQueryStringTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/SimpleQueryStringTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.filter.lucene; import static org.junit.jupiter.api.Assertions.assertThrows; @@ -37,148 +36,129 @@ class SimpleQueryStringTest { private final SimpleQueryStringQuery simpleQueryStringQuery = new SimpleQueryStringQuery(); private final FunctionName simpleQueryString = FunctionName.of("simple_query_string"); - private static final LiteralExpression fields_value = DSL.literal( - new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "title", ExprValueUtils.floatValue(1.F), - "body", ExprValueUtils.floatValue(.3F))))); + private static final LiteralExpression fields_value = + 
DSL.literal( + new ExprTupleValue( + new LinkedHashMap<>( + ImmutableMap.of( + "title", ExprValueUtils.floatValue(1.F), + "body", ExprValueUtils.floatValue(.3F))))); private static final LiteralExpression query_value = DSL.literal("query_value"); static Stream> generateValidData() { return Stream.of( - List.of( - DSL.namedArgument("fields", fields_value), - DSL.namedArgument("query", query_value) - ), + List.of(DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value)), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("analyze_wildcard", DSL.literal("true")) - ), + DSL.namedArgument("analyze_wildcard", DSL.literal("true"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("analyzer", DSL.literal("standard")) - ), + DSL.namedArgument("analyzer", DSL.literal("standard"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true")) - ), + DSL.namedArgument("auto_generate_synonyms_phrase_query", DSL.literal("true"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("flags", DSL.literal("PREFIX")) - ), + DSL.namedArgument("flags", DSL.literal("PREFIX"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("flags", DSL.literal("PREFIX|NOT|AND")) - ), + DSL.namedArgument("flags", DSL.literal("PREFIX|NOT|AND"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("flags", DSL.literal("NOT|AND")) - ), + DSL.namedArgument("flags", DSL.literal("NOT|AND"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("flags", DSL.literal("PREFIX|not|AND")) - ), + 
DSL.namedArgument("flags", DSL.literal("PREFIX|not|AND"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("flags", DSL.literal("not|and")) - ), + DSL.namedArgument("flags", DSL.literal("not|and"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("fuzzy_max_expansions", DSL.literal("42")) - ), + DSL.namedArgument("fuzzy_max_expansions", DSL.literal("42"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("fuzzy_prefix_length", DSL.literal("42")) - ), + DSL.namedArgument("fuzzy_prefix_length", DSL.literal("42"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("fuzzy_transpositions", DSL.literal("true")) - ), + DSL.namedArgument("fuzzy_transpositions", DSL.literal("true"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("lenient", DSL.literal("true")) - ), + DSL.namedArgument("lenient", DSL.literal("true"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("default_operator", DSL.literal("AND")) - ), + DSL.namedArgument("default_operator", DSL.literal("AND"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("default_operator", DSL.literal("and")) - ), + DSL.namedArgument("default_operator", DSL.literal("and"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("minimum_should_match", DSL.literal("4")) - ), + DSL.namedArgument("minimum_should_match", DSL.literal("4"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("quote_field_suffix", DSL.literal(".exact")) - ), + 
DSL.namedArgument("quote_field_suffix", DSL.literal(".exact"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("boost", DSL.literal("1")) - ), - List.of( - DSL.namedArgument("FIELDS", fields_value), - DSL.namedArgument("QUERY", query_value) - ), + DSL.namedArgument("boost", DSL.literal("1"))), + List.of(DSL.namedArgument("FIELDS", fields_value), DSL.namedArgument("QUERY", query_value)), List.of( DSL.namedArgument("FIELDS", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("ANALYZE_wildcard", DSL.literal("true")) - ), + DSL.namedArgument("ANALYZE_wildcard", DSL.literal("true"))), List.of( DSL.namedArgument("fields", fields_value), DSL.namedArgument("query", query_value), - DSL.namedArgument("analyZER", DSL.literal("standard")) - ) - ); + DSL.namedArgument("analyZER", DSL.literal("standard")))); } @ParameterizedTest @MethodSource("generateValidData") public void test_valid_parameters(List validArgs) { - Assertions.assertNotNull(simpleQueryStringQuery.build( - new SimpleQueryStringExpression(validArgs))); + Assertions.assertNotNull( + simpleQueryStringQuery.build(new SimpleQueryStringExpression(validArgs))); } @Test public void test_SyntaxCheckException_when_no_arguments() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> simpleQueryStringQuery.build(new SimpleQueryStringExpression(arguments))); } @Test public void test_SyntaxCheckException_when_one_argument() { List arguments = List.of(namedArgument("fields", fields_value)); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> simpleQueryStringQuery.build(new SimpleQueryStringExpression(arguments))); } @Test public void test_SemanticCheckException_when_invalid_parameter() { - List arguments = List.of( - namedArgument("fields", fields_value), - namedArgument("query", query_value), - namedArgument("unsupported", 
"unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, + List arguments = + List.of( + namedArgument("fields", fields_value), + namedArgument("query", query_value), + namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, () -> simpleQueryStringQuery.build(new SimpleQueryStringExpression(arguments))); } @@ -197,14 +177,16 @@ public SimpleQueryStringExpression(List arguments) { @Override public ExprValue valueOf(Environment valueEnv) { - throw new UnsupportedOperationException("Invalid function call, " - + "valueOf function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "valueOf function need implementation only to support Expression interface"); } @Override public ExprType type() { - throw new UnsupportedOperationException("Invalid function call, " - + "type function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "type function need implementation only to support Expression interface"); } } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/WildcardQueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/WildcardQueryTest.java index 98bd7c5784..7182626c02 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/WildcardQueryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/WildcardQueryTest.java @@ -36,46 +36,45 @@ class WildcardQueryTest { static Stream> generateValidData() { return Stream.of( List.of( - namedArgument("field", - new ReferenceExpression("title", OpenSearchTextType.of())), + namedArgument("field", new ReferenceExpression("title", OpenSearchTextType.of())), namedArgument("query", "query_value*"), namedArgument("boost", 
"0.7"), namedArgument("case_insensitive", "false"), - namedArgument("rewrite", "constant_score_boolean") - ) - ); + namedArgument("rewrite", "constant_score_boolean"))); } @ParameterizedTest @MethodSource("generateValidData") public void test_valid_parameters(List validArgs) { - Assertions.assertNotNull(wildcardQueryQuery.build( - new WildcardQueryExpression(validArgs))); + Assertions.assertNotNull(wildcardQueryQuery.build(new WildcardQueryExpression(validArgs))); } @Test public void test_SyntaxCheckException_when_no_arguments() { List arguments = List.of(); - assertThrows(SyntaxCheckException.class, + assertThrows( + SyntaxCheckException.class, () -> wildcardQueryQuery.build(new WildcardQueryExpression(arguments))); } @Test public void test_SyntaxCheckException_when_one_argument() { - List arguments = List.of(namedArgument("field", - new ReferenceExpression("title", OpenSearchTextType.of()))); - assertThrows(SyntaxCheckException.class, + List arguments = + List.of(namedArgument("field", new ReferenceExpression("title", OpenSearchTextType.of()))); + assertThrows( + SyntaxCheckException.class, () -> wildcardQueryQuery.build(new WildcardQueryExpression(arguments))); } @Test public void test_SemanticCheckException_when_invalid_parameter() { - List arguments = List.of( - namedArgument("field", - new ReferenceExpression("title", OpenSearchTextType.of())), - namedArgument("query", "query_value*"), - namedArgument("unsupported", "unsupported_value")); - Assertions.assertThrows(SemanticCheckException.class, + List arguments = + List.of( + namedArgument("field", new ReferenceExpression("title", OpenSearchTextType.of())), + namedArgument("query", "query_value*"), + namedArgument("unsupported", "unsupported_value")); + Assertions.assertThrows( + SemanticCheckException.class, () -> wildcardQueryQuery.build(new WildcardQueryExpression(arguments))); } @@ -86,14 +85,16 @@ public WildcardQueryExpression(List arguments) { @Override public ExprValue valueOf(Environment valueEnv) { - 
throw new UnsupportedOperationException("Invalid function call, " - + "valueOf function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "valueOf function need implementation only to support Expression interface"); } @Override public ExprType type() { - throw new UnsupportedOperationException("Invalid function call, " - + "type function need implementation only to support Expression interface"); + throw new UnsupportedOperationException( + "Invalid function call, " + + "type function need implementation only to support Expression interface"); } } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MultiFieldQueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MultiFieldQueryTest.java index 01ec85d64d..9518136ff0 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MultiFieldQueryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/MultiFieldQueryTest.java @@ -27,14 +27,17 @@ class MultiFieldQueryTest { MultiFieldQuery query; private final String testQueryName = "test_query"; - private final Map> actionMap - = ImmutableMap.of("paramA", (o, v) -> o); + private final Map> actionMap = + ImmutableMap.of("paramA", (o, v) -> o); @BeforeEach public void setUp() { - query = mock(MultiFieldQuery.class, - Mockito.withSettings().useConstructor(actionMap) - .defaultAnswer(Mockito.CALLS_REAL_METHODS)); + query = + mock( + MultiFieldQuery.class, + Mockito.withSettings() + .useConstructor(actionMap) + .defaultAnswer(Mockito.CALLS_REAL_METHODS)); when(query.getQueryName()).thenReturn(testQueryName); } @@ -44,17 +47,26 @@ void createQueryBuilderTest() { String sampleField = "fieldA"; float sampleValue = 34f; - var fieldSpec = ImmutableMap.builder().put(sampleField, - 
ExprValueUtils.floatValue(sampleValue)).build(); + var fieldSpec = + ImmutableMap.builder() + .put(sampleField, ExprValueUtils.floatValue(sampleValue)) + .build(); - query.createQueryBuilder(List.of(DSL.namedArgument("fields", - new LiteralExpression(ExprTupleValue.fromExprValueMap(fieldSpec))), - DSL.namedArgument("query", - new LiteralExpression(ExprValueUtils.stringValue(sampleQuery))))); + query.createQueryBuilder( + List.of( + DSL.namedArgument( + "fields", new LiteralExpression(ExprTupleValue.fromExprValueMap(fieldSpec))), + DSL.namedArgument( + "query", new LiteralExpression(ExprValueUtils.stringValue(sampleQuery))))); - verify(query).createBuilder(argThat( - (ArgumentMatcher>) map -> map.size() == 1 - && map.containsKey(sampleField) && map.containsValue(sampleValue)), - eq(sampleQuery)); + verify(query) + .createBuilder( + argThat( + (ArgumentMatcher>) + map -> + map.size() == 1 + && map.containsKey(sampleField) + && map.containsValue(sampleValue)), + eq(sampleQuery)); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/NoFieldQueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/NoFieldQueryTest.java index c4e4f1242a..17b775fa0b 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/NoFieldQueryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/NoFieldQueryTest.java @@ -23,14 +23,17 @@ class NoFieldQueryTest { NoFieldQuery query; private final String testQueryName = "test_query"; - private final Map actionMap - = ImmutableMap.of("paramA", (o, v) -> o); + private final Map actionMap = + ImmutableMap.of("paramA", (o, v) -> o); @BeforeEach void setUp() { - query = mock(NoFieldQuery.class, - Mockito.withSettings().useConstructor(actionMap) - .defaultAnswer(Mockito.CALLS_REAL_METHODS)); + query = + mock( + NoFieldQuery.class, + 
Mockito.withSettings() + .useConstructor(actionMap) + .defaultAnswer(Mockito.CALLS_REAL_METHODS)); when(query.getQueryName()).thenReturn(testQueryName); } @@ -38,9 +41,10 @@ void setUp() { void createQueryBuilderTest() { String sampleQuery = "field:query"; - query.createQueryBuilder(List.of( - DSL.namedArgument("query", - new LiteralExpression(ExprValueUtils.stringValue(sampleQuery))))); + query.createQueryBuilder( + List.of( + DSL.namedArgument( + "query", new LiteralExpression(ExprValueUtils.stringValue(sampleQuery))))); verify(query).createBuilder(eq(sampleQuery)); } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQueryBuildTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQueryBuildTest.java index 5406f4cb58..a93a1e5fa4 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQueryBuildTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/RelevanceQueryBuildTest.java @@ -48,12 +48,17 @@ class RelevanceQueryBuildTest { private QueryBuilder queryBuilder; private final Map> queryBuildActions = ImmutableMap.>builder() - .put("boost", (k, v) -> k.boost(Float.parseFloat(v.stringValue()))).build(); + .put("boost", (k, v) -> k.boost(Float.parseFloat(v.stringValue()))) + .build(); @BeforeEach public void setUp() { - query = mock(RelevanceQuery.class, withSettings().useConstructor(queryBuildActions) - .defaultAnswer(Mockito.CALLS_REAL_METHODS)); + query = + mock( + RelevanceQuery.class, + withSettings() + .useConstructor(queryBuildActions) + .defaultAnswer(Mockito.CALLS_REAL_METHODS)); queryBuilder = mock(QueryBuilder.class); when(query.createQueryBuilder(any())).thenReturn(queryBuilder); String queryName = "mock_query"; @@ -64,9 +69,13 @@ public void setUp() { @Test void throws_SemanticCheckException_when_same_argument_twice() 
{ - FunctionExpression expr = createCall(List.of(FIELD_ARG, QUERY_ARG, - namedArgument("boost", "2.3"), - namedArgument("boost", "2.4"))); + FunctionExpression expr = + createCall( + List.of( + FIELD_ARG, + QUERY_ARG, + namedArgument("boost", "2.3"), + namedArgument("boost", "2.4"))); SemanticCheckException exception = assertThrows(SemanticCheckException.class, () -> query.build(expr)); assertEquals("Parameter 'boost' can only be specified once.", exception.getMessage()); @@ -79,8 +88,7 @@ void throws_SemanticCheckException_when_wrong_argument_name() { SemanticCheckException exception = assertThrows(SemanticCheckException.class, () -> query.build(expr)); - assertEquals("Parameter wrongarg is invalid for mock_query function.", - exception.getMessage()); + assertEquals("Parameter wrongarg is invalid for mock_query function.", exception.getMessage()); } @Test @@ -95,14 +103,13 @@ void calls_action_when_correct_argument_name() { @ParameterizedTest @MethodSource("insufficientArguments") public void throws_SyntaxCheckException_when_no_required_arguments(List arguments) { - SyntaxCheckException exception = assertThrows(SyntaxCheckException.class, - () -> query.build(createCall(arguments))); + SyntaxCheckException exception = + assertThrows(SyntaxCheckException.class, () -> query.build(createCall(arguments))); assertEquals("mock_query requires at least two parameters", exception.getMessage()); } public static Stream> insufficientArguments() { - return Stream.of(List.of(), - List.of(namedArgument("field", "field_A"))); + return Stream.of(List.of(), List.of(namedArgument("field", "field_A"))); } private static NamedArgumentExpression namedArgument(String field, String fieldValue) { diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQueryTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQueryTest.java index 3628dc8abc..7234ee9275 100644 --- 
a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQueryTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/lucene/relevance/SingleFieldQueryTest.java @@ -26,14 +26,17 @@ class SingleFieldQueryTest { SingleFieldQuery query; private final String testQueryName = "test_query"; - private final Map actionMap - = ImmutableMap.of("paramA", (o, v) -> o); + private final Map actionMap = + ImmutableMap.of("paramA", (o, v) -> o); @BeforeEach void setUp() { - query = mock(SingleFieldQuery.class, - Mockito.withSettings().useConstructor(actionMap) - .defaultAnswer(Mockito.CALLS_REAL_METHODS)); + query = + mock( + SingleFieldQuery.class, + Mockito.withSettings() + .useConstructor(actionMap) + .defaultAnswer(Mockito.CALLS_REAL_METHODS)); when(query.getQueryName()).thenReturn(testQueryName); } @@ -42,15 +45,20 @@ void createQueryBuilderTestTypeTextKeyword() { String sampleQuery = "sample query"; String sampleField = "fieldA"; - query.createQueryBuilder(List.of(DSL.namedArgument("field", - new ReferenceExpression(sampleField, - OpenSearchTextType.of(Map.of("words", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))))), - DSL.namedArgument("query", - new LiteralExpression(ExprValueUtils.stringValue(sampleQuery))))); + query.createQueryBuilder( + List.of( + DSL.namedArgument( + "field", + new ReferenceExpression( + sampleField, + OpenSearchTextType.of( + Map.of( + "words", + OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword))))), + DSL.namedArgument( + "query", new LiteralExpression(ExprValueUtils.stringValue(sampleQuery))))); - verify(query).createBuilder(eq(sampleField), - eq(sampleQuery)); + verify(query).createBuilder(eq(sampleField), eq(sampleQuery)); } @Test @@ -58,12 +66,13 @@ void createQueryBuilderTestTypeText() { String sampleQuery = "sample query"; String sampleField = "fieldA"; - query.createQueryBuilder(List.of(DSL.namedArgument("field", - new 
ReferenceExpression(sampleField, OpenSearchTextType.of())), - DSL.namedArgument("query", - new LiteralExpression(ExprValueUtils.stringValue(sampleQuery))))); + query.createQueryBuilder( + List.of( + DSL.namedArgument( + "field", new ReferenceExpression(sampleField, OpenSearchTextType.of())), + DSL.namedArgument( + "query", new LiteralExpression(ExprValueUtils.stringValue(sampleQuery))))); - verify(query).createBuilder(eq(sampleField), - eq(sampleQuery)); + verify(query).createBuilder(eq(sampleField), eq(sampleQuery)); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilderTest.java index e84ed14e43..89a10ad563 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/sort/SortQueryBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.script.sort; import static org.hamcrest.MatcherAssert.assertThat; @@ -32,10 +31,7 @@ void build_sortbuilder_from_reference() { void build_sortbuilder_from_nested_function() { assertNotNull( sortQueryBuilder.build( - DSL.nested(DSL.ref("message.info", STRING)), - Sort.SortOption.DEFAULT_ASC - ) - ); + DSL.nested(DSL.ref("message.info", STRING)), Sort.SortOption.DEFAULT_ASC)); } @Test @@ -43,63 +39,56 @@ void build_sortbuilder_from_nested_function_with_path_param() { assertNotNull( sortQueryBuilder.build( DSL.nested(DSL.ref("message.info", STRING), DSL.ref("message", STRING)), - Sort.SortOption.DEFAULT_ASC - ) - ); + Sort.SortOption.DEFAULT_ASC)); } @Test void nested_with_too_many_args_throws_exception() { assertThrows( IllegalArgumentException.class, - () -> sortQueryBuilder.build( - DSL.nested( - DSL.ref("message.info", STRING), - DSL.ref("message", STRING), - DSL.ref("message", STRING) 
- ), - Sort.SortOption.DEFAULT_ASC - ) - ); + () -> + sortQueryBuilder.build( + DSL.nested( + DSL.ref("message.info", STRING), + DSL.ref("message", STRING), + DSL.ref("message", STRING)), + Sort.SortOption.DEFAULT_ASC)); } @Test void nested_with_too_few_args_throws_exception() { assertThrows( IllegalArgumentException.class, - () -> sortQueryBuilder.build( - DSL.nested(), - Sort.SortOption.DEFAULT_ASC - ) - ); + () -> sortQueryBuilder.build(DSL.nested(), Sort.SortOption.DEFAULT_ASC)); } @Test void nested_with_invalid_arg_type_throws_exception() { assertThrows( IllegalArgumentException.class, - () -> sortQueryBuilder.build( - DSL.nested( - DSL.literal(1) - ), - Sort.SortOption.DEFAULT_ASC - ) - ); + () -> sortQueryBuilder.build(DSL.nested(DSL.literal(1)), Sort.SortOption.DEFAULT_ASC)); } @Test void build_sortbuilder_from_expression_should_throw_exception() { final IllegalStateException exception = - assertThrows(IllegalStateException.class, () -> sortQueryBuilder.build( - new LiteralExpression(new ExprShortValue(1)), Sort.SortOption.DEFAULT_ASC)); + assertThrows( + IllegalStateException.class, + () -> + sortQueryBuilder.build( + new LiteralExpression(new ExprShortValue(1)), Sort.SortOption.DEFAULT_ASC)); assertThat(exception.getMessage(), Matchers.containsString("unsupported expression")); } @Test void build_sortbuilder_from_function_should_throw_exception() { final IllegalStateException exception = - assertThrows(IllegalStateException.class, () -> sortQueryBuilder.build(DSL.equal(DSL.ref( - "intV", INTEGER), DSL.literal(1)), Sort.SortOption.DEFAULT_ASC)); + assertThrows( + IllegalStateException.class, + () -> + sortQueryBuilder.build( + DSL.equal(DSL.ref("intV", INTEGER), DSL.literal(1)), + Sort.SortOption.DEFAULT_ASC)); assertThat(exception.getMessage(), Matchers.containsString("unsupported expression")); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/serialization/DefaultExpressionSerializerTest.java 
b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/serialization/DefaultExpressionSerializerTest.java index 72a319dbfe..b70595c74b 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/serialization/DefaultExpressionSerializerTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/serialization/DefaultExpressionSerializerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.serialization; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -57,23 +56,25 @@ public void can_serialize_and_deserialize_functions() { @Test public void cannot_serialize_illegal_expression() { - Expression illegalExpr = new Expression() { - private final Object object = new Object(); // non-serializable - @Override - public ExprValue valueOf(Environment valueEnv) { - return null; - } + Expression illegalExpr = + new Expression() { + private final Object object = new Object(); // non-serializable + + @Override + public ExprValue valueOf(Environment valueEnv) { + return null; + } - @Override - public ExprType type() { - return null; - } + @Override + public ExprType type() { + return null; + } - @Override - public T accept(ExpressionNodeVisitor visitor, C context) { - return null; - } - }; + @Override + public T accept(ExpressionNodeVisitor visitor, C context) { + return null; + } + }; assertThrows(IllegalStateException.class, () -> serializer.serialize(illegalExpr)); } @@ -81,5 +82,4 @@ public T accept(ExpressionNodeVisitor visitor, C context) { public void cannot_deserialize_illegal_expression_code() { assertThrows(IllegalStateException.class, () -> serializer.deserialize("hello world")); } - } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScanTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScanTest.java index 494f3ff2d0..00d1c9ecd1 100644 --- 
a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScanTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScanTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.system; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -21,8 +20,7 @@ @ExtendWith(MockitoExtension.class) class OpenSearchSystemIndexScanTest { - @Mock - private OpenSearchSystemRequest request; + @Mock private OpenSearchSystemRequest request; @Test public void queryData() { diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexTest.java index a483f2dad8..1afcfcdc86 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.storage.system; import static org.hamcrest.MatcherAssert.assertThat; @@ -35,29 +34,23 @@ @ExtendWith(MockitoExtension.class) class OpenSearchSystemIndexTest { - @Mock - private OpenSearchClient client; + @Mock private OpenSearchClient client; - @Mock - private Table table; + @Mock private Table table; @Test void testGetFieldTypesOfMetaTable() { OpenSearchSystemIndex systemIndex = new OpenSearchSystemIndex(client, TABLE_INFO); final Map fieldTypes = systemIndex.getFieldTypes(); - assertThat(fieldTypes, anyOf( - hasEntry("TABLE_CAT", STRING) - )); + assertThat(fieldTypes, anyOf(hasEntry("TABLE_CAT", STRING))); } @Test void testGetFieldTypesOfMappingTable() { - OpenSearchSystemIndex systemIndex = new OpenSearchSystemIndex(client, mappingTable( - "test_index")); + OpenSearchSystemIndex systemIndex = + new 
OpenSearchSystemIndex(client, mappingTable("test_index")); final Map fieldTypes = systemIndex.getFieldTypes(); - assertThat(fieldTypes, anyOf( - hasEntry("COLUMN_NAME", STRING) - )); + assertThat(fieldTypes, anyOf(hasEntry("COLUMN_NAME", STRING))); } @Test @@ -69,8 +62,7 @@ void testIsExist() { @Test void testCreateTable() { Table systemIndex = new OpenSearchSystemIndex(client, TABLE_INFO); - assertThrows(UnsupportedOperationException.class, - () -> systemIndex.create(ImmutableMap.of())); + assertThrows(UnsupportedOperationException.class, () -> systemIndex.create(ImmutableMap.of())); } @Test @@ -78,11 +70,8 @@ void implement() { OpenSearchSystemIndex systemIndex = new OpenSearchSystemIndex(client, TABLE_INFO); NamedExpression projectExpr = named("TABLE_NAME", ref("TABLE_NAME", STRING)); - final PhysicalPlan plan = systemIndex.implement( - project( - relation(TABLE_INFO, table), - projectExpr - )); + final PhysicalPlan plan = + systemIndex.implement(project(relation(TABLE_INFO, table), projectExpr)); assertTrue(plan instanceof ProjectOperator); assertTrue(plan.getChild().get(0) instanceof OpenSearchSystemIndexScan); } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/utils/Utils.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/utils/Utils.java index 85b8889de3..0db87f89d4 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/utils/Utils.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/utils/Utils.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.opensearch.utils; import com.google.common.collect.ImmutableSet; @@ -36,15 +35,13 @@ public static List group(NamedExpression... 
exprs) { return Arrays.asList(exprs); } - public static List> sort(Expression expr1, - Sort.SortOption option1) { + public static List> sort( + Expression expr1, Sort.SortOption option1) { return Collections.singletonList(Pair.of(option1, expr1)); } - public static List> sort(Expression expr1, - Sort.SortOption option1, - Expression expr2, - Sort.SortOption option2) { + public static List> sort( + Expression expr1, Sort.SortOption option1, Expression expr2, Sort.SortOption option2) { return Arrays.asList(Pair.of(option1, expr1), Pair.of(option2, expr2)); } diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java b/plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java index 5e156c2f5d..f20de87d61 100644 --- a/plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java +++ b/plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java @@ -94,10 +94,10 @@ public class SQLPlugin extends Plugin implements ActionPlugin, ScriptPlugin { private static final Logger LOGGER = LogManager.getLogger(SQLPlugin.class); private ClusterService clusterService; - /** - * Settings should be inited when bootstrap the plugin. - */ + + /** Settings should be inited when bootstrap the plugin. */ private org.opensearch.sql.common.setting.Settings pluginSettings; + private NodeClient client; private DataSourceServiceImpl dataSourceService; private Injector injector; @@ -134,23 +134,28 @@ public List getRestHandlers( new RestDataSourceQueryAction()); } - /** - * Register action and handler so that transportClient can find proxy for action. - */ + /** Register action and handler so that transportClient can find proxy for action. 
*/ @Override public List> getActions() { return Arrays.asList( new ActionHandler<>( new ActionType<>(PPLQueryAction.NAME, TransportPPLQueryResponse::new), TransportPPLQueryAction.class), - new ActionHandler<>(new ActionType<>(TransportCreateDataSourceAction.NAME, - CreateDataSourceActionResponse::new), TransportCreateDataSourceAction.class), - new ActionHandler<>(new ActionType<>(TransportGetDataSourceAction.NAME, - GetDataSourceActionResponse::new), TransportGetDataSourceAction.class), - new ActionHandler<>(new ActionType<>(TransportUpdateDataSourceAction.NAME, - UpdateDataSourceActionResponse::new), TransportUpdateDataSourceAction.class), - new ActionHandler<>(new ActionType<>(TransportDeleteDataSourceAction.NAME, - DeleteDataSourceActionResponse::new), TransportDeleteDataSourceAction.class)); + new ActionHandler<>( + new ActionType<>( + TransportCreateDataSourceAction.NAME, CreateDataSourceActionResponse::new), + TransportCreateDataSourceAction.class), + new ActionHandler<>( + new ActionType<>(TransportGetDataSourceAction.NAME, GetDataSourceActionResponse::new), + TransportGetDataSourceAction.class), + new ActionHandler<>( + new ActionType<>( + TransportUpdateDataSourceAction.NAME, UpdateDataSourceActionResponse::new), + TransportUpdateDataSourceAction.class), + new ActionHandler<>( + new ActionType<>( + TransportDeleteDataSourceAction.NAME, DeleteDataSourceActionResponse::new), + TransportDeleteDataSourceAction.class)); } @Override @@ -176,11 +181,12 @@ public Collection createComponents( ModulesBuilder modules = new ModulesBuilder(); modules.add(new OpenSearchPluginModule()); - modules.add(b -> { - b.bind(NodeClient.class).toInstance((NodeClient) client); - b.bind(org.opensearch.sql.common.setting.Settings.class).toInstance(pluginSettings); - b.bind(DataSourceService.class).toInstance(dataSourceService); - }); + modules.add( + b -> { + b.bind(NodeClient.class).toInstance((NodeClient) client); + 
b.bind(org.opensearch.sql.common.setting.Settings.class).toInstance(pluginSettings); + b.bind(DataSourceService.class).toInstance(dataSourceService); + }); injector = modules.createInjector(); return ImmutableList.of(dataSourceService); @@ -212,30 +218,31 @@ public ScriptEngine getScriptEngine(Settings settings, Collection() - .add(new OpenSearchDataSourceFactory( - new OpenSearchNodeClient(this.client), pluginSettings)) + .add( + new OpenSearchDataSourceFactory( + new OpenSearchNodeClient(this.client), pluginSettings)) .add(new PrometheusStorageFactory(pluginSettings)) .add(new SparkStorageFactory(this.client, pluginSettings)) .build(), dataSourceMetadataStorage, dataSourceUserAuthorizationHelper); } - } diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/config/OpenSearchPluginModule.java b/plugin/src/main/java/org/opensearch/sql/plugin/config/OpenSearchPluginModule.java index f301a242fb..33a785c498 100644 --- a/plugin/src/main/java/org/opensearch/sql/plugin/config/OpenSearchPluginModule.java +++ b/plugin/src/main/java/org/opensearch/sql/plugin/config/OpenSearchPluginModule.java @@ -45,8 +45,7 @@ public class OpenSearchPluginModule extends AbstractModule { BuiltinFunctionRepository.getInstance(); @Override - protected void configure() { - } + protected void configure() {} @Provides public OpenSearchClient openSearchClient(NodeClient nodeClient) { @@ -59,8 +58,8 @@ public StorageEngine storageEngine(OpenSearchClient client, Settings settings) { } @Provides - public ExecutionEngine executionEngine(OpenSearchClient client, ExecutionProtector protector, - PlanSerializer planSerializer) { + public ExecutionEngine executionEngine( + OpenSearchClient client, ExecutionProtector protector, PlanSerializer planSerializer) { return new OpenSearchExecutionEngine(client, protector, planSerializer); } @@ -95,18 +94,15 @@ public SQLService sqlService(QueryManager queryManager, QueryPlanFactory queryPl return new SQLService(new SQLSyntaxParser(), queryManager, 
queryPlanFactory); } - /** - * {@link QueryPlanFactory}. - */ + /** {@link QueryPlanFactory}. */ @Provides - public QueryPlanFactory queryPlanFactory(DataSourceService dataSourceService, - ExecutionEngine executionEngine) { + public QueryPlanFactory queryPlanFactory( + DataSourceService dataSourceService, ExecutionEngine executionEngine) { Analyzer analyzer = new Analyzer( new ExpressionAnalyzer(functionRepository), dataSourceService, functionRepository); Planner planner = new Planner(LogicalPlanOptimizer.create()); - QueryService queryService = new QueryService( - analyzer, executionEngine, planner); + QueryService queryService = new QueryService(analyzer, executionEngine, planner); return new QueryPlanFactory(queryService); } } diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/request/PPLQueryRequestFactory.java b/plugin/src/main/java/org/opensearch/sql/plugin/request/PPLQueryRequestFactory.java index 730da0e923..ad734bf150 100644 --- a/plugin/src/main/java/org/opensearch/sql/plugin/request/PPLQueryRequestFactory.java +++ b/plugin/src/main/java/org/opensearch/sql/plugin/request/PPLQueryRequestFactory.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.plugin.request; import java.util.Map; @@ -15,9 +14,7 @@ import org.opensearch.sql.protocol.response.format.Format; import org.opensearch.sql.protocol.response.format.JsonResponseFormatter; -/** - * Factory of {@link PPLQueryRequest}. - */ +/** Factory of {@link PPLQueryRequest}. */ public class PPLQueryRequestFactory { private static final String PPL_URL_PARAM_KEY = "ppl"; private static final String PPL_FIELD_NAME = "query"; @@ -28,6 +25,7 @@ public class PPLQueryRequestFactory { /** * Build {@link PPLQueryRequest} from {@link RestRequest}. 
+ * * @param request {@link PPLQueryRequest} * @return {@link RestRequest} */ @@ -63,8 +61,12 @@ private static PPLQueryRequest parsePPLRequestFromPayload(RestRequest restReques } catch (JSONException e) { throw new IllegalArgumentException("Failed to parse request payload", e); } - PPLQueryRequest pplRequest = new PPLQueryRequest(jsonContent.getString(PPL_FIELD_NAME), - jsonContent, restRequest.path(), format.getFormatName()); + PPLQueryRequest pplRequest = + new PPLQueryRequest( + jsonContent.getString(PPL_FIELD_NAME), + jsonContent, + restRequest.path(), + format.getFormatName()); // set sanitize option if csv format if (format.equals(Format.CSV)) { pplRequest.sanitize(getSanitizeOption(restRequest.params())); diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestPPLQueryAction.java b/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestPPLQueryAction.java index 55f8dfdfef..996ae8c700 100644 --- a/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestPPLQueryAction.java +++ b/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestPPLQueryAction.java @@ -102,14 +102,17 @@ protected Set responseParams() { protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient nodeClient) { // TODO: need move to transport Action if (!pplEnabled.get()) { - return channel -> reportError(channel, new IllegalAccessException( - "Either plugins.ppl.enabled or rest.action.multi.allow_explicit_index setting is false"), - BAD_REQUEST); + return channel -> + reportError( + channel, + new IllegalAccessException( + "Either plugins.ppl.enabled or rest.action.multi.allow_explicit_index setting is" + + " false"), + BAD_REQUEST); } - TransportPPLQueryRequest transportPPLQueryRequest = new TransportPPLQueryRequest( - PPLQueryRequestFactory.getPPLRequest(request) - ); + TransportPPLQueryRequest transportPPLQueryRequest = + new TransportPPLQueryRequest(PPLQueryRequestFactory.getPPLRequest(request)); return channel -> nodeClient.execute( diff --git 
a/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestPPLStatsAction.java b/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestPPLStatsAction.java index ef9f68a2a7..7a51fc282b 100644 --- a/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestPPLStatsAction.java +++ b/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestPPLStatsAction.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.plugin.rest; import static org.opensearch.core.rest.RestStatus.SERVICE_UNAVAILABLE; @@ -26,17 +25,14 @@ import org.opensearch.sql.legacy.executor.format.ErrorMessageFactory; import org.opensearch.sql.legacy.metrics.Metrics; -/** - * PPL Node level status. - */ +/** PPL Node level status. */ public class RestPPLStatsAction extends BaseRestHandler { private static final Logger LOG = LogManager.getLogger(RestPPLStatsAction.class); - /** - * API endpoint path. - */ + /** API endpoint path. */ public static final String PPL_STATS_API_ENDPOINT = "/_plugins/_ppl/stats"; + public static final String PPL_LEGACY_STATS_API_ENDPOINT = "/_opendistro/_ppl/stats"; public RestPPLStatsAction(Settings settings, RestController restController) { @@ -70,13 +66,18 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli QueryContext.addRequestId(); try { - return channel -> channel.sendResponse(new BytesRestResponse(RestStatus.OK, - Metrics.getInstance().collectToJSON())); + return channel -> + channel.sendResponse( + new BytesRestResponse(RestStatus.OK, Metrics.getInstance().collectToJSON())); } catch (Exception e) { LOG.error("Failed during Query PPL STATS Action.", e); - return channel -> channel.sendResponse(new BytesRestResponse(SERVICE_UNAVAILABLE, - ErrorMessageFactory.createErrorMessage(e, SERVICE_UNAVAILABLE.getStatus()).toString())); + return channel -> + channel.sendResponse( + new BytesRestResponse( + SERVICE_UNAVAILABLE, + ErrorMessageFactory.createErrorMessage(e, SERVICE_UNAVAILABLE.getStatus()) + .toString())); 
} } diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestQuerySettingsAction.java b/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestQuerySettingsAction.java index b15b4dddd6..885c953c17 100644 --- a/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestQuerySettingsAction.java +++ b/plugin/src/main/java/org/opensearch/sql/plugin/rest/RestQuerySettingsAction.java @@ -39,9 +39,14 @@ public class RestQuerySettingsAction extends BaseRestHandler { private static final String LEGACY_SQL_SETTINGS_PREFIX = "opendistro.sql."; private static final String LEGACY_PPL_SETTINGS_PREFIX = "opendistro.ppl."; private static final String LEGACY_COMMON_SETTINGS_PREFIX = "opendistro.query."; - private static final List SETTINGS_PREFIX = ImmutableList.of( - SQL_SETTINGS_PREFIX, PPL_SETTINGS_PREFIX, COMMON_SETTINGS_PREFIX, - LEGACY_SQL_SETTINGS_PREFIX, LEGACY_PPL_SETTINGS_PREFIX, LEGACY_COMMON_SETTINGS_PREFIX); + private static final List SETTINGS_PREFIX = + ImmutableList.of( + SQL_SETTINGS_PREFIX, + PPL_SETTINGS_PREFIX, + COMMON_SETTINGS_PREFIX, + LEGACY_SQL_SETTINGS_PREFIX, + LEGACY_PPL_SETTINGS_PREFIX, + LEGACY_COMMON_SETTINGS_PREFIX); public static final String SETTINGS_API_ENDPOINT = "/_plugins/_query/settings"; public static final String LEGACY_SQL_SETTINGS_API_ENDPOINT = "/_opendistro/_sql/settings"; @@ -75,10 +80,11 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli QueryContext.addRequestId(); final ClusterUpdateSettingsRequest clusterUpdateSettingsRequest = Requests.clusterUpdateSettingsRequest(); - clusterUpdateSettingsRequest.timeout(request.paramAsTime( - "timeout", clusterUpdateSettingsRequest.timeout())); - clusterUpdateSettingsRequest.clusterManagerNodeTimeout(request.paramAsTime( - "cluster_manager_timeout", clusterUpdateSettingsRequest.clusterManagerNodeTimeout())); + clusterUpdateSettingsRequest.timeout( + request.paramAsTime("timeout", clusterUpdateSettingsRequest.timeout())); + 
clusterUpdateSettingsRequest.clusterManagerNodeTimeout( + request.paramAsTime( + "cluster_manager_timeout", clusterUpdateSettingsRequest.clusterManagerNodeTimeout())); Map source; try (XContentParser parser = request.contentParser()) { source = parser.map(); @@ -86,20 +92,27 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli try { if (source.containsKey(TRANSIENT)) { - clusterUpdateSettingsRequest.transientSettings(getAndFilterSettings( - (Map) source.get(TRANSIENT))); + clusterUpdateSettingsRequest.transientSettings( + getAndFilterSettings((Map) source.get(TRANSIENT))); } if (source.containsKey(PERSISTENT)) { - clusterUpdateSettingsRequest.persistentSettings(getAndFilterSettings( - (Map) source.get(PERSISTENT))); + clusterUpdateSettingsRequest.persistentSettings( + getAndFilterSettings((Map) source.get(PERSISTENT))); } - return channel -> client.admin().cluster().updateSettings( - clusterUpdateSettingsRequest, new RestToXContentListener<>(channel)); + return channel -> + client + .admin() + .cluster() + .updateSettings(clusterUpdateSettingsRequest, new RestToXContentListener<>(channel)); } catch (Exception e) { LOG.error("Error changing OpenSearch SQL plugin cluster settings", e); - return channel -> channel.sendResponse(new BytesRestResponse(INTERNAL_SERVER_ERROR, - ErrorMessageFactory.createErrorMessage(e, INTERNAL_SERVER_ERROR.getStatus()).toString())); + return channel -> + channel.sendResponse( + new BytesRestResponse( + INTERNAL_SERVER_ERROR, + ErrorMessageFactory.createErrorMessage(e, INTERNAL_SERVER_ERROR.getStatus()) + .toString())); } } @@ -107,16 +120,19 @@ private Settings getAndFilterSettings(Map source) { try { XContentBuilder builder = XContentFactory.jsonBuilder(); builder.map(source); - Settings.Builder settingsBuilder = Settings.builder() - .loadFromSource(builder.toString(), builder.contentType()); - settingsBuilder.keys().removeIf(key -> { - for (String prefix : SETTINGS_PREFIX) { - if (key.startsWith(prefix)) { 
- return false; - } - } - return true; - }); + Settings.Builder settingsBuilder = + Settings.builder().loadFromSource(builder.toString(), builder.contentType()); + settingsBuilder + .keys() + .removeIf( + key -> { + for (String prefix : SETTINGS_PREFIX) { + if (key.startsWith(prefix)) { + return false; + } + } + return true; + }); return settingsBuilder.build(); } catch (IOException e) { throw new OpenSearchGenerationException("Failed to generate [" + source + "]", e); diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/transport/TransportPPLQueryAction.java b/plugin/src/main/java/org/opensearch/sql/plugin/transport/TransportPPLQueryAction.java index 8a9d276673..fde9e24f75 100644 --- a/plugin/src/main/java/org/opensearch/sql/plugin/transport/TransportPPLQueryAction.java +++ b/plugin/src/main/java/org/opensearch/sql/plugin/transport/TransportPPLQueryAction.java @@ -139,8 +139,8 @@ private ResponseListener createListener( @Override public void onResponse(ExecutionEngine.QueryResponse response) { String responseContent = - formatter.format(new QueryResult(response.getSchema(), response.getResults(), - response.getCursor())); + formatter.format( + new QueryResult(response.getSchema(), response.getResults(), response.getCursor())); listener.onResponse(new TransportPPLQueryResponse(responseContent)); } diff --git a/plugin/src/test/java/org/opensearch/sql/plugin/transport/TransportPPLQueryRequestTest.java b/plugin/src/test/java/org/opensearch/sql/plugin/transport/TransportPPLQueryRequestTest.java index 0e5d99ae35..286ac20fed 100644 --- a/plugin/src/test/java/org/opensearch/sql/plugin/transport/TransportPPLQueryRequestTest.java +++ b/plugin/src/test/java/org/opensearch/sql/plugin/transport/TransportPPLQueryRequestTest.java @@ -59,9 +59,7 @@ public void writeTo(StreamOutput out) throws IOException { @Test public void testCustomizedNullJSONContentActionRequestFromActionRequest() { - TransportPPLQueryRequest request = new TransportPPLQueryRequest( - "source=t a=1", 
null, null - ); + TransportPPLQueryRequest request = new TransportPPLQueryRequest("source=t a=1", null, null); ActionRequest actionRequest = new ActionRequest() { @Override diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/PPLService.java b/ppl/src/main/java/org/opensearch/sql/ppl/PPLService.java index 40a7a85f78..7769f5dfae 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/PPLService.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/PPLService.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.opensearch.sql.executor.ExecutionEngine.QueryResponse; @@ -27,9 +26,7 @@ import org.opensearch.sql.ppl.parser.AstStatementBuilder; import org.opensearch.sql.ppl.utils.PPLQueryDataAnonymizer; -/** - * PPLService. - */ +/** PPLService. */ @RequiredArgsConstructor public class PPLService { private final PPLSyntaxParser parser; @@ -45,7 +42,7 @@ public class PPLService { /** * Execute the {@link PPLQueryRequest}, using {@link ResponseListener} to get response. * - * @param request {@link PPLQueryRequest} + * @param request {@link PPLQueryRequest} * @param listener {@link ResponseListener} */ public void execute(PPLQueryRequest request, ResponseListener listener) { @@ -57,10 +54,10 @@ public void execute(PPLQueryRequest request, ResponseListener lis } /** - * Explain the query in {@link PPLQueryRequest} using {@link ResponseListener} to - * get and format explain response. + * Explain the query in {@link PPLQueryRequest} using {@link ResponseListener} to get and format + * explain response. 
* - * @param request {@link PPLQueryRequest} + * @param request {@link PPLQueryRequest} * @param listener {@link ResponseListener} for explain response */ public void explain(PPLQueryRequest request, ResponseListener listener) { @@ -90,7 +87,6 @@ private AbstractPlan plan( QueryContext.getRequestId(), anonymizer.anonymizeStatement(statement)); - return queryExecutionFactory.create( - statement, queryListener, explainListener); + return queryExecutionFactory.create(statement, queryListener, explainListener); } } diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParser.java b/ppl/src/main/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParser.java index 168ba33a8a..1d4485e749 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParser.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.antlr; import org.antlr.v4.runtime.CommonTokenStream; @@ -15,13 +14,9 @@ import org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLLexer; import org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser; -/** - * PPL Syntax Parser. - */ +/** PPL Syntax Parser. */ public class PPLSyntaxParser implements Parser { - /** - * Analyze the query syntax. - */ + /** Analyze the query syntax. 
*/ @Override public ParseTree parse(String query) { OpenSearchPPLParser parser = createParser(createLexer(query)); @@ -30,12 +25,10 @@ public ParseTree parse(String query) { } private OpenSearchPPLParser createParser(Lexer lexer) { - return new OpenSearchPPLParser( - new CommonTokenStream(lexer)); + return new OpenSearchPPLParser(new CommonTokenStream(lexer)); } private OpenSearchPPLLexer createLexer(String query) { - return new OpenSearchPPLLexer( - new CaseInsensitiveCharStream(query)); + return new OpenSearchPPLLexer(new CaseInsensitiveCharStream(query)); } } diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/domain/PPLQueryRequest.java b/ppl/src/main/java/org/opensearch/sql/ppl/domain/PPLQueryRequest.java index 87532e01d0..ca351fcc0a 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/domain/PPLQueryRequest.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/domain/PPLQueryRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.domain; import java.util.Locale; @@ -22,12 +21,9 @@ public class PPLQueryRequest { public static final PPLQueryRequest NULL = new PPLQueryRequest("", null, DEFAULT_PPL_PATH, ""); private final String pplQuery; - @Getter - private final JSONObject jsonContent; - @Getter - private final String path; - @Getter - private String format = ""; + @Getter private final JSONObject jsonContent; + @Getter private final String path; + @Getter private String format = ""; @Setter @Getter @@ -43,9 +39,7 @@ public PPLQueryRequest(String pplQuery, JSONObject jsonContent, String path) { this(pplQuery, jsonContent, path, ""); } - /** - * Constructor of PPLQueryRequest. - */ + /** Constructor of PPLQueryRequest. */ public PPLQueryRequest(String pplQuery, JSONObject jsonContent, String path, String format) { this.pplQuery = pplQuery; this.jsonContent = jsonContent; @@ -59,23 +53,21 @@ public String getRequest() { /** * Check if request is to explain rather than execute the query. 
- * @return true if it is a explain request + * + * @return true if it is a explain request */ public boolean isExplainRequest() { return path.endsWith("/_explain"); } - /** - * Decide on the formatter by the requested format. - */ + /** Decide on the formatter by the requested format. */ public Format format() { Optional optionalFormat = Format.of(format); if (optionalFormat.isPresent()) { return optionalFormat.get(); } else { throw new IllegalArgumentException( - String.format(Locale.ROOT,"response in %s format is not supported.", format)); + String.format(Locale.ROOT, "response in %s format is not supported.", format)); } } - } diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/domain/PPLQueryResponse.java b/ppl/src/main/java/org/opensearch/sql/ppl/domain/PPLQueryResponse.java index 483726702a..5cae8e8f06 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/domain/PPLQueryResponse.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/domain/PPLQueryResponse.java @@ -3,8 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.domain; -public class PPLQueryResponse { -} +public class PPLQueryResponse {} diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstBuilder.java b/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstBuilder.java index 323f99a5af..9419d8110b 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstBuilder.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.parser; import static org.opensearch.sql.ast.dsl.AstDSL.qualifiedName; @@ -74,33 +73,25 @@ import org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParserBaseVisitor; import org.opensearch.sql.ppl.utils.ArgumentFactory; -/** - * Class of building the AST. - * Refines the visit path and build the AST nodes - */ +/** Class of building the AST. 
Refines the visit path and build the AST nodes */ @RequiredArgsConstructor public class AstBuilder extends OpenSearchPPLParserBaseVisitor { private final AstExpressionBuilder expressionBuilder; /** - * PPL query to get original token text. This is necessary because token.getText() returns - * text without whitespaces or other characters discarded by lexer. + * PPL query to get original token text. This is necessary because token.getText() returns text + * without whitespaces or other characters discarded by lexer. */ private final String query; @Override public UnresolvedPlan visitQueryStatement(OpenSearchPPLParser.QueryStatementContext ctx) { UnresolvedPlan pplCommand = visit(ctx.pplCommands()); - return ctx.commands() - .stream() - .map(this::visit) - .reduce(pplCommand, (r, e) -> e.attach(r)); + return ctx.commands().stream().map(this::visit).reduce(pplCommand, (r, e) -> e.attach(r)); } - /** - * Search command. - */ + /** Search command. */ @Override public UnresolvedPlan visitSearchFrom(SearchFromContext ctx) { return visitFromClause(ctx.fromClause()); @@ -108,23 +99,21 @@ public UnresolvedPlan visitSearchFrom(SearchFromContext ctx) { @Override public UnresolvedPlan visitSearchFromFilter(SearchFromFilterContext ctx) { - return new Filter(internalVisitExpression(ctx.logicalExpression())).attach( - visit(ctx.fromClause())); + return new Filter(internalVisitExpression(ctx.logicalExpression())) + .attach(visit(ctx.fromClause())); } @Override public UnresolvedPlan visitSearchFilterFrom(SearchFilterFromContext ctx) { - return new Filter(internalVisitExpression(ctx.logicalExpression())).attach( - visit(ctx.fromClause())); + return new Filter(internalVisitExpression(ctx.logicalExpression())) + .attach(visit(ctx.fromClause())); } /** - * Describe command. - * Current logic separates table and metadata info about table by adding - * MAPPING_ODFE_SYS_TABLE as suffix. 
- * Even with the introduction of datasource and schema name in fully qualified table name, - * we do the same thing by appending MAPPING_ODFE_SYS_TABLE as syffix to the last part - * of qualified name. + * Describe command. Current logic separates table and metadata info about table by adding + * MAPPING_ODFE_SYS_TABLE as suffix. Even with the introduction of datasource and schema name in + * fully qualified table name, we do the same thing by appending MAPPING_ODFE_SYS_TABLE as syffix + * to the last part of qualified name. */ @Override public UnresolvedPlan visitDescribeCommand(DescribeCommandContext ctx) { @@ -135,63 +124,52 @@ public UnresolvedPlan visitDescribeCommand(DescribeCommandContext ctx) { return new Relation(new QualifiedName(parts)); } - /** - * Show command. - */ + /** Show command. */ @Override public UnresolvedPlan visitShowDataSourcesCommand( OpenSearchPPLParser.ShowDataSourcesCommandContext ctx) { return new Relation(qualifiedName(DATASOURCES_TABLE_NAME)); } - - /** - * Where command. - */ + /** Where command. */ @Override public UnresolvedPlan visitWhereCommand(WhereCommandContext ctx) { return new Filter(internalVisitExpression(ctx.logicalExpression())); } - /** - * Fields command. - */ + /** Fields command. */ @Override public UnresolvedPlan visitFieldsCommand(FieldsCommandContext ctx) { return new Project( - ctx.fieldList() - .fieldExpression() - .stream() + ctx.fieldList().fieldExpression().stream() .map(this::internalVisitExpression) .collect(Collectors.toList()), - ArgumentFactory.getArgumentList(ctx) - ); + ArgumentFactory.getArgumentList(ctx)); } - /** - * Rename command. - */ + /** Rename command. 
*/ @Override public UnresolvedPlan visitRenameCommand(RenameCommandContext ctx) { return new Rename( - ctx.renameClasue() - .stream() - .map(ct -> new Map(internalVisitExpression(ct.orignalField), - internalVisitExpression(ct.renamedField))) - .collect(Collectors.toList()) - ); + ctx.renameClasue().stream() + .map( + ct -> + new Map( + internalVisitExpression(ct.orignalField), + internalVisitExpression(ct.renamedField))) + .collect(Collectors.toList())); } - /** - * Stats command. - */ + /** Stats command. */ @Override public UnresolvedPlan visitStatsCommand(StatsCommandContext ctx) { ImmutableList.Builder aggListBuilder = new ImmutableList.Builder<>(); for (OpenSearchPPLParser.StatsAggTermContext aggCtx : ctx.statsAggTerm()) { UnresolvedExpression aggExpression = internalVisitExpression(aggCtx.statsFunction()); - String name = aggCtx.alias == null ? getTextInQuery(aggCtx) : StringUtils - .unquoteIdentifier(aggCtx.alias.getText()); + String name = + aggCtx.alias == null + ? getTextInQuery(aggCtx) + : StringUtils.unquoteIdentifier(aggCtx.alias.getText()); Alias alias = new Alias(name, aggExpression); aggListBuilder.add(alias); } @@ -199,12 +177,16 @@ public UnresolvedPlan visitStatsCommand(StatsCommandContext ctx) { List groupList = Optional.ofNullable(ctx.statsByClause()) .map(OpenSearchPPLParser.StatsByClauseContext::fieldList) - .map(expr -> expr.fieldExpression().stream() - .map(groupCtx -> - (UnresolvedExpression) new Alias( - StringUtils.unquoteIdentifier(getTextInQuery(groupCtx)), - internalVisitExpression(groupCtx))) - .collect(Collectors.toList())) + .map( + expr -> + expr.fieldExpression().stream() + .map( + groupCtx -> + (UnresolvedExpression) + new Alias( + StringUtils.unquoteIdentifier(getTextInQuery(groupCtx)), + internalVisitExpression(groupCtx))) + .collect(Collectors.toList())) .orElse(Collections.emptyList()); UnresolvedExpression span = @@ -213,30 +195,23 @@ public UnresolvedPlan visitStatsCommand(StatsCommandContext ctx) { 
.map(this::internalVisitExpression) .orElse(null); - Aggregation aggregation = new Aggregation( - aggListBuilder.build(), - Collections.emptyList(), - groupList, - span, - ArgumentFactory.getArgumentList(ctx) - ); + Aggregation aggregation = + new Aggregation( + aggListBuilder.build(), + Collections.emptyList(), + groupList, + span, + ArgumentFactory.getArgumentList(ctx)); return aggregation; } - /** - * Dedup command. - */ + /** Dedup command. */ @Override public UnresolvedPlan visitDedupCommand(DedupCommandContext ctx) { - return new Dedupe( - ArgumentFactory.getArgumentList(ctx), - getFieldList(ctx.fieldList()) - ); + return new Dedupe(ArgumentFactory.getArgumentList(ctx), getFieldList(ctx.fieldList())); } - /** - * Head command visitor. - */ + /** Head command visitor. */ @Override public UnresolvedPlan visitHeadCommand(HeadCommandContext ctx) { Integer size = ctx.number != null ? Integer.parseInt(ctx.number.getText()) : 10; @@ -244,58 +219,46 @@ public UnresolvedPlan visitHeadCommand(HeadCommandContext ctx) { return new Head(size, from); } - /** - * Sort command. - */ + /** Sort command. */ @Override public UnresolvedPlan visitSortCommand(SortCommandContext ctx) { return new Sort( - ctx.sortbyClause() - .sortField() - .stream() + ctx.sortbyClause().sortField().stream() .map(sort -> (Field) internalVisitExpression(sort)) - .collect(Collectors.toList()) - ); + .collect(Collectors.toList())); } - /** - * Eval command. - */ + /** Eval command. 
*/ @Override public UnresolvedPlan visitEvalCommand(EvalCommandContext ctx) { return new Eval( - ctx.evalClause() - .stream() + ctx.evalClause().stream() .map(ct -> (Let) internalVisitExpression(ct)) - .collect(Collectors.toList()) - ); + .collect(Collectors.toList())); } private List getGroupByList(ByClauseContext ctx) { - return ctx.fieldList().fieldExpression().stream().map(this::internalVisitExpression) + return ctx.fieldList().fieldExpression().stream() + .map(this::internalVisitExpression) .collect(Collectors.toList()); } private List getFieldList(FieldListContext ctx) { - return ctx.fieldExpression() - .stream() + return ctx.fieldExpression().stream() .map(field -> (Field) internalVisitExpression(field)) .collect(Collectors.toList()); } - /** - * Rare command. - */ + /** Rare command. */ @Override public UnresolvedPlan visitRareCommand(RareCommandContext ctx) { - List groupList = ctx.byClause() == null ? Collections.emptyList() : - getGroupByList(ctx.byClause()); + List groupList = + ctx.byClause() == null ? Collections.emptyList() : getGroupByList(ctx.byClause()); return new RareTopN( CommandType.RARE, ArgumentFactory.getArgumentList(ctx), getFieldList(ctx.fieldList()), - groupList - ); + groupList); } @Override @@ -319,34 +282,31 @@ public UnresolvedPlan visitPatternsCommand(OpenSearchPPLParser.PatternsCommandCo UnresolvedExpression sourceField = internalVisitExpression(ctx.source_field); ImmutableMap.Builder builder = ImmutableMap.builder(); ctx.patternsParameter() - .forEach(x -> { - builder.put(x.children.get(0).toString(), - (Literal) internalVisitExpression(x.children.get(2))); - }); + .forEach( + x -> { + builder.put( + x.children.get(0).toString(), + (Literal) internalVisitExpression(x.children.get(2))); + }); java.util.Map arguments = builder.build(); Literal pattern = arguments.getOrDefault("pattern", AstDSL.stringLiteral("")); return new Parse(ParseMethod.PATTERNS, sourceField, pattern, arguments); } - /** - * Top command. - */ + /** Top command. 
*/ @Override public UnresolvedPlan visitTopCommand(TopCommandContext ctx) { - List groupList = ctx.byClause() == null ? Collections.emptyList() : - getGroupByList(ctx.byClause()); + List groupList = + ctx.byClause() == null ? Collections.emptyList() : getGroupByList(ctx.byClause()); return new RareTopN( CommandType.TOP, ArgumentFactory.getArgumentList(ctx), getFieldList(ctx.fieldList()), - groupList - ); + groupList); } - /** - * From clause. - */ + /** From clause. */ @Override public UnresolvedPlan visitFromClause(FromClauseContext ctx) { if (ctx.tableFunction() != null) { @@ -358,34 +318,31 @@ public UnresolvedPlan visitFromClause(FromClauseContext ctx) { @Override public UnresolvedPlan visitTableSourceClause(TableSourceClauseContext ctx) { - return new Relation(ctx.tableSource() - .stream().map(this::internalVisitExpression) - .collect(Collectors.toList())); + return new Relation( + ctx.tableSource().stream().map(this::internalVisitExpression).collect(Collectors.toList())); } @Override public UnresolvedPlan visitTableFunction(TableFunctionContext ctx) { ImmutableList.Builder builder = ImmutableList.builder(); - ctx.functionArgs().functionArg().forEach(arg - -> { - String argName = (arg.ident() != null) ? arg.ident().getText() : null; - builder.add( - new UnresolvedArgument(argName, - this.internalVisitExpression(arg.valueExpression()))); - }); + ctx.functionArgs() + .functionArg() + .forEach( + arg -> { + String argName = (arg.ident() != null) ? arg.ident().getText() : null; + builder.add( + new UnresolvedArgument( + argName, this.internalVisitExpression(arg.valueExpression()))); + }); return new TableFunction(this.internalVisitExpression(ctx.qualifiedName()), builder.build()); } - /** - * Navigate to & build AST expression. - */ + /** Navigate to & build AST expression. */ private UnresolvedExpression internalVisitExpression(ParseTree tree) { return expressionBuilder.visit(tree); } - /** - * Simply return non-default value for now. 
- */ + /** Simply return non-default value for now. */ @Override protected UnresolvedPlan aggregateResult(UnresolvedPlan aggregate, UnresolvedPlan nextResult) { if (nextResult != defaultResult()) { @@ -394,52 +351,48 @@ protected UnresolvedPlan aggregateResult(UnresolvedPlan aggregate, UnresolvedPla return aggregate; } - /** - * Kmeans command. - */ + /** Kmeans command. */ @Override public UnresolvedPlan visitKmeansCommand(KmeansCommandContext ctx) { ImmutableMap.Builder builder = ImmutableMap.builder(); ctx.kmeansParameter() - .forEach(x -> { - builder.put(x.children.get(0).toString(), - (Literal) internalVisitExpression(x.children.get(2))); - }); + .forEach( + x -> { + builder.put( + x.children.get(0).toString(), + (Literal) internalVisitExpression(x.children.get(2))); + }); return new Kmeans(builder.build()); } - /** - * AD command. - */ + /** AD command. */ @Override public UnresolvedPlan visitAdCommand(AdCommandContext ctx) { ImmutableMap.Builder builder = ImmutableMap.builder(); ctx.adParameter() - .forEach(x -> { - builder.put(x.children.get(0).toString(), - (Literal) internalVisitExpression(x.children.get(2))); - }); + .forEach( + x -> { + builder.put( + x.children.get(0).toString(), + (Literal) internalVisitExpression(x.children.get(2))); + }); return new AD(builder.build()); } - /** - * ml command. - */ + /** ml command. */ @Override public UnresolvedPlan visitMlCommand(OpenSearchPPLParser.MlCommandContext ctx) { ImmutableMap.Builder builder = ImmutableMap.builder(); ctx.mlArg() - .forEach(x -> { - builder.put(x.argName.getText(), - (Literal) internalVisitExpression(x.argValue)); - }); + .forEach( + x -> { + builder.put(x.argName.getText(), (Literal) internalVisitExpression(x.argValue)); + }); return new ML(builder.build()); } - /** - * Get original text in query. - */ + /** Get original text in query. 
*/ private String getTextInQuery(ParserRuleContext ctx) { Token start = ctx.getStart(); Token stop = ctx.getStop(); diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstExpressionBuilder.java b/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstExpressionBuilder.java index c775747ec4..690e45d67c 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstExpressionBuilder.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstExpressionBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.parser; import static org.opensearch.sql.expression.function.BuiltinFunctionName.IS_NOT_NULL; @@ -83,33 +82,25 @@ import org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParserBaseVisitor; import org.opensearch.sql.ppl.utils.ArgumentFactory; -/** - * Class of building AST Expression nodes. - */ +/** Class of building AST Expression nodes. */ public class AstExpressionBuilder extends OpenSearchPPLParserBaseVisitor { private static final int DEFAULT_TAKE_FUNCTION_SIZE_VALUE = 10; - /** - * The function name mapping between fronted and core engine. - */ + /** The function name mapping between fronted and core engine. */ private static Map FUNCTION_NAME_MAPPING = new ImmutableMap.Builder() .put("isnull", IS_NULL.getName().getFunctionName()) .put("isnotnull", IS_NOT_NULL.getName().getFunctionName()) .build(); - /** - * Eval clause. - */ + /** Eval clause. */ @Override public UnresolvedExpression visitEvalClause(EvalClauseContext ctx) { return new Let((Field) visit(ctx.fieldExpression()), visit(ctx.expression())); } - /** - * Logical expression excluding boolean, comparison. - */ + /** Logical expression excluding boolean, comparison. 
*/ @Override public UnresolvedExpression visitLogicalNot(LogicalNotContext ctx) { return new Not(visit(ctx.logicalExpression())); @@ -130,9 +121,7 @@ public UnresolvedExpression visitLogicalXor(LogicalXorContext ctx) { return new Xor(visit(ctx.left), visit(ctx.right)); } - /** - * Comparison expression. - */ + /** Comparison expression. */ @Override public UnresolvedExpression visitCompareExpr(CompareExprContext ctx) { return new Compare(ctx.comparisonOperator().getText(), visit(ctx.left), visit(ctx.right)); @@ -142,22 +131,16 @@ public UnresolvedExpression visitCompareExpr(CompareExprContext ctx) { public UnresolvedExpression visitInExpr(InExprContext ctx) { return new In( visit(ctx.valueExpression()), - ctx.valueList() - .literalValue() - .stream() + ctx.valueList().literalValue().stream() .map(this::visitLiteralValue) .collect(Collectors.toList())); } - /** - * Value Expression. - */ + /** Value Expression. */ @Override public UnresolvedExpression visitBinaryArithmetic(BinaryArithmeticContext ctx) { return new Function( - ctx.binaryOperator.getText(), - Arrays.asList(visit(ctx.left), visit(ctx.right)) - ); + ctx.binaryOperator.getText(), Arrays.asList(visit(ctx.left), visit(ctx.right))); } @Override @@ -165,9 +148,7 @@ public UnresolvedExpression visitParentheticValueExpr(ParentheticValueExprContex return visit(ctx.valueExpression()); // Discard parenthesis around } - /** - * Field expression. - */ + /** Field expression. */ @Override public UnresolvedExpression visitFieldExpression(FieldExpressionContext ctx) { return new Field((QualifiedName) visit(ctx.qualifiedName())); @@ -182,13 +163,10 @@ public UnresolvedExpression visitWcFieldExpression(WcFieldExpressionContext ctx) public UnresolvedExpression visitSortField(SortFieldContext ctx) { return new Field( visit(ctx.sortFieldExpression().fieldExpression().qualifiedName()), - ArgumentFactory.getArgumentList(ctx) - ); + ArgumentFactory.getArgumentList(ctx)); } - /** - * Aggregation function. 
- */ + /** Aggregation function. */ @Override public UnresolvedExpression visitStatsFunctionCall(StatsFunctionCallContext ctx) { return new AggregateFunction(ctx.statsFunctionName().getText(), visit(ctx.valueExpression())); @@ -206,7 +184,9 @@ public UnresolvedExpression visitDistinctCountFunctionCall(DistinctCountFunction @Override public UnresolvedExpression visitPercentileAggFunction(PercentileAggFunctionContext ctx) { - return new AggregateFunction(ctx.PERCENTILE().getText(), visit(ctx.aggField), + return new AggregateFunction( + ctx.PERCENTILE().getText(), + visit(ctx.aggField), Collections.singletonList(new Argument("rank", (Literal) visit(ctx.value)))); } @@ -214,34 +194,32 @@ public UnresolvedExpression visitPercentileAggFunction(PercentileAggFunctionCont public UnresolvedExpression visitTakeAggFunctionCall( OpenSearchPPLParser.TakeAggFunctionCallContext ctx) { ImmutableList.Builder builder = ImmutableList.builder(); - builder.add(new UnresolvedArgument("size", - ctx.takeAggFunction().size != null ? visit(ctx.takeAggFunction().size) : - AstDSL.intLiteral(DEFAULT_TAKE_FUNCTION_SIZE_VALUE))); - return new AggregateFunction("take", visit(ctx.takeAggFunction().fieldExpression()), - builder.build()); + builder.add( + new UnresolvedArgument( + "size", + ctx.takeAggFunction().size != null + ? visit(ctx.takeAggFunction().size) + : AstDSL.intLiteral(DEFAULT_TAKE_FUNCTION_SIZE_VALUE))); + return new AggregateFunction( + "take", visit(ctx.takeAggFunction().fieldExpression()), builder.build()); } - /** - * Eval function. - */ + /** Eval function. */ @Override public UnresolvedExpression visitBooleanFunctionCall(BooleanFunctionCallContext ctx) { final String functionName = ctx.conditionFunctionBase().getText(); - return buildFunction(FUNCTION_NAME_MAPPING.getOrDefault(functionName, functionName), + return buildFunction( + FUNCTION_NAME_MAPPING.getOrDefault(functionName, functionName), ctx.functionArgs().functionArg()); } - /** - * Eval function. 
- */ + /** Eval function. */ @Override public UnresolvedExpression visitEvalFunctionCall(EvalFunctionCallContext ctx) { return buildFunction(ctx.evalFunctionName().getText(), ctx.functionArgs().functionArg()); } - /** - * Cast function. - */ + /** Cast function. */ @Override public UnresolvedExpression visitDataTypeFunctionCall(DataTypeFunctionCallContext ctx) { return new Cast(visit(ctx.expression()), visit(ctx.convertedDataType())); @@ -252,15 +230,10 @@ public UnresolvedExpression visitConvertedDataType(ConvertedDataTypeContext ctx) return AstDSL.stringLiteral(ctx.getText()); } - private Function buildFunction(String functionName, - List args) { + private Function buildFunction( + String functionName, List args) { return new Function( - functionName, - args - .stream() - .map(this::visitFunctionArg) - .collect(Collectors.toList()) - ); + functionName, args.stream().map(this::visitFunctionArg).collect(Collectors.toList())); } @Override @@ -290,70 +263,62 @@ public UnresolvedExpression visitTableSource(TableSourceContext ctx) { @Override public UnresolvedExpression visitPositionFunction( - OpenSearchPPLParser.PositionFunctionContext ctx) { + OpenSearchPPLParser.PositionFunctionContext ctx) { return new Function( - POSITION.getName().getFunctionName(), - Arrays.asList(visitFunctionArg(ctx.functionArg(0)), - visitFunctionArg(ctx.functionArg(1)))); + POSITION.getName().getFunctionName(), + Arrays.asList(visitFunctionArg(ctx.functionArg(0)), visitFunctionArg(ctx.functionArg(1)))); } @Override public UnresolvedExpression visitExtractFunctionCall( - OpenSearchPPLParser.ExtractFunctionCallContext ctx) { + OpenSearchPPLParser.ExtractFunctionCallContext ctx) { return new Function( - ctx.extractFunction().EXTRACT().toString(), - getExtractFunctionArguments(ctx)); + ctx.extractFunction().EXTRACT().toString(), getExtractFunctionArguments(ctx)); } private List getExtractFunctionArguments( - OpenSearchPPLParser.ExtractFunctionCallContext ctx) { - List args = Arrays.asList( + 
OpenSearchPPLParser.ExtractFunctionCallContext ctx) { + List args = + Arrays.asList( new Literal(ctx.extractFunction().datetimePart().getText(), DataType.STRING), - visitFunctionArg(ctx.extractFunction().functionArg()) - ); + visitFunctionArg(ctx.extractFunction().functionArg())); return args; } @Override public UnresolvedExpression visitGetFormatFunctionCall( - OpenSearchPPLParser.GetFormatFunctionCallContext ctx) { + OpenSearchPPLParser.GetFormatFunctionCallContext ctx) { return new Function( - ctx.getFormatFunction().GET_FORMAT().toString(), - getFormatFunctionArguments(ctx)); + ctx.getFormatFunction().GET_FORMAT().toString(), getFormatFunctionArguments(ctx)); } private List getFormatFunctionArguments( - OpenSearchPPLParser.GetFormatFunctionCallContext ctx) { - List args = Arrays.asList( + OpenSearchPPLParser.GetFormatFunctionCallContext ctx) { + List args = + Arrays.asList( new Literal(ctx.getFormatFunction().getFormatType().getText(), DataType.STRING), - visitFunctionArg(ctx.getFormatFunction().functionArg()) - ); + visitFunctionArg(ctx.getFormatFunction().functionArg())); return args; } @Override public UnresolvedExpression visitTimestampFunctionCall( - OpenSearchPPLParser.TimestampFunctionCallContext ctx) { + OpenSearchPPLParser.TimestampFunctionCallContext ctx) { return new Function( - ctx.timestampFunction().timestampFunctionName().getText(), - timestampFunctionArguments(ctx)); + ctx.timestampFunction().timestampFunctionName().getText(), timestampFunctionArguments(ctx)); } private List timestampFunctionArguments( - OpenSearchPPLParser.TimestampFunctionCallContext ctx) { - List args = Arrays.asList( - new Literal( - ctx.timestampFunction().simpleDateTimePart().getText(), - DataType.STRING), + OpenSearchPPLParser.TimestampFunctionCallContext ctx) { + List args = + Arrays.asList( + new Literal(ctx.timestampFunction().simpleDateTimePart().getText(), DataType.STRING), visitFunctionArg(ctx.timestampFunction().firstArg), - 
visitFunctionArg(ctx.timestampFunction().secondArg) - ); + visitFunctionArg(ctx.timestampFunction().secondArg)); return args; } - /** - * Literal and value. - */ + /** Literal and value. */ @Override public UnresolvedExpression visitIdentsAsQualifiedName(IdentsAsQualifiedNameContext ctx) { return visitIdentifiers(ctx.ident()); @@ -406,8 +371,10 @@ public UnresolvedExpression visitBooleanLiteral(BooleanLiteralContext ctx) { @Override public UnresolvedExpression visitBySpanClause(BySpanClauseContext ctx) { String name = ctx.spanClause().getText(); - return ctx.alias != null ? new Alias(name, visit(ctx.spanClause()), StringUtils - .unquoteIdentifier(ctx.alias.getText())) : new Alias(name, visit(ctx.spanClause())); + return ctx.alias != null + ? new Alias( + name, visit(ctx.spanClause()), StringUtils.unquoteIdentifier(ctx.alias.getText())) + : new Alias(name, visit(ctx.spanClause())); } @Override @@ -421,8 +388,7 @@ private QualifiedName visitIdentifiers(List ctx) { ctx.stream() .map(RuleContext::getText) .map(StringUtils::unquoteIdentifier) - .collect(Collectors.toList()) - ); + .collect(Collectors.toList())); } private List singleFieldRelevanceArguments( @@ -430,13 +396,21 @@ private List singleFieldRelevanceArguments( // all the arguments are defaulted to string values // to skip environment resolving and function signature resolving ImmutableList.Builder builder = ImmutableList.builder(); - builder.add(new UnresolvedArgument("field", - new QualifiedName(StringUtils.unquoteText(ctx.field.getText())))); - builder.add(new UnresolvedArgument("query", - new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING))); - ctx.relevanceArg().forEach(v -> builder.add(new UnresolvedArgument( - v.relevanceArgName().getText().toLowerCase(), new Literal(StringUtils.unquoteText( - v.relevanceArgValue().getText()), DataType.STRING)))); + builder.add( + new UnresolvedArgument( + "field", new QualifiedName(StringUtils.unquoteText(ctx.field.getText())))); + builder.add( 
+ new UnresolvedArgument( + "query", new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING))); + ctx.relevanceArg() + .forEach( + v -> + builder.add( + new UnresolvedArgument( + v.relevanceArgName().getText().toLowerCase(), + new Literal( + StringUtils.unquoteText(v.relevanceArgValue().getText()), + DataType.STRING)))); return builder.build(); } @@ -445,19 +419,26 @@ private List multiFieldRelevanceArguments( // all the arguments are defaulted to string values // to skip environment resolving and function signature resolving ImmutableList.Builder builder = ImmutableList.builder(); - var fields = new RelevanceFieldList(ctx - .getRuleContexts(OpenSearchPPLParser.RelevanceFieldAndWeightContext.class) - .stream() - .collect(Collectors.toMap( - f -> StringUtils.unquoteText(f.field.getText()), - f -> (f.weight == null) ? 1F : Float.parseFloat(f.weight.getText())))); + var fields = + new RelevanceFieldList( + ctx.getRuleContexts(OpenSearchPPLParser.RelevanceFieldAndWeightContext.class).stream() + .collect( + Collectors.toMap( + f -> StringUtils.unquoteText(f.field.getText()), + f -> (f.weight == null) ? 
1F : Float.parseFloat(f.weight.getText())))); builder.add(new UnresolvedArgument("fields", fields)); - builder.add(new UnresolvedArgument("query", - new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING))); - ctx.relevanceArg().forEach(v -> builder.add(new UnresolvedArgument( - v.relevanceArgName().getText().toLowerCase(), new Literal(StringUtils.unquoteText( - v.relevanceArgValue().getText()), DataType.STRING)))); + builder.add( + new UnresolvedArgument( + "query", new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING))); + ctx.relevanceArg() + .forEach( + v -> + builder.add( + new UnresolvedArgument( + v.relevanceArgName().getText().toLowerCase(), + new Literal( + StringUtils.unquoteText(v.relevanceArgValue().getText()), + DataType.STRING)))); return builder.build(); } - } diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstStatementBuilder.java b/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstStatementBuilder.java index 3b7e5a78dd..e276e6d523 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstStatementBuilder.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstStatementBuilder.java @@ -21,9 +21,7 @@ import org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser; import org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParserBaseVisitor; -/** - * Build {@link Statement} from PPL Query. - */ +/** Build {@link Statement} from PPL Query. 
*/ @RequiredArgsConstructor public class AstStatementBuilder extends OpenSearchPPLParserBaseVisitor { diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/utils/ArgumentFactory.java b/ppl/src/main/java/org/opensearch/sql/ppl/utils/ArgumentFactory.java index 09afd2075f..f89ecf9c6e 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/utils/ArgumentFactory.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/utils/ArgumentFactory.java @@ -3,11 +3,9 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.utils; import static org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser.BooleanLiteralContext; -import static org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser.DecimalLiteralContext; import static org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser.DedupCommandContext; import static org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser.FieldsCommandContext; import static org.opensearch.sql.ppl.antlr.parser.OpenSearchPPLParser.IntegerLiteralContext; @@ -25,9 +23,7 @@ import org.opensearch.sql.ast.expression.Literal; import org.opensearch.sql.common.utils.StringUtils; -/** - * Util class to get all arguments as a list from the PPL command. - */ +/** Util class to get all arguments as a list from the PPL command. */ public class ArgumentFactory { /** @@ -40,8 +36,7 @@ public static List getArgumentList(FieldsCommandContext ctx) { return Collections.singletonList( ctx.MINUS() != null ? new Argument("exclude", new Literal(true, DataType.BOOLEAN)) - : new Argument("exclude", new Literal(false, DataType.BOOLEAN)) - ); + : new Argument("exclude", new Literal(false, DataType.BOOLEAN))); } /** @@ -63,8 +58,7 @@ public static List getArgumentList(StatsCommandContext ctx) { : new Argument("delim", new Literal(" ", DataType.STRING)), ctx.dedupsplit != null ? 
new Argument("dedupsplit", getArgumentValue(ctx.dedupsplit)) - : new Argument("dedupsplit", new Literal(false, DataType.BOOLEAN)) - ); + : new Argument("dedupsplit", new Literal(false, DataType.BOOLEAN))); } /** @@ -83,8 +77,7 @@ public static List getArgumentList(DedupCommandContext ctx) { : new Argument("keepempty", new Literal(false, DataType.BOOLEAN)), ctx.consecutive != null ? new Argument("consecutive", getArgumentValue(ctx.consecutive)) - : new Argument("consecutive", new Literal(false, DataType.BOOLEAN)) - ); + : new Argument("consecutive", new Literal(false, DataType.BOOLEAN))); } /** @@ -101,13 +94,12 @@ public static List getArgumentList(SortFieldContext ctx) { ctx.sortFieldExpression().AUTO() != null ? new Argument("type", new Literal("auto", DataType.STRING)) : ctx.sortFieldExpression().IP() != null - ? new Argument("type", new Literal("ip", DataType.STRING)) - : ctx.sortFieldExpression().NUM() != null - ? new Argument("type", new Literal("num", DataType.STRING)) - : ctx.sortFieldExpression().STR() != null - ? new Argument("type", new Literal("str", DataType.STRING)) - : new Argument("type", new Literal(null, DataType.NULL)) - ); + ? new Argument("type", new Literal("ip", DataType.STRING)) + : ctx.sortFieldExpression().NUM() != null + ? new Argument("type", new Literal("num", DataType.STRING)) + : ctx.sortFieldExpression().STR() != null + ? new Argument("type", new Literal("str", DataType.STRING)) + : new Argument("type", new Literal(null, DataType.NULL))); } /** @@ -120,8 +112,7 @@ public static List getArgumentList(TopCommandContext ctx) { return Collections.singletonList( ctx.number != null ? 
new Argument("noOfResults", getArgumentValue(ctx.number)) - : new Argument("noOfResults", new Literal(10, DataType.INTEGER)) - ); + : new Argument("noOfResults", new Literal(10, DataType.INTEGER))); } /** @@ -131,21 +122,21 @@ public static List getArgumentList(TopCommandContext ctx) { * @return the list of argument with default number of results for the rare command */ public static List getArgumentList(RareCommandContext ctx) { - return Collections - .singletonList(new Argument("noOfResults", new Literal(10, DataType.INTEGER))); + return Collections.singletonList( + new Argument("noOfResults", new Literal(10, DataType.INTEGER))); } /** * parse argument value into Literal. + * * @param ctx ParserRuleContext instance * @return Literal */ private static Literal getArgumentValue(ParserRuleContext ctx) { return ctx instanceof IntegerLiteralContext - ? new Literal(Integer.parseInt(ctx.getText()), DataType.INTEGER) - : ctx instanceof BooleanLiteralContext + ? new Literal(Integer.parseInt(ctx.getText()), DataType.INTEGER) + : ctx instanceof BooleanLiteralContext ? 
new Literal(Boolean.valueOf(ctx.getText()), DataType.BOOLEAN) : new Literal(StringUtils.unquoteText(ctx.getText()), DataType.STRING); } - } diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/utils/PPLQueryDataAnonymizer.java b/ppl/src/main/java/org/opensearch/sql/ppl/utils/PPLQueryDataAnonymizer.java index 2f520b55c6..c52c30b182 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/utils/PPLQueryDataAnonymizer.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/utils/PPLQueryDataAnonymizer.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.utils; import com.google.common.base.Strings; @@ -11,7 +10,6 @@ import com.google.common.collect.ImmutableMap; import java.util.List; import java.util.stream.Collectors; -import lombok.Generated; import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Pair; import org.opensearch.sql.ast.AbstractNodeVisitor; @@ -55,9 +53,7 @@ import org.opensearch.sql.planner.logical.LogicalRename; import org.opensearch.sql.planner.logical.LogicalSort; -/** - * Utility class to mask sensitive information in incoming PPL queries. - */ +/** Utility class to mask sensitive information in incoming PPL queries. */ public class PPLQueryDataAnonymizer extends AbstractNodeVisitor { private static final String MASK_LITERAL = "***"; @@ -69,8 +65,8 @@ public PPLQueryDataAnonymizer() { } /** - * This method is used to anonymize sensitive data in PPL query. - * Sensitive data includes user data., + * This method is used to anonymize sensitive data in PPL query. Sensitive data includes user + * data., * * @return ppl query string with all user data replace with "***" */ @@ -82,9 +78,7 @@ public String anonymizeStatement(Statement plan) { return plan.accept(this, null); } - /** - * Handle Query Statement. - */ + /** Handle Query Statement. 
*/ @Override public String visitQuery(Query node, String context) { return node.getPlan().accept(this, null); @@ -104,8 +98,9 @@ public String visitRelation(Relation node, String context) { public String visitTableFunction(TableFunction node, String context) { String arguments = node.getArguments().stream() - .map(unresolvedExpression - -> this.expressionAnalyzer.analyze(unresolvedExpression, context)) + .map( + unresolvedExpression -> + this.expressionAnalyzer.analyze(unresolvedExpression, context)) .collect(Collectors.joining(",")); return StringUtils.format("source=%s(%s)", node.getFunctionName().toString(), arguments); } @@ -117,37 +112,34 @@ public String visitFilter(Filter node, String context) { return StringUtils.format("%s | where %s", child, condition); } - /** - * Build {@link LogicalRename}. - */ + /** Build {@link LogicalRename}. */ @Override public String visitRename(Rename node, String context) { String child = node.getChild().get(0).accept(this, context); ImmutableMap.Builder renameMapBuilder = new ImmutableMap.Builder<>(); for (Map renameMap : node.getRenameList()) { - renameMapBuilder.put(visitExpression(renameMap.getOrigin()), + renameMapBuilder.put( + visitExpression(renameMap.getOrigin()), ((Field) renameMap.getTarget()).getField().toString()); } String renames = - renameMapBuilder.build().entrySet().stream().map(entry -> StringUtils.format("%s as %s", - entry.getKey(), entry.getValue())).collect(Collectors.joining(",")); + renameMapBuilder.build().entrySet().stream() + .map(entry -> StringUtils.format("%s as %s", entry.getKey(), entry.getValue())) + .collect(Collectors.joining(",")); return StringUtils.format("%s | rename %s", child, renames); } - /** - * Build {@link LogicalAggregation}. - */ + /** Build {@link LogicalAggregation}. 
*/ @Override public String visitAggregation(Aggregation node, String context) { String child = node.getChild().get(0).accept(this, context); final String group = visitExpressionList(node.getGroupExprList()); - return StringUtils.format("%s | stats %s", child, - String.join(" ", visitExpressionList(node.getAggExprList()), groupBy(group)).trim()); + return StringUtils.format( + "%s | stats %s", + child, String.join(" ", visitExpressionList(node.getAggExprList()), groupBy(group)).trim()); } - /** - * Build {@link LogicalRareTopN}. - */ + /** Build {@link LogicalRareTopN}. */ @Override public String visitRareTopN(RareTopN node, String context) { final String child = node.getChild().get(0).accept(this, context); @@ -155,16 +147,15 @@ public String visitRareTopN(RareTopN node, String context) { Integer noOfResults = (Integer) options.get(0).getValue().getValue(); String fields = visitFieldList(node.getFields()); String group = visitExpressionList(node.getGroupExprList()); - return StringUtils.format("%s | %s %d %s", child, + return StringUtils.format( + "%s | %s %d %s", + child, node.getCommandType().name().toLowerCase(), noOfResults, - String.join(" ", fields, groupBy(group)).trim() - ); + String.join(" ", fields, groupBy(group)).trim()); } - /** - * Build {@link LogicalProject} or {@link LogicalRemove} from {@link Field}. - */ + /** Build {@link LogicalProject} or {@link LogicalRemove} from {@link Field}. */ @Override public String visitProject(Project node, String context) { String child = node.getChild().get(0).accept(this, context); @@ -181,9 +172,7 @@ public String visitProject(Project node, String context) { return StringUtils.format("%s | fields %s %s", child, arg, fields); } - /** - * Build {@link LogicalEval}. - */ + /** Build {@link LogicalEval}. 
*/ @Override public String visitEval(Eval node, String context) { String child = node.getChild().get(0).accept(this, context); @@ -193,14 +182,14 @@ public String visitEval(Eval node, String context) { String target = let.getVar().getField().toString(); expressionsBuilder.add(ImmutablePair.of(target, expression)); } - String expressions = expressionsBuilder.build().stream().map(pair -> StringUtils.format("%s" - + "=%s", pair.getLeft(), pair.getRight())).collect(Collectors.joining(" ")); + String expressions = + expressionsBuilder.build().stream() + .map(pair -> StringUtils.format("%s" + "=%s", pair.getLeft(), pair.getRight())) + .collect(Collectors.joining(" ")); return StringUtils.format("%s | eval %s", child, expressions); } - /** - * Build {@link LogicalSort}. - */ + /** Build {@link LogicalSort}. */ @Override public String visitSort(Sort node, String context) { String child = node.getChild().get(0).accept(this, context); @@ -209,9 +198,7 @@ public String visitSort(Sort node, String context) { return StringUtils.format("%s | sort %s", child, sortList); } - /** - * Build {@link LogicalDedupe}. - */ + /** Build {@link LogicalDedupe}. */ @Override public String visitDedupe(Dedupe node, String context) { String child = node.getChild().get(0).accept(this, context); @@ -221,10 +208,9 @@ public String visitDedupe(Dedupe node, String context) { Boolean keepEmpty = (Boolean) options.get(1).getValue().getValue(); Boolean consecutive = (Boolean) options.get(2).getValue().getValue(); - return StringUtils - .format("%s | dedup %s %d keepempty=%b consecutive=%b", child, fields, allowedDuplication, - keepEmpty, - consecutive); + return StringUtils.format( + "%s | dedup %s %d keepempty=%b consecutive=%b", + child, fields, allowedDuplication, keepEmpty, consecutive); } @Override @@ -239,8 +225,9 @@ private String visitFieldList(List fieldList) { } private String visitExpressionList(List expressionList) { - return expressionList.isEmpty() ? 
"" : - expressionList.stream().map(this::visitExpression).collect(Collectors.joining(",")); + return expressionList.isEmpty() + ? "" + : expressionList.stream().map(this::visitExpression).collect(Collectors.joining(",")); } private String visitExpression(UnresolvedExpression expression) { @@ -251,11 +238,8 @@ private String groupBy(String groupBy) { return Strings.isNullOrEmpty(groupBy) ? "" : StringUtils.format("by %s", groupBy); } - /** - * Expression Anonymizer. - */ - private static class AnonymizerExpressionAnalyzer extends AbstractNodeVisitor { + /** Expression Anonymizer. */ + private static class AnonymizerExpressionAnalyzer extends AbstractNodeVisitor { public String analyze(UnresolvedExpression unresolved, String context) { return unresolved.accept(this, context); diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/utils/UnresolvedPlanHelper.java b/ppl/src/main/java/org/opensearch/sql/ppl/utils/UnresolvedPlanHelper.java index 4fb9eee6a0..a502f2d769 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/utils/UnresolvedPlanHelper.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/utils/UnresolvedPlanHelper.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.utils; import com.google.common.collect.ImmutableList; @@ -12,15 +11,11 @@ import org.opensearch.sql.ast.tree.Project; import org.opensearch.sql.ast.tree.UnresolvedPlan; -/** - * The helper to add select to {@link UnresolvedPlan} if needed. - */ +/** The helper to add select to {@link UnresolvedPlan} if needed. */ @UtilityClass public class UnresolvedPlanHelper { - /** - * Attach Select All to PPL commands if required. - */ + /** Attach Select All to PPL commands if required. 
*/ public UnresolvedPlan addSelectAll(UnresolvedPlan plan) { if ((plan instanceof Project) && !((Project) plan).isExcluded()) { return plan; diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/PPLServiceTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/PPLServiceTest.java index c14eb3dba1..598f6691cb 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/PPLServiceTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/PPLServiceTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl; import static org.mockito.ArgumentMatchers.any; @@ -41,21 +40,17 @@ public class PPLServiceTest { private DefaultQueryManager queryManager; - @Mock - private QueryService queryService; + @Mock private QueryService queryService; - @Mock - private ExecutionEngine.Schema schema; + @Mock private ExecutionEngine.Schema schema; - /** - * Setup the test context. - */ + /** Setup the test context. */ @Before public void setUp() { queryManager = DefaultQueryManager.defaultQueryManager(); - pplService = new PPLService(new PPLSyntaxParser(), queryManager, - new QueryPlanFactory(queryService)); + pplService = + new PPLService(new PPLSyntaxParser(), queryManager, new QueryPlanFactory(queryService)); } @After @@ -65,18 +60,20 @@ public void cleanup() throws InterruptedException { @Test public void testExecuteShouldPass() { - doAnswer(invocation -> { - ResponseListener listener = invocation.getArgument(1); - listener.onResponse(new QueryResponse(schema, Collections.emptyList(), Cursor.None)); - return null; - }).when(queryService).execute(any(), any()); - - pplService.execute(new PPLQueryRequest("search source=t a=1", null, QUERY), + doAnswer( + invocation -> { + ResponseListener listener = invocation.getArgument(1); + listener.onResponse(new QueryResponse(schema, Collections.emptyList(), Cursor.None)); + return null; + }) + .when(queryService) + .execute(any(), any()); + + pplService.execute( + new PPLQueryRequest("search source=t a=1", null, QUERY), 
new ResponseListener() { @Override - public void onResponse(QueryResponse pplQueryResponse) { - - } + public void onResponse(QueryResponse pplQueryResponse) {} @Override public void onFailure(Exception e) { @@ -87,17 +84,20 @@ public void onFailure(Exception e) { @Test public void testExecuteCsvFormatShouldPass() { - doAnswer(invocation -> { - ResponseListener listener = invocation.getArgument(1); - listener.onResponse(new QueryResponse(schema, Collections.emptyList(), Cursor.None)); - return null; - }).when(queryService).execute(any(), any()); - - pplService.execute(new PPLQueryRequest("search source=t a=1", null, QUERY, "csv"), + doAnswer( + invocation -> { + ResponseListener listener = invocation.getArgument(1); + listener.onResponse(new QueryResponse(schema, Collections.emptyList(), Cursor.None)); + return null; + }) + .when(queryService) + .execute(any(), any()); + + pplService.execute( + new PPLQueryRequest("search source=t a=1", null, QUERY, "csv"), new ResponseListener() { @Override - public void onResponse(QueryResponse pplQueryResponse) { - } + public void onResponse(QueryResponse pplQueryResponse) {} @Override public void onFailure(Exception e) { @@ -108,17 +108,20 @@ public void onFailure(Exception e) { @Test public void testExplainShouldPass() { - doAnswer(invocation -> { - ResponseListener listener = invocation.getArgument(1); - listener.onResponse(new ExplainResponse(new ExplainResponseNode("test"))); - return null; - }).when(queryService).explain(any(), any()); - - pplService.explain(new PPLQueryRequest("search source=t a=1", null, EXPLAIN), + doAnswer( + invocation -> { + ResponseListener listener = invocation.getArgument(1); + listener.onResponse(new ExplainResponse(new ExplainResponseNode("test"))); + return null; + }) + .when(queryService) + .explain(any(), any()); + + pplService.explain( + new PPLQueryRequest("search source=t a=1", null, EXPLAIN), new ResponseListener() { @Override - public void onResponse(ExplainResponse pplQueryResponse) { - 
} + public void onResponse(ExplainResponse pplQueryResponse) {} @Override public void onFailure(Exception e) { @@ -129,7 +132,8 @@ public void onFailure(Exception e) { @Test public void testExecuteWithIllegalQueryShouldBeCaughtByHandler() { - pplService.execute(new PPLQueryRequest("search", null, QUERY), + pplService.execute( + new PPLQueryRequest("search", null, QUERY), new ResponseListener() { @Override public void onResponse(QueryResponse pplQueryResponse) { @@ -137,15 +141,14 @@ public void onResponse(QueryResponse pplQueryResponse) { } @Override - public void onFailure(Exception e) { - - } + public void onFailure(Exception e) {} }); } @Test public void testExplainWithIllegalQueryShouldBeCaughtByHandler() { - pplService.explain(new PPLQueryRequest("search", null, QUERY), + pplService.explain( + new PPLQueryRequest("search", null, QUERY), new ResponseListener<>() { @Override public void onResponse(ExplainResponse pplQueryResponse) { @@ -153,26 +156,26 @@ public void onResponse(ExplainResponse pplQueryResponse) { } @Override - public void onFailure(Exception e) { - - } + public void onFailure(Exception e) {} }); } @Test public void testPrometheusQuery() { - doAnswer(invocation -> { - ResponseListener listener = invocation.getArgument(1); - listener.onResponse(new QueryResponse(schema, Collections.emptyList(), Cursor.None)); - return null; - }).when(queryService).execute(any(), any()); - - pplService.execute(new PPLQueryRequest("source = prometheus.http_requests_total", null, QUERY), + doAnswer( + invocation -> { + ResponseListener listener = invocation.getArgument(1); + listener.onResponse(new QueryResponse(schema, Collections.emptyList(), Cursor.None)); + return null; + }) + .when(queryService) + .execute(any(), any()); + + pplService.execute( + new PPLQueryRequest("source = prometheus.http_requests_total", null, QUERY), new ResponseListener<>() { @Override - public void onResponse(QueryResponse pplQueryResponse) { - - } + public void onResponse(QueryResponse 
pplQueryResponse) {} @Override public void onFailure(Exception e) { @@ -183,7 +186,8 @@ public void onFailure(Exception e) { @Test public void testInvalidPPLQuery() { - pplService.execute(new PPLQueryRequest("search", null, QUERY), + pplService.execute( + new PPLQueryRequest("search", null, QUERY), new ResponseListener() { @Override public void onResponse(QueryResponse pplQueryResponse) { @@ -191,9 +195,7 @@ public void onResponse(QueryResponse pplQueryResponse) { } @Override - public void onFailure(Exception e) { - - } + public void onFailure(Exception e) {} }); } } diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/antlr/NowLikeFunctionParserTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/antlr/NowLikeFunctionParserTest.java index 9f635fdd81..f6a04983e2 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/antlr/NowLikeFunctionParserTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/antlr/NowLikeFunctionParserTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.antlr; import static org.junit.Assert.assertNotEquals; @@ -21,6 +20,7 @@ public class NowLikeFunctionParserTest { /** * Set parameterized values used in test. + * * @param name Function name * @param hasFsp Whether function has fsp argument * @param hasShortcut Whether function has shortcut (call without `()`) @@ -33,24 +33,26 @@ public NowLikeFunctionParserTest(String name, Boolean hasFsp, Boolean hasShortcu /** * Returns function data to test. + * * @return An iterable. 
*/ @Parameterized.Parameters(name = "{0}") public static Iterable functionNames() { - return List.of(new Object[][]{ - {"now", true, false}, - {"current_timestamp", true, true}, - {"localtimestamp", true, true}, - {"localtime", true, true}, - {"sysdate", true, false}, - {"curtime", true, false}, - {"current_time", true, true}, - {"curdate", false, false}, - {"current_date", false, true}, - {"utc_date", false, false}, - {"utc_time", false, false}, - {"utc_timestamp", false, false} - }); + return List.of( + new Object[][] { + {"now", true, false}, + {"current_timestamp", true, true}, + {"localtimestamp", true, true}, + {"localtime", true, true}, + {"sysdate", true, false}, + {"curtime", true, false}, + {"current_time", true, true}, + {"curdate", false, false}, + {"current_date", false, true}, + {"utc_date", false, false}, + {"utc_time", false, false}, + {"utc_timestamp", false, false} + }); } private final String name; diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserMatchBoolPrefixSamplesTests.java b/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserMatchBoolPrefixSamplesTests.java index dd146ea2cf..7de197028e 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserMatchBoolPrefixSamplesTests.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserMatchBoolPrefixSamplesTests.java @@ -13,26 +13,24 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; - @RunWith(Parameterized.class) public class PPLSyntaxParserMatchBoolPrefixSamplesTests { - - /** Returns sample queries that the PPLSyntaxParser is expected to parse successfully. + /** + * Returns sample queries that the PPLSyntaxParser is expected to parse successfully. + * * @return an Iterable of sample queries. 
*/ @Parameterized.Parameters(name = "{0}") public static Iterable sampleQueries() { return List.of( "source=t a= 1 | where match_bool_prefix(a, 'hello world')", - "source=t a = 1 | where match_bool_prefix(a, 'hello world'," - + " minimum_should_match = 3)", + "source=t a = 1 | where match_bool_prefix(a, 'hello world'," + " minimum_should_match = 3)", "source=t a = 1 | where match_bool_prefix(a, 'hello world', fuzziness='AUTO')", "source=t a = 1 | where match_bool_prefix(a, 'hello world', fuzziness='AUTO:4,6')", "source=t a= 1 | where match_bool_prefix(a, 'hello world', prefix_length=0)", "source=t a= 1 | where match_bool_prefix(a, 'hello world', max_expansions=1)", - "source=t a= 1 | where match_bool_prefix(a, 'hello world'," - + " fuzzy_transpositions=true)", + "source=t a= 1 | where match_bool_prefix(a, 'hello world'," + " fuzzy_transpositions=true)", "source=t a= 1 | where match_bool_prefix(a, 'hello world'," + " fuzzy_rewrite=constant_score)", "source=t a= 1 | where match_bool_prefix(a, 'hello world'," @@ -43,8 +41,7 @@ public static Iterable sampleQueries() { + " fuzzy_rewrite=top_terms_blended_freqs_1)", "source=t a= 1 | where match_bool_prefix(a, 'hello world'," + " fuzzy_rewrite=top_terms_boost_1)", - "source=t a= 1 | where match_bool_prefix(a, 'hello world'," - + " fuzzy_rewrite=top_terms_1)", + "source=t a= 1 | where match_bool_prefix(a, 'hello world'," + " fuzzy_rewrite=top_terms_1)", "source=t a= 1 | where match_bool_prefix(a, 'hello world', boost=1)", "source=t a = 1 | where match_bool_prefix(a, 'hello world', analyzer = 'standard'," + "prefix_length = '0', boost = 1)"); diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserMatchPhraseSamplesTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserMatchPhraseSamplesTest.java index aef6d1d69e..94222ec103 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserMatchPhraseSamplesTest.java +++ 
b/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserMatchPhraseSamplesTest.java @@ -13,22 +13,22 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; - @RunWith(Parameterized.class) public class PPLSyntaxParserMatchPhraseSamplesTest { - - /** Returns sample queries that the PPLSyntaxParser is expected to parse successfully. + /** + * Returns sample queries that the PPLSyntaxParser is expected to parse successfully. + * * @return an Iterable of sample queries. */ @Parameterized.Parameters(name = "{0}") public static Iterable sampleQueries() { return List.of( - "source=t a= 1 | where match_phrase(a, 'hello world')", - "source=t a = 1 | where match_phrase(a, 'hello world', slop = 3)", - "source=t a = 1 | where match_phrase(a, 'hello world', analyzer = 'standard'," - + "zero_terms_query = 'none', slop = 3)", - "source=t a = 1 | where match_phrase(a, 'hello world', zero_terms_query = all)"); + "source=t a= 1 | where match_phrase(a, 'hello world')", + "source=t a = 1 | where match_phrase(a, 'hello world', slop = 3)", + "source=t a = 1 | where match_phrase(a, 'hello world', analyzer = 'standard'," + + "zero_terms_query = 'none', slop = 3)", + "source=t a = 1 | where match_phrase(a, 'hello world', zero_terms_query = all)"); } private final String query; diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserTest.java index 57cee7fa1d..943953d416 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/antlr/PPLSyntaxParserTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.antlr; import static org.junit.Assert.assertNotEquals; @@ -19,8 +18,7 @@ public class PPLSyntaxParserTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = 
ExpectedException.none(); @Test public void testSearchCommandShouldPass() { @@ -140,99 +138,170 @@ public void testTopCommandWithoutNAndGroupByShouldPass() { @Test public void testCanParseMultiMatchRelevanceFunction() { - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match(['address'], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match(['address', 'notes'], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match([\"*\"], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match([\"address\"], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match([`address`], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match([address], 'query')")); - - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match(['address' ^ 1.0, 'notes' ^ 2.2], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match(['address' ^ 1.1, 'notes'], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match(['address', 'notes' ^ 1.5], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match(['address', 'notes' 3], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match(['address' ^ .3, 'notes' 3], 'query')")); - - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE multi_match([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query'," - + "analyzer=keyword, quote_field_suffix=\".exact\", fuzzy_prefix_length = 4)")); + assertNotEquals( + null, new PPLSyntaxParser().parse("SOURCE=test | WHERE 
multi_match(['address'], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE multi_match(['address', 'notes'], 'query')")); + assertNotEquals( + null, new PPLSyntaxParser().parse("SOURCE=test | WHERE multi_match([\"*\"], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser().parse("SOURCE=test | WHERE multi_match([\"address\"], 'query')")); + assertNotEquals( + null, new PPLSyntaxParser().parse("SOURCE=test | WHERE multi_match([`address`], 'query')")); + assertNotEquals( + null, new PPLSyntaxParser().parse("SOURCE=test | WHERE multi_match([address], 'query')")); + + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE multi_match(['address' ^ 1.0, 'notes' ^ 2.2], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE multi_match(['address' ^ 1.1, 'notes'], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE multi_match(['address', 'notes' ^ 1.5], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE multi_match(['address', 'notes' 3], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE multi_match(['address' ^ .3, 'notes' 3], 'query')")); + + assertNotEquals( + null, + new PPLSyntaxParser() + .parse( + "SOURCE=test | WHERE multi_match([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse( + "SOURCE=test | WHERE multi_match([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query'," + + "analyzer=keyword, quote_field_suffix=\".exact\", fuzzy_prefix_length = 4)")); } @Test public void testCanParseSimpleQueryStringRelevanceFunction() { - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string(['address'], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string(['address', 'notes'], 'query')")); - 
assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string([\"*\"], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string([\"address\"], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string([`address`], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string([address], 'query')")); - - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string(['address' ^ 1.0, 'notes' ^ 2.2], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string(['address' ^ 1.1, 'notes'], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string(['address', 'notes' ^ 1.5], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string(['address', 'notes' 3], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string(['address' ^ .3, 'notes' 3], 'query')")); - - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE simple_query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query'," - + "analyzer=keyword, quote_field_suffix=\".exact\", fuzzy_prefix_length = 4)")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE simple_query_string(['address'], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE simple_query_string(['address', 'notes'], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser().parse("SOURCE=test | WHERE simple_query_string([\"*\"], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + 
.parse("SOURCE=test | WHERE simple_query_string([\"address\"], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE simple_query_string([`address`], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser().parse("SOURCE=test | WHERE simple_query_string([address], 'query')")); + + assertNotEquals( + null, + new PPLSyntaxParser() + .parse( + "SOURCE=test | WHERE simple_query_string(['address' ^ 1.0, 'notes' ^ 2.2]," + + " 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE simple_query_string(['address' ^ 1.1, 'notes'], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE simple_query_string(['address', 'notes' ^ 1.5], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE simple_query_string(['address', 'notes' 3], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse( + "SOURCE=test | WHERE simple_query_string(['address' ^ .3, 'notes' 3], 'query')")); + + assertNotEquals( + null, + new PPLSyntaxParser() + .parse( + "SOURCE=test | WHERE simple_query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2]," + + " 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse( + "SOURCE=test | WHERE simple_query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2]," + + " 'query',analyzer=keyword, quote_field_suffix=\".exact\"," + + " fuzzy_prefix_length = 4)")); } @Test public void testCanParseQueryStringRelevanceFunction() { - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string(['address'], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string(['address', 'notes'], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string([\"*\"], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string([\"address\"], 'query')")); - 
assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string([`address`], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string([address], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string(['address' ^ 1.0, 'notes' ^ 2.2], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string(['address' ^ 1.1, 'notes'], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string(['address', 'notes' ^ 1.5], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string(['address', 'notes' 3], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string(['address' ^ .3, 'notes' 3], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')")); - assertNotEquals(null, new PPLSyntaxParser().parse( - "SOURCE=test | WHERE query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query'," - + "analyzer=keyword, quote_field_suffix=\".exact\", fuzzy_prefix_length = 4)")); + assertNotEquals( + null, + new PPLSyntaxParser().parse("SOURCE=test | WHERE query_string(['address'], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE query_string(['address', 'notes'], 'query')")); + assertNotEquals( + null, new PPLSyntaxParser().parse("SOURCE=test | WHERE query_string([\"*\"], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser().parse("SOURCE=test | WHERE query_string([\"address\"], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser().parse("SOURCE=test | WHERE query_string([`address`], 'query')")); + assertNotEquals( + null, new PPLSyntaxParser().parse("SOURCE=test | WHERE query_string([address], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + 
.parse("SOURCE=test | WHERE query_string(['address' ^ 1.0, 'notes' ^ 2.2], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE query_string(['address' ^ 1.1, 'notes'], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE query_string(['address', 'notes' ^ 1.5], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE query_string(['address', 'notes' 3], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse("SOURCE=test | WHERE query_string(['address' ^ .3, 'notes' 3], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse( + "SOURCE=test | WHERE query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')")); + assertNotEquals( + null, + new PPLSyntaxParser() + .parse( + "SOURCE=test | WHERE query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query'," + + "analyzer=keyword, quote_field_suffix=\".exact\", fuzzy_prefix_length = 4)")); } @Test @@ -275,15 +344,35 @@ public void testDescribeCommandWithSourceShouldFail() { @Test public void testCanParseExtractFunction() { - String[] parts = List.of("MICROSECOND", "SECOND", "MINUTE", "HOUR", "DAY", - "WEEK", "MONTH", "QUARTER", "YEAR", "SECOND_MICROSECOND", - "MINUTE_MICROSECOND", "MINUTE_SECOND", "HOUR_MICROSECOND", - "HOUR_SECOND", "HOUR_MINUTE", "DAY_MICROSECOND", - "DAY_SECOND", "DAY_MINUTE", "DAY_HOUR", "YEAR_MONTH").toArray(new String[0]); + String[] parts = + List.of( + "MICROSECOND", + "SECOND", + "MINUTE", + "HOUR", + "DAY", + "WEEK", + "MONTH", + "QUARTER", + "YEAR", + "SECOND_MICROSECOND", + "MINUTE_MICROSECOND", + "MINUTE_SECOND", + "HOUR_MICROSECOND", + "HOUR_SECOND", + "HOUR_MINUTE", + "DAY_MICROSECOND", + "DAY_SECOND", + "DAY_MINUTE", + "DAY_HOUR", + "YEAR_MONTH") + .toArray(new String[0]); for (String part : parts) { - assertNotNull(new PPLSyntaxParser().parse( - String.format("SOURCE=test | eval k = extract(%s FROM \"2023-02-06\")", part))); + 
assertNotNull( + new PPLSyntaxParser() + .parse( + String.format("SOURCE=test | eval k = extract(%s FROM \"2023-02-06\")", part))); } } @@ -294,8 +383,9 @@ public void testCanParseGetFormatFunction() { for (String type : types) { for (String format : formats) { - assertNotNull(new PPLSyntaxParser().parse( - String.format("SOURCE=test | eval k = get_format(%s, %s)", type, format))); + assertNotNull( + new PPLSyntaxParser() + .parse(String.format("SOURCE=test | eval k = get_format(%s, %s)", type, format))); } } } @@ -303,24 +393,28 @@ public void testCanParseGetFormatFunction() { @Test public void testCannotParseGetFormatFunctionWithBadArg() { assertThrows( - SyntaxCheckException.class, - () -> new PPLSyntaxParser().parse( - "SOURCE=test | eval k = GET_FORMAT(NONSENSE_ARG,'INTERNAL')")); + SyntaxCheckException.class, + () -> + new PPLSyntaxParser() + .parse("SOURCE=test | eval k = GET_FORMAT(NONSENSE_ARG,'INTERNAL')")); } @Test public void testCanParseTimestampaddFunction() { - assertNotNull(new PPLSyntaxParser().parse( - "SOURCE=test | eval k = TIMESTAMPADD(MINUTE, 1, '2003-01-02')")); - assertNotNull(new PPLSyntaxParser().parse( - "SOURCE=test | eval k = TIMESTAMPADD(WEEK,1,'2003-01-02')")); + assertNotNull( + new PPLSyntaxParser() + .parse("SOURCE=test | eval k = TIMESTAMPADD(MINUTE, 1, '2003-01-02')")); + assertNotNull( + new PPLSyntaxParser().parse("SOURCE=test | eval k = TIMESTAMPADD(WEEK,1,'2003-01-02')")); } @Test public void testCanParseTimestampdiffFunction() { - assertNotNull(new PPLSyntaxParser().parse( - "SOURCE=test | eval k = TIMESTAMPDIFF(MINUTE, '2003-01-02', '2003-01-02')")); - assertNotNull(new PPLSyntaxParser().parse( - "SOURCE=test | eval k = TIMESTAMPDIFF(WEEK,'2003-01-02','2003-01-02')")); + assertNotNull( + new PPLSyntaxParser() + .parse("SOURCE=test | eval k = TIMESTAMPDIFF(MINUTE, '2003-01-02', '2003-01-02')")); + assertNotNull( + new PPLSyntaxParser() + .parse("SOURCE=test | eval k = TIMESTAMPDIFF(WEEK,'2003-01-02','2003-01-02')")); } } 
diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/domain/PPLQueryRequestTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/domain/PPLQueryRequestTest.java index b53656e252..29e6ff3298 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/domain/PPLQueryRequestTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/domain/PPLQueryRequestTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.domain; import static org.junit.Assert.assertEquals; @@ -16,8 +15,7 @@ public class PPLQueryRequestTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = ExpectedException.none(); @Test public void getRequestShouldPass() { @@ -27,40 +25,34 @@ public void getRequestShouldPass() { @Test public void testExplainRequest() { - PPLQueryRequest request = new PPLQueryRequest( - "source=t a=1", null, "/_plugins/_ppl/_explain"); + PPLQueryRequest request = new PPLQueryRequest("source=t a=1", null, "/_plugins/_ppl/_explain"); assertTrue(request.isExplainRequest()); } @Test public void testDefaultFormat() { - PPLQueryRequest request = new PPLQueryRequest( - "source=test", null, "/_plugins/_ppl"); + PPLQueryRequest request = new PPLQueryRequest("source=test", null, "/_plugins/_ppl"); assertEquals(request.format(), Format.JDBC); } @Test public void testJDBCFormat() { - PPLQueryRequest request = new PPLQueryRequest( - "source=test", null, "/_plugins/_ppl", "jdbc"); + PPLQueryRequest request = new PPLQueryRequest("source=test", null, "/_plugins/_ppl", "jdbc"); assertEquals(request.format(), Format.JDBC); } @Test public void testCSVFormat() { - PPLQueryRequest request = new PPLQueryRequest( - "source=test", null, "/_plugins/_ppl", "csv"); + PPLQueryRequest request = new PPLQueryRequest("source=test", null, "/_plugins/_ppl", "csv"); assertEquals(request.format(), Format.CSV); } @Test public void testUnsupportedFormat() { String format = "notsupport"; - PPLQueryRequest 
request = new PPLQueryRequest( - "source=test", null, "/_plugins/_ppl", format); + PPLQueryRequest request = new PPLQueryRequest("source=test", null, "/_plugins/_ppl", format); exceptionRule.expect(IllegalArgumentException.class); exceptionRule.expectMessage("response in " + format + " format is not supported."); request.format(); } - } diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/domain/PPLQueryResponseTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/domain/PPLQueryResponseTest.java index 03eaaf22f4..50be4efa2e 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/domain/PPLQueryResponseTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/domain/PPLQueryResponseTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.domain; import org.junit.Test; diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstBuilderTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstBuilderTest.java index 599f6bdd75..c9989a49c4 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstBuilderTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.parser; import static java.util.Collections.emptyList; @@ -62,353 +61,264 @@ public class AstBuilderTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = ExpectedException.none(); private PPLSyntaxParser parser = new PPLSyntaxParser(); @Test public void testSearchCommand() { - assertEqual("search source=t a=1", - filter( - relation("t"), - compare("=", field("a"), intLiteral(1)) - ) - ); + assertEqual( + "search source=t a=1", filter(relation("t"), compare("=", field("a"), intLiteral(1)))); } @Test public void testSearchCrossClusterCommand() { - assertEqual("search source=c:t", - relation(qualifiedName("c:t")) - ); + assertEqual("search source=c:t", 
relation(qualifiedName("c:t"))); } @Test public void testSearchMatchAllCrossClusterCommand() { - assertEqual("search source=*:t", - relation(qualifiedName("*:t")) - ); + assertEqual("search source=*:t", relation(qualifiedName("*:t"))); } @Test public void testPrometheusSearchCommand() { - assertEqual("search source = prometheus.http_requests_total", - relation(qualifiedName("prometheus", "http_requests_total")) - ); + assertEqual( + "search source = prometheus.http_requests_total", + relation(qualifiedName("prometheus", "http_requests_total"))); } @Test public void testSearchCommandWithDataSourceEscape() { - assertEqual("search source = `prometheus.http_requests_total`", - relation("prometheus.http_requests_total") - ); + assertEqual( + "search source = `prometheus.http_requests_total`", + relation("prometheus.http_requests_total")); } @Test public void testSearchCommandWithDotInIndexName() { - assertEqual("search source = http_requests_total.test", - relation(qualifiedName("http_requests_total","test")) - ); + assertEqual( + "search source = http_requests_total.test", + relation(qualifiedName("http_requests_total", "test"))); } @Test public void testSearchWithPrometheusQueryRangeWithPositionedArguments() { - assertEqual("search source = prometheus.query_range(\"test{code='200'}\",1234, 12345, 3)", - tableFunction(Arrays.asList("prometheus", "query_range"), + assertEqual( + "search source = prometheus.query_range(\"test{code='200'}\",1234, 12345, 3)", + tableFunction( + Arrays.asList("prometheus", "query_range"), unresolvedArg(null, stringLiteral("test{code='200'}")), unresolvedArg(null, intLiteral(1234)), unresolvedArg(null, intLiteral(12345)), - unresolvedArg(null, intLiteral(3)) - )); + unresolvedArg(null, intLiteral(3)))); } @Test public void testSearchWithPrometheusQueryRangeWithNamedArguments() { - assertEqual("search source = prometheus.query_range(query = \"test{code='200'}\", " + assertEqual( + "search source = prometheus.query_range(query = 
\"test{code='200'}\", " + "starttime = 1234, step=3, endtime=12345)", - tableFunction(Arrays.asList("prometheus", "query_range"), + tableFunction( + Arrays.asList("prometheus", "query_range"), unresolvedArg("query", stringLiteral("test{code='200'}")), unresolvedArg("starttime", intLiteral(1234)), unresolvedArg("step", intLiteral(3)), - unresolvedArg("endtime", intLiteral(12345)) - )); + unresolvedArg("endtime", intLiteral(12345)))); } @Test public void testSearchCommandString() { - assertEqual("search source=t a=\"a\"", - filter( - relation("t"), - compare("=", field("a"), stringLiteral("a")) - ) - ); + assertEqual( + "search source=t a=\"a\"", + filter(relation("t"), compare("=", field("a"), stringLiteral("a")))); } @Test public void testSearchCommandWithoutSearch() { - assertEqual("source=t a=1", - filter( - relation("t"), - compare("=", field("a"), intLiteral(1)) - ) - ); + assertEqual("source=t a=1", filter(relation("t"), compare("=", field("a"), intLiteral(1)))); } @Test public void testSearchCommandWithFilterBeforeSource() { - assertEqual("search a=1 source=t", - filter( - relation("t"), - compare("=", field("a"), intLiteral(1)) - )); + assertEqual( + "search a=1 source=t", filter(relation("t"), compare("=", field("a"), intLiteral(1)))); } @Test public void testWhereCommand() { - assertEqual("search source=t | where a=1", - filter( - relation("t"), - compare("=", field("a"), intLiteral(1)) - ) - ); + assertEqual( + "search source=t | where a=1", + filter(relation("t"), compare("=", field("a"), intLiteral(1)))); } @Test public void testWhereCommandWithQualifiedName() { - assertEqual("search source=t | where a.v=1", - filter( - relation("t"), - compare("=", field(qualifiedName("a", "v")), intLiteral(1)) - ) - ); + assertEqual( + "search source=t | where a.v=1", + filter(relation("t"), compare("=", field(qualifiedName("a", "v")), intLiteral(1)))); } @Test public void testFieldsCommandWithoutArguments() { - assertEqual("source=t | fields f, g", - projectWithArg( 
- relation("t"), - defaultFieldsArgs(), - field("f"), field("g") - )); + assertEqual( + "source=t | fields f, g", + projectWithArg(relation("t"), defaultFieldsArgs(), field("f"), field("g"))); } @Test public void testFieldsCommandWithIncludeArguments() { - assertEqual("source=t | fields + f, g", - projectWithArg( - relation("t"), - defaultFieldsArgs(), - field("f"), field("g") - )); + assertEqual( + "source=t | fields + f, g", + projectWithArg(relation("t"), defaultFieldsArgs(), field("f"), field("g"))); } @Test public void testFieldsCommandWithExcludeArguments() { - assertEqual("source=t | fields - f, g", + assertEqual( + "source=t | fields - f, g", projectWithArg( relation("t"), exprList(argument("exclude", booleanLiteral(true))), - field("f"), field("g") - )); + field("f"), + field("g"))); } @Test public void testSearchCommandWithQualifiedName() { - assertEqual("source=t | fields f.v, g.v", + assertEqual( + "source=t | fields f.v, g.v", projectWithArg( relation("t"), defaultFieldsArgs(), - field(qualifiedName("f", "v")), field(qualifiedName("g", "v")) - )); + field(qualifiedName("f", "v")), + field(qualifiedName("g", "v")))); } @Test public void testRenameCommand() { - assertEqual("source=t | rename f as g", - rename( - relation("t"), - map("f", "g") - )); + assertEqual("source=t | rename f as g", rename(relation("t"), map("f", "g"))); } @Test public void testRenameCommandWithMultiFields() { - assertEqual("source=t | rename f as g, h as i, j as k", - rename( - relation("t"), - map("f", "g"), - map("h", "i"), - map("j", "k") - )); + assertEqual( + "source=t | rename f as g, h as i, j as k", + rename(relation("t"), map("f", "g"), map("h", "i"), map("j", "k"))); } @Test public void testStatsCommand() { - assertEqual("source=t | stats count(a)", + assertEqual( + "source=t | stats count(a)", agg( relation("t"), - exprList( - alias( - "count(a)", - aggregate("count", field("a")) - ) - ), + exprList(alias("count(a)", aggregate("count", field("a")))), emptyList(), 
emptyList(), - defaultStatsArgs() - )); + defaultStatsArgs())); } @Test public void testStatsCommandWithByClause() { - assertEqual("source=t | stats count(a) by b DEDUP_SPLITVALUES=false", + assertEqual( + "source=t | stats count(a) by b DEDUP_SPLITVALUES=false", agg( relation("t"), - exprList( - alias( - "count(a)", - aggregate("count", field("a")) - ) - ), + exprList(alias("count(a)", aggregate("count", field("a")))), emptyList(), - exprList( - alias( - "b", - field("b") - )), - defaultStatsArgs() - )); + exprList(alias("b", field("b"))), + defaultStatsArgs())); } @Test public void testStatsCommandWithByClauseInBackticks() { - assertEqual("source=t | stats count(a) by `b` DEDUP_SPLITVALUES=false", + assertEqual( + "source=t | stats count(a) by `b` DEDUP_SPLITVALUES=false", agg( relation("t"), - exprList( - alias( - "count(a)", - aggregate("count", field("a")) - ) - ), + exprList(alias("count(a)", aggregate("count", field("a")))), emptyList(), - exprList( - alias( - "b", - field("b") - )), - defaultStatsArgs() - )); + exprList(alias("b", field("b"))), + defaultStatsArgs())); } @Test public void testStatsCommandWithAlias() { - assertEqual("source=t | stats count(a) as alias", + assertEqual( + "source=t | stats count(a) as alias", agg( relation("t"), - exprList( - alias( - "alias", - aggregate("count", field("a")) - ) - ), + exprList(alias("alias", aggregate("count", field("a")))), emptyList(), emptyList(), - defaultStatsArgs() - ) - ); + defaultStatsArgs())); } @Test public void testStatsCommandWithNestedFunctions() { - assertEqual("source=t | stats sum(a+b)", + assertEqual( + "source=t | stats sum(a+b)", agg( relation("t"), - exprList( - alias( - "sum(a+b)", - aggregate( - "sum", - function("+", field("a"), field("b")) - )) - ), + exprList(alias("sum(a+b)", aggregate("sum", function("+", field("a"), field("b"))))), emptyList(), emptyList(), - defaultStatsArgs() - )); - assertEqual("source=t | stats sum(abs(a)/2)", + defaultStatsArgs())); + assertEqual( + "source=t 
| stats sum(abs(a)/2)", agg( relation("t"), exprList( alias( "sum(abs(a)/2)", - aggregate( - "sum", - function( - "/", - function("abs", field("a")), - intLiteral(2) - ) - ) - ) - ), + aggregate("sum", function("/", function("abs", field("a")), intLiteral(2))))), emptyList(), emptyList(), - defaultStatsArgs() - )); + defaultStatsArgs())); } @Test public void testStatsCommandWithSpan() { - assertEqual("source=t | stats avg(price) by span(timestamp, 1h)", + assertEqual( + "source=t | stats avg(price) by span(timestamp, 1h)", agg( relation("t"), - exprList( - alias("avg(price)", aggregate("avg", field("price"))) - ), + exprList(alias("avg(price)", aggregate("avg", field("price")))), emptyList(), emptyList(), alias("span(timestamp,1h)", span(field("timestamp"), intLiteral(1), SpanUnit.H)), - defaultStatsArgs() - )); + defaultStatsArgs())); - assertEqual("source=t | stats count(a) by span(age, 10)", + assertEqual( + "source=t | stats count(a) by span(age, 10)", agg( relation("t"), - exprList( - alias("count(a)", aggregate("count", field("a"))) - ), + exprList(alias("count(a)", aggregate("count", field("a")))), emptyList(), emptyList(), alias("span(age,10)", span(field("age"), intLiteral(10), SpanUnit.NONE)), - defaultStatsArgs() - )); + defaultStatsArgs())); - assertEqual("source=t | stats avg(price) by span(timestamp, 1h), b", + assertEqual( + "source=t | stats avg(price) by span(timestamp, 1h), b", agg( relation("t"), - exprList( - alias("avg(price)", aggregate("avg", field("price"))) - ), + exprList(alias("avg(price)", aggregate("avg", field("price")))), emptyList(), exprList(alias("b", field("b"))), alias("span(timestamp,1h)", span(field("timestamp"), intLiteral(1), SpanUnit.H)), - defaultStatsArgs() - )); + defaultStatsArgs())); - assertEqual("source=t | stats avg(price) by span(timestamp, 1h), f1, f2", + assertEqual( + "source=t | stats avg(price) by span(timestamp, 1h), f1, f2", agg( relation("t"), - exprList( - alias("avg(price)", aggregate("avg", 
field("price"))) - ), + exprList(alias("avg(price)", aggregate("avg", field("price")))), emptyList(), exprList(alias("f1", field("f1")), alias("f2", field("f2"))), alias("span(timestamp,1h)", span(field("timestamp"), intLiteral(1), SpanUnit.H)), - defaultStatsArgs() - )); + defaultStatsArgs())); } @Test(expected = org.opensearch.sql.common.antlr.SyntaxCheckException.class) @@ -423,152 +333,128 @@ public void throwExceptionWithEmptyGroupByList() { @Test public void testStatsSpanWithAlias() { - assertEqual("source=t | stats avg(price) by span(timestamp, 1h) as time_span", + assertEqual( + "source=t | stats avg(price) by span(timestamp, 1h) as time_span", agg( relation("t"), - exprList( - alias("avg(price)", aggregate("avg", field("price"))) - ), + exprList(alias("avg(price)", aggregate("avg", field("price")))), emptyList(), emptyList(), - alias("span(timestamp,1h)", span( - field("timestamp"), intLiteral(1), SpanUnit.H), "time_span"), - defaultStatsArgs() - )); + alias( + "span(timestamp,1h)", + span(field("timestamp"), intLiteral(1), SpanUnit.H), + "time_span"), + defaultStatsArgs())); - assertEqual("source=t | stats count(a) by span(age, 10) as numeric_span", + assertEqual( + "source=t | stats count(a) by span(age, 10) as numeric_span", agg( relation("t"), - exprList( - alias("count(a)", aggregate("count", field("a"))) - ), + exprList(alias("count(a)", aggregate("count", field("a")))), emptyList(), emptyList(), - alias("span(age,10)", span( - field("age"), intLiteral(10), SpanUnit.NONE), "numeric_span"), - defaultStatsArgs() - )); + alias( + "span(age,10)", span(field("age"), intLiteral(10), SpanUnit.NONE), "numeric_span"), + defaultStatsArgs())); } @Test public void testDedupCommand() { - assertEqual("source=t | dedup f1, f2", - dedupe( - relation("t"), - defaultDedupArgs(), - field("f1"), field("f2") - )); + assertEqual( + "source=t | dedup f1, f2", + dedupe(relation("t"), defaultDedupArgs(), field("f1"), field("f2"))); } - /** - * disable sortby from the dedup 
command syntax. - */ + /** disable sortby from the dedup command syntax. */ @Ignore(value = "disable sortby from the dedup command syntax") public void testDedupCommandWithSortby() { - assertEqual("source=t | dedup f1, f2 sortby f3", + assertEqual( + "source=t | dedup f1, f2 sortby f3", agg( relation("t"), exprList(field("f1"), field("f2")), exprList(field("f3", defaultSortFieldArgs())), null, - defaultDedupArgs() - )); + defaultDedupArgs())); } @Test public void testHeadCommand() { - assertEqual("source=t | head", - head(relation("t"), 10, 0)); + assertEqual("source=t | head", head(relation("t"), 10, 0)); } @Test public void testHeadCommandWithNumber() { - assertEqual("source=t | head 3", - head(relation("t"), 3, 0)); + assertEqual("source=t | head 3", head(relation("t"), 3, 0)); } @Test public void testHeadCommandWithNumberAndOffset() { - assertEqual("source=t | head 3 from 4", - head(relation("t"), 3, 4)); + assertEqual("source=t | head 3 from 4", head(relation("t"), 3, 4)); } @Test public void testSortCommand() { - assertEqual("source=t | sort f1, f2", + assertEqual( + "source=t | sort f1, f2", sort( relation("t"), field("f1", defaultSortFieldArgs()), - field("f2", defaultSortFieldArgs()) - )); + field("f2", defaultSortFieldArgs()))); } @Test public void testSortCommandWithOptions() { - assertEqual("source=t | sort - f1, + f2", + assertEqual( + "source=t | sort - f1, + f2", sort( relation("t"), - field("f1", exprList(argument("asc", booleanLiteral(false)), - argument("type", nullLiteral()))), - field("f2", defaultSortFieldArgs()) - )); + field( + "f1", + exprList(argument("asc", booleanLiteral(false)), argument("type", nullLiteral()))), + field("f2", defaultSortFieldArgs()))); } @Test public void testEvalCommand() { - assertEqual("source=t | eval r=abs(f)", - eval( - relation("t"), - let( - field("r"), - function("abs", field("f")) - ) - )); + assertEqual( + "source=t | eval r=abs(f)", + eval(relation("t"), let(field("r"), function("abs", field("f"))))); } 
@Test public void testIndexName() { - assertEqual("source=`log.2020.04.20.` a=1", - filter( - relation("log.2020.04.20."), - compare("=", field("a"), intLiteral(1)) - )); - assertEqual("describe `log.2020.04.20.`", - relation(mappingTable("log.2020.04.20."))); + assertEqual( + "source=`log.2020.04.20.` a=1", + filter(relation("log.2020.04.20."), compare("=", field("a"), intLiteral(1)))); + assertEqual("describe `log.2020.04.20.`", relation(mappingTable("log.2020.04.20."))); } @Test public void testIdentifierAsIndexNameStartWithDot() { - assertEqual("source=.opensearch_dashboards", - relation(".opensearch_dashboards")); - assertEqual("describe .opensearch_dashboards", - relation(mappingTable(".opensearch_dashboards"))); + assertEqual("source=.opensearch_dashboards", relation(".opensearch_dashboards")); + assertEqual( + "describe .opensearch_dashboards", relation(mappingTable(".opensearch_dashboards"))); } @Test public void testIdentifierAsIndexNameWithDotInTheMiddle() { assertEqual("source=log.2020.10.10", relation("log.2020.10.10")); assertEqual("source=log-7.10-2020.10.10", relation("log-7.10-2020.10.10")); - assertEqual("describe log.2020.10.10", - relation(mappingTable("log.2020.10.10"))); - assertEqual("describe log-7.10-2020.10.10", - relation(mappingTable("log-7.10-2020.10.10"))); + assertEqual("describe log.2020.10.10", relation(mappingTable("log.2020.10.10"))); + assertEqual("describe log-7.10-2020.10.10", relation(mappingTable("log-7.10-2020.10.10"))); } @Test public void testIdentifierAsIndexNameWithSlashInTheMiddle() { - assertEqual("source=log-2020", - relation("log-2020")); - assertEqual("describe log-2020", - relation(mappingTable("log-2020"))); + assertEqual("source=log-2020", relation("log-2020")); + assertEqual("describe log-2020", relation(mappingTable("log-2020"))); } @Test public void testIdentifierAsIndexNameContainStar() { - assertEqual("source=log-2020-10-*", - relation("log-2020-10-*")); - assertEqual("describe log-2020-10-*", - 
relation(mappingTable("log-2020-10-*"))); + assertEqual("source=log-2020-10-*", relation("log-2020-10-*")); + assertEqual("describe log-2020-10-*", relation(mappingTable("log-2020-10-*"))); } @Test @@ -576,138 +462,132 @@ public void testIdentifierAsIndexNameContainStarAndDots() { assertEqual("source=log-2020.10.*", relation("log-2020.10.*")); assertEqual("source=log-2020.*.01", relation("log-2020.*.01")); assertEqual("source=log-2020.*.*", relation("log-2020.*.*")); - assertEqual("describe log-2020.10.*", - relation(mappingTable("log-2020.10.*"))); - assertEqual("describe log-2020.*.01", - relation(mappingTable("log-2020.*.01"))); - assertEqual("describe log-2020.*.*", - relation(mappingTable("log-2020.*.*"))); + assertEqual("describe log-2020.10.*", relation(mappingTable("log-2020.10.*"))); + assertEqual("describe log-2020.*.01", relation(mappingTable("log-2020.*.01"))); + assertEqual("describe log-2020.*.*", relation(mappingTable("log-2020.*.*"))); } @Test public void testIdentifierAsFieldNameStartWithAt() { - assertEqual("source=log-2020 | fields @timestamp", - projectWithArg( - relation("log-2020"), - defaultFieldsArgs(), - field("@timestamp") - )); + assertEqual( + "source=log-2020 | fields @timestamp", + projectWithArg(relation("log-2020"), defaultFieldsArgs(), field("@timestamp"))); } @Test public void testRareCommand() { - assertEqual("source=t | rare a", + assertEqual( + "source=t | rare a", rareTopN( relation("t"), CommandType.RARE, exprList(argument("noOfResults", intLiteral(10))), emptyList(), - field("a") - )); + field("a"))); } @Test public void testRareCommandWithGroupBy() { - assertEqual("source=t | rare a by b", + assertEqual( + "source=t | rare a by b", rareTopN( relation("t"), CommandType.RARE, exprList(argument("noOfResults", intLiteral(10))), exprList(field("b")), - field("a") - )); + field("a"))); } @Test public void testRareCommandWithMultipleFields() { - assertEqual("source=t | rare `a`, `b` by `c`", + assertEqual( + "source=t | rare `a`, 
`b` by `c`", rareTopN( relation("t"), CommandType.RARE, exprList(argument("noOfResults", intLiteral(10))), exprList(field("c")), field("a"), - field("b") - )); + field("b"))); } @Test public void testTopCommandWithN() { - assertEqual("source=t | top 1 a", + assertEqual( + "source=t | top 1 a", rareTopN( relation("t"), CommandType.TOP, exprList(argument("noOfResults", intLiteral(1))), emptyList(), - field("a") - )); + field("a"))); } @Test public void testTopCommandWithoutNAndGroupBy() { - assertEqual("source=t | top a", + assertEqual( + "source=t | top a", rareTopN( relation("t"), CommandType.TOP, exprList(argument("noOfResults", intLiteral(10))), emptyList(), - field("a") - )); + field("a"))); } @Test public void testTopCommandWithNAndGroupBy() { - assertEqual("source=t | top 1 a by b", + assertEqual( + "source=t | top 1 a by b", rareTopN( relation("t"), CommandType.TOP, exprList(argument("noOfResults", intLiteral(1))), exprList(field("b")), - field("a") - )); + field("a"))); } @Test public void testTopCommandWithMultipleFields() { - assertEqual("source=t | top 1 `a`, `b` by `c`", + assertEqual( + "source=t | top 1 `a`, `b` by `c`", rareTopN( relation("t"), CommandType.TOP, exprList(argument("noOfResults", intLiteral(1))), exprList(field("c")), field("a"), - field("b") - )); + field("b"))); } @Test public void testGrokCommand() { - assertEqual("source=t | grok raw \"pattern\"", + assertEqual( + "source=t | grok raw \"pattern\"", parse( relation("t"), ParseMethod.GROK, field("raw"), stringLiteral("pattern"), - ImmutableMap.of() - )); + ImmutableMap.of())); } @Test public void testParseCommand() { - assertEqual("source=t | parse raw \"pattern\"", + assertEqual( + "source=t | parse raw \"pattern\"", parse( relation("t"), ParseMethod.REGEX, field("raw"), stringLiteral("pattern"), - ImmutableMap.of() - )); + ImmutableMap.of())); } @Test public void testPatternsCommand() { - assertEqual("source=t | patterns new_field=\"custom_field\" " - + "pattern=\"custom_pattern\" 
raw", + assertEqual( + "source=t | patterns new_field=\"custom_field\" " + "pattern=\"custom_pattern\" raw", parse( relation("t"), ParseMethod.PATTERNS, @@ -716,8 +596,7 @@ public void testPatternsCommand() { ImmutableMap.builder() .put("new_field", stringLiteral("custom_field")) .put("pattern", stringLiteral("custom_pattern")) - .build() - )); + .build())); } @Test @@ -734,114 +613,118 @@ public void testPatternsCommandWithoutArguments() { @Test public void testKmeansCommand() { - assertEqual("source=t | kmeans centroids=3 iterations=2 distance_type='l1'", - new Kmeans(relation("t"), ImmutableMap.builder() - .put("centroids", new Literal(3, DataType.INTEGER)) - .put("iterations", new Literal(2, DataType.INTEGER)) - .put("distance_type", new Literal("l1", DataType.STRING)) - .build() - )); + assertEqual( + "source=t | kmeans centroids=3 iterations=2 distance_type='l1'", + new Kmeans( + relation("t"), + ImmutableMap.builder() + .put("centroids", new Literal(3, DataType.INTEGER)) + .put("iterations", new Literal(2, DataType.INTEGER)) + .put("distance_type", new Literal("l1", DataType.STRING)) + .build())); } @Test public void testKmeansCommandWithoutParameter() { - assertEqual("source=t | kmeans", - new Kmeans(relation("t"), ImmutableMap.of())); + assertEqual("source=t | kmeans", new Kmeans(relation("t"), ImmutableMap.of())); } @Test public void testMLCommand() { - assertEqual("source=t | ml action='trainandpredict' " - + "algorithm='kmeans' centroid=3 iteration=2 dist_type='l1'", - new ML(relation("t"), ImmutableMap.builder() - .put("action", new Literal("trainandpredict", DataType.STRING)) - .put("algorithm", new Literal("kmeans", DataType.STRING)) - .put("centroid", new Literal(3, DataType.INTEGER)) - .put("iteration", new Literal(2, DataType.INTEGER)) - .put("dist_type", new Literal("l1", DataType.STRING)) - .build() - )); + assertEqual( + "source=t | ml action='trainandpredict' " + + "algorithm='kmeans' centroid=3 iteration=2 dist_type='l1'", + new ML( + 
relation("t"), + ImmutableMap.builder() + .put("action", new Literal("trainandpredict", DataType.STRING)) + .put("algorithm", new Literal("kmeans", DataType.STRING)) + .put("centroid", new Literal(3, DataType.INTEGER)) + .put("iteration", new Literal(2, DataType.INTEGER)) + .put("dist_type", new Literal("l1", DataType.STRING)) + .build())); } @Test public void testDescribeCommand() { - assertEqual("describe t", - relation(mappingTable("t"))); + assertEqual("describe t", relation(mappingTable("t"))); } @Test public void testDescribeMatchAllCrossClusterSearchCommand() { - assertEqual("describe *:t", - relation(mappingTable("*:t"))); + assertEqual("describe *:t", relation(mappingTable("*:t"))); } @Test public void testDescribeCommandWithMultipleIndices() { - assertEqual("describe t,u", - relation(mappingTable("t,u"))); + assertEqual("describe t,u", relation(mappingTable("t,u"))); } @Test public void testDescribeCommandWithFullyQualifiedTableName() { - assertEqual("describe prometheus.http_metric", + assertEqual( + "describe prometheus.http_metric", relation(qualifiedName("prometheus", mappingTable("http_metric")))); - assertEqual("describe prometheus.schema.http_metric", + assertEqual( + "describe prometheus.schema.http_metric", relation(qualifiedName("prometheus", "schema", mappingTable("http_metric")))); } @Test public void test_fitRCFADCommand_withoutDataFormat() { - assertEqual("source=t | AD shingle_size=10 time_decay=0.0001 time_field='timestamp' " + assertEqual( + "source=t | AD shingle_size=10 time_decay=0.0001 time_field='timestamp' " + "anomaly_rate=0.1 anomaly_score_threshold=0.1 sample_size=256 " + "number_of_trees=256 time_zone='PST' output_after=256 " + "training_data_size=256", - new AD(relation("t"), ImmutableMap.builder() - .put("anomaly_rate", new Literal(0.1, DataType.DOUBLE)) - .put("anomaly_score_threshold", new Literal(0.1, DataType.DOUBLE)) - .put("sample_size", new Literal(256, DataType.INTEGER)) - .put("number_of_trees", new Literal(256, 
DataType.INTEGER)) - .put("time_zone", new Literal("PST", DataType.STRING)) - .put("output_after", new Literal(256, DataType.INTEGER)) - .put("shingle_size", new Literal(10, DataType.INTEGER)) - .put("time_decay", new Literal(0.0001, DataType.DOUBLE)) - .put("time_field", new Literal("timestamp", DataType.STRING)) - .put("training_data_size", new Literal(256, DataType.INTEGER)) - .build() - )); + new AD( + relation("t"), + ImmutableMap.builder() + .put("anomaly_rate", new Literal(0.1, DataType.DOUBLE)) + .put("anomaly_score_threshold", new Literal(0.1, DataType.DOUBLE)) + .put("sample_size", new Literal(256, DataType.INTEGER)) + .put("number_of_trees", new Literal(256, DataType.INTEGER)) + .put("time_zone", new Literal("PST", DataType.STRING)) + .put("output_after", new Literal(256, DataType.INTEGER)) + .put("shingle_size", new Literal(10, DataType.INTEGER)) + .put("time_decay", new Literal(0.0001, DataType.DOUBLE)) + .put("time_field", new Literal("timestamp", DataType.STRING)) + .put("training_data_size", new Literal(256, DataType.INTEGER)) + .build())); } @Test public void test_fitRCFADCommand_withDataFormat() { - assertEqual("source=t | AD shingle_size=10 time_decay=0.0001 time_field='timestamp' " + assertEqual( + "source=t | AD shingle_size=10 time_decay=0.0001 time_field='timestamp' " + "anomaly_rate=0.1 anomaly_score_threshold=0.1 sample_size=256 " + "number_of_trees=256 time_zone='PST' output_after=256 " + "training_data_size=256 date_format='HH:mm:ss yyyy-MM-dd'", - new AD(relation("t"), ImmutableMap.builder() - .put("anomaly_rate", new Literal(0.1, DataType.DOUBLE)) - .put("anomaly_score_threshold", new Literal(0.1, DataType.DOUBLE)) - .put("sample_size", new Literal(256, DataType.INTEGER)) - .put("number_of_trees", new Literal(256, DataType.INTEGER)) - .put("date_format", new Literal("HH:mm:ss yyyy-MM-dd", DataType.STRING)) - .put("time_zone", new Literal("PST", DataType.STRING)) - .put("output_after", new Literal(256, DataType.INTEGER)) - 
.put("shingle_size", new Literal(10, DataType.INTEGER)) - .put("time_decay", new Literal(0.0001, DataType.DOUBLE)) - .put("time_field", new Literal("timestamp", DataType.STRING)) - .put("training_data_size", new Literal(256, DataType.INTEGER)) - .build() - )); + new AD( + relation("t"), + ImmutableMap.builder() + .put("anomaly_rate", new Literal(0.1, DataType.DOUBLE)) + .put("anomaly_score_threshold", new Literal(0.1, DataType.DOUBLE)) + .put("sample_size", new Literal(256, DataType.INTEGER)) + .put("number_of_trees", new Literal(256, DataType.INTEGER)) + .put("date_format", new Literal("HH:mm:ss yyyy-MM-dd", DataType.STRING)) + .put("time_zone", new Literal("PST", DataType.STRING)) + .put("output_after", new Literal(256, DataType.INTEGER)) + .put("shingle_size", new Literal(10, DataType.INTEGER)) + .put("time_decay", new Literal(0.0001, DataType.DOUBLE)) + .put("time_field", new Literal("timestamp", DataType.STRING)) + .put("training_data_size", new Literal(256, DataType.INTEGER)) + .build())); } @Test public void test_batchRCFADCommand() { - assertEqual("source=t | AD", - new AD(relation("t"), ImmutableMap.of())); + assertEqual("source=t | AD", new AD(relation("t"), ImmutableMap.of())); } @Test public void testShowDataSourcesCommand() { - assertEqual("show datasources", - relation(DATASOURCES_TABLE_NAME)); + assertEqual("show datasources", relation(DATASOURCES_TABLE_NAME)); } protected void assertEqual(String query, Node expectedPlan) { diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstExpressionBuilderTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstExpressionBuilderTest.java index a6e130eed3..c549a20f3e 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstExpressionBuilderTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstExpressionBuilderTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.parser; import static java.util.Collections.emptyList; @@ -16,7 +15,6 
@@ import static org.opensearch.sql.ast.dsl.AstDSL.booleanLiteral; import static org.opensearch.sql.ast.dsl.AstDSL.cast; import static org.opensearch.sql.ast.dsl.AstDSL.compare; -import static org.opensearch.sql.ast.dsl.AstDSL.dateLiteral; import static org.opensearch.sql.ast.dsl.AstDSL.defaultFieldsArgs; import static org.opensearch.sql.ast.dsl.AstDSL.defaultSortFieldArgs; import static org.opensearch.sql.ast.dsl.AstDSL.defaultStatsArgs; @@ -41,7 +39,6 @@ import static org.opensearch.sql.ast.dsl.AstDSL.relation; import static org.opensearch.sql.ast.dsl.AstDSL.sort; import static org.opensearch.sql.ast.dsl.AstDSL.stringLiteral; -import static org.opensearch.sql.ast.dsl.AstDSL.timestampLiteral; import static org.opensearch.sql.ast.dsl.AstDSL.unresolvedArg; import static org.opensearch.sql.ast.dsl.AstDSL.xor; @@ -60,464 +57,313 @@ public class AstExpressionBuilderTest extends AstBuilderTest { @Test public void testLogicalNotExpr() { - assertEqual("source=t not a=1", - filter( - relation("t"), - not( - compare("=", field("a"), intLiteral(1)) - ) - )); + assertEqual( + "source=t not a=1", filter(relation("t"), not(compare("=", field("a"), intLiteral(1))))); } @Test public void testLogicalOrExpr() { - assertEqual("source=t a=1 or b=2", + assertEqual( + "source=t a=1 or b=2", filter( relation("t"), - or( - compare("=", field("a"), intLiteral(1)), - compare("=", field("b"), intLiteral(2)) - ) - )); + or(compare("=", field("a"), intLiteral(1)), compare("=", field("b"), intLiteral(2))))); } @Test public void testLogicalAndExpr() { - assertEqual("source=t a=1 and b=2", + assertEqual( + "source=t a=1 and b=2", filter( relation("t"), - and( - compare("=", field("a"), intLiteral(1)), - compare("=", field("b"), intLiteral(2)) - ) - )); + and(compare("=", field("a"), intLiteral(1)), compare("=", field("b"), intLiteral(2))))); } @Test public void testLogicalAndExprWithoutKeywordAnd() { - assertEqual("source=t a=1 b=2", + assertEqual( + "source=t a=1 b=2", filter( relation("t"), - 
and( - compare("=", field("a"), intLiteral(1)), - compare("=", field("b"), intLiteral(2)) - ) - )); + and(compare("=", field("a"), intLiteral(1)), compare("=", field("b"), intLiteral(2))))); } @Test public void testLogicalXorExpr() { - assertEqual("source=t a=1 xor b=2", + assertEqual( + "source=t a=1 xor b=2", filter( relation("t"), - xor( - compare("=", field("a"), intLiteral(1)), - compare("=", field("b"), intLiteral(2)) - ) - )); + xor(compare("=", field("a"), intLiteral(1)), compare("=", field("b"), intLiteral(2))))); } @Test public void testLogicalLikeExpr() { - assertEqual("source=t like(a, '_a%b%c_d_')", - filter( - relation("t"), - function("like", field("a"), stringLiteral("_a%b%c_d_")) - )); + assertEqual( + "source=t like(a, '_a%b%c_d_')", + filter(relation("t"), function("like", field("a"), stringLiteral("_a%b%c_d_")))); } @Test public void testBooleanIsNullFunction() { - assertEqual("source=t isnull(a)", - filter( - relation("t"), - function("is null", field("a")) - )); + assertEqual("source=t isnull(a)", filter(relation("t"), function("is null", field("a")))); } @Test public void testBooleanIsNotNullFunction() { - assertEqual("source=t isnotnull(a)", - filter( - relation("t"), - function("is not null", field("a")) - )); + assertEqual( + "source=t isnotnull(a)", filter(relation("t"), function("is not null", field("a")))); } - /** - * Todo. search operator should not include functionCall, need to change antlr. - */ + /** Todo. search operator should not include functionCall, need to change antlr. 
*/ @Ignore("search operator should not include functionCall, need to change antlr") public void testEvalExpr() { - assertEqual("source=t f=abs(a)", - filter( - relation("t"), - equalTo( - field("f"), - function("abs", field("a")) - ) - )); + assertEqual( + "source=t f=abs(a)", + filter(relation("t"), equalTo(field("f"), function("abs", field("a"))))); } @Test public void testEvalFunctionExpr() { - assertEqual("source=t | eval f=abs(a)", - eval( - relation("t"), - let( - field("f"), - function("abs", field("a")) - ) - )); + assertEqual( + "source=t | eval f=abs(a)", + eval(relation("t"), let(field("f"), function("abs", field("a"))))); } @Test public void testEvalFunctionExprNoArgs() { - assertEqual("source=t | eval f=PI()", - eval( - relation("t"), - let( - field("f"), - function("PI") - ) - )); + assertEqual("source=t | eval f=PI()", eval(relation("t"), let(field("f"), function("PI")))); } @Test public void testPositionFunctionExpr() { - assertEqual("source=t | eval f=position('substr' IN 'str')", + assertEqual( + "source=t | eval f=position('substr' IN 'str')", eval( relation("t"), - let( - field("f"), - function("position", - stringLiteral("substr"), stringLiteral("str")) - ) - )); + let(field("f"), function("position", stringLiteral("substr"), stringLiteral("str"))))); } @Test public void testEvalBinaryOperationExpr() { - assertEqual("source=t | eval f=a+b", - eval( - relation("t"), - let( - field("f"), - function("+", field("a"), field("b")) - ) - )); - assertEqual("source=t | eval f=(a+b)", - eval( - relation("t"), - let( - field("f"), - function("+", field("a"), field("b")) - ) - )); + assertEqual( + "source=t | eval f=a+b", + eval(relation("t"), let(field("f"), function("+", field("a"), field("b"))))); + assertEqual( + "source=t | eval f=(a+b)", + eval(relation("t"), let(field("f"), function("+", field("a"), field("b"))))); } @Test public void testLiteralValueBinaryOperationExpr() { - assertEqual("source=t | eval f=3+2", - eval( - relation("t"), - let( - 
field("f"), - function("+", intLiteral(3), intLiteral(2)) - ) - )); + assertEqual( + "source=t | eval f=3+2", + eval(relation("t"), let(field("f"), function("+", intLiteral(3), intLiteral(2))))); } @Test public void testBinaryOperationExprWithParentheses() { - assertEqual("source = t | where a = (1 + 2) * 3", + assertEqual( + "source = t | where a = (1 + 2) * 3", filter( relation("t"), - compare("=", + compare( + "=", field("a"), - function("*", - function("+", intLiteral(1), intLiteral(2)), - intLiteral(3))))); + function("*", function("+", intLiteral(1), intLiteral(2)), intLiteral(3))))); } @Test public void testBinaryOperationExprPrecedence() { - assertEqual("source = t | where a = 1 + 2 * 3", + assertEqual( + "source = t | where a = 1 + 2 * 3", filter( relation("t"), - compare("=", + compare( + "=", field("a"), - function("+", - intLiteral(1), - function("*", intLiteral(2), intLiteral(3)))))); + function("+", intLiteral(1), function("*", intLiteral(2), intLiteral(3)))))); } @Test public void testCompareExpr() { - assertEqual("source=t a='b'", - filter( - relation("t"), - compare("=", field("a"), stringLiteral("b")) - )); + assertEqual( + "source=t a='b'", filter(relation("t"), compare("=", field("a"), stringLiteral("b")))); } @Test public void testCompareFieldsExpr() { - assertEqual("source=t a>b", - filter( - relation("t"), - compare(">", field("a"), field("b")) - )); + assertEqual("source=t a>b", filter(relation("t"), compare(">", field("a"), field("b")))); } @Test public void testInExpr() { - assertEqual("source=t f in (1, 2, 3)", - filter( - relation("t"), - in( - field("f"), - intLiteral(1), intLiteral(2), intLiteral(3)) - )); + assertEqual( + "source=t f in (1, 2, 3)", + filter(relation("t"), in(field("f"), intLiteral(1), intLiteral(2), intLiteral(3)))); } @Test public void testFieldExpr() { - assertEqual("source=t | sort + f", - sort( - relation("t"), - field("f", defaultSortFieldArgs()) - )); + assertEqual("source=t | sort + f", sort(relation("t"), 
field("f", defaultSortFieldArgs()))); } @Test public void testSortFieldWithMinusKeyword() { - assertEqual("source=t | sort - f", + assertEqual( + "source=t | sort - f", sort( relation("t"), - field( - "f", - argument("asc", booleanLiteral(false)), - argument("type", nullLiteral()) - ) - )); + field("f", argument("asc", booleanLiteral(false)), argument("type", nullLiteral())))); } @Test public void testSortFieldWithBackticks() { - assertEqual("source=t | sort `f`", - sort( - relation("t"), - field("f", defaultSortFieldArgs()) - )); + assertEqual("source=t | sort `f`", sort(relation("t"), field("f", defaultSortFieldArgs()))); } @Test public void testSortFieldWithAutoKeyword() { - assertEqual("source=t | sort auto(f)", + assertEqual( + "source=t | sort auto(f)", sort( relation("t"), field( "f", argument("asc", booleanLiteral(true)), - argument("type", stringLiteral("auto")) - ) - )); + argument("type", stringLiteral("auto"))))); } @Test public void testSortFieldWithIpKeyword() { - assertEqual("source=t | sort ip(f)", + assertEqual( + "source=t | sort ip(f)", sort( relation("t"), field( "f", argument("asc", booleanLiteral(true)), - argument("type", stringLiteral("ip")) - ) - )); + argument("type", stringLiteral("ip"))))); } @Test public void testSortFieldWithNumKeyword() { - assertEqual("source=t | sort num(f)", + assertEqual( + "source=t | sort num(f)", sort( relation("t"), field( "f", argument("asc", booleanLiteral(true)), - argument("type", stringLiteral("num")) - ) - )); + argument("type", stringLiteral("num"))))); } @Test public void testSortFieldWithStrKeyword() { - assertEqual("source=t | sort str(f)", + assertEqual( + "source=t | sort str(f)", sort( relation("t"), field( "f", argument("asc", booleanLiteral(true)), - argument("type", stringLiteral("str")) - ) - )); + argument("type", stringLiteral("str"))))); } @Test public void testAggFuncCallExpr() { - assertEqual("source=t | stats avg(a) by b", + assertEqual( + "source=t | stats avg(a) by b", agg( 
relation("t"), - exprList( - alias( - "avg(a)", - aggregate("avg", field("a")) - ) - ), + exprList(alias("avg(a)", aggregate("avg", field("a")))), emptyList(), - exprList( - alias( - "b", - field("b") - )), - defaultStatsArgs() - )); + exprList(alias("b", field("b"))), + defaultStatsArgs())); } @Test public void testVarAggregationShouldPass() { - assertEqual("source=t | stats var_samp(a) by b", + assertEqual( + "source=t | stats var_samp(a) by b", agg( relation("t"), - exprList( - alias( - "var_samp(a)", - aggregate("var_samp", field("a")) - ) - ), + exprList(alias("var_samp(a)", aggregate("var_samp", field("a")))), emptyList(), - exprList( - alias( - "b", - field("b") - )), - defaultStatsArgs() - )); + exprList(alias("b", field("b"))), + defaultStatsArgs())); } @Test public void testVarpAggregationShouldPass() { - assertEqual("source=t | stats var_pop(a) by b", + assertEqual( + "source=t | stats var_pop(a) by b", agg( relation("t"), - exprList( - alias( - "var_pop(a)", - aggregate("var_pop", field("a")) - ) - ), + exprList(alias("var_pop(a)", aggregate("var_pop", field("a")))), emptyList(), - exprList( - alias( - "b", - field("b") - )), - defaultStatsArgs() - )); + exprList(alias("b", field("b"))), + defaultStatsArgs())); } @Test public void testStdDevAggregationShouldPass() { - assertEqual("source=t | stats stddev_samp(a) by b", + assertEqual( + "source=t | stats stddev_samp(a) by b", agg( relation("t"), - exprList( - alias( - "stddev_samp(a)", - aggregate("stddev_samp", field("a")) - ) - ), + exprList(alias("stddev_samp(a)", aggregate("stddev_samp", field("a")))), emptyList(), - exprList( - alias( - "b", - field("b") - )), - defaultStatsArgs() - )); + exprList(alias("b", field("b"))), + defaultStatsArgs())); } @Test public void testStdDevPAggregationShouldPass() { - assertEqual("source=t | stats stddev_pop(a) by b", + assertEqual( + "source=t | stats stddev_pop(a) by b", agg( relation("t"), - exprList( - alias( - "stddev_pop(a)", - aggregate("stddev_pop", 
field("a")) - ) - ), + exprList(alias("stddev_pop(a)", aggregate("stddev_pop", field("a")))), emptyList(), - exprList( - alias( - "b", - field("b") - )), - defaultStatsArgs() - )); + exprList(alias("b", field("b"))), + defaultStatsArgs())); } @Test public void testPercentileAggFuncExpr() { - assertEqual("source=t | stats percentile<1>(a)", + assertEqual( + "source=t | stats percentile<1>(a)", agg( relation("t"), exprList( - alias("percentile<1>(a)", - aggregate( - "percentile", - field("a"), - argument("rank", intLiteral(1)) - ) - ) - ), + alias( + "percentile<1>(a)", + aggregate("percentile", field("a"), argument("rank", intLiteral(1))))), emptyList(), emptyList(), - defaultStatsArgs() - )); + defaultStatsArgs())); } @Test public void testCountFuncCallExpr() { - assertEqual("source=t | stats count() by b", + assertEqual( + "source=t | stats count() by b", agg( relation("t"), - exprList( - alias( - "count()", - aggregate("count", AllFields.of()) - ) - ), + exprList(alias("count()", aggregate("count", AllFields.of()))), emptyList(), - exprList( - alias( - "b", - field("b") - )), - defaultStatsArgs() - )); + exprList(alias("b", field("b"))), + defaultStatsArgs())); } @Test public void testDistinctCount() { - assertEqual("source=t | stats distinct_count(a)", + assertEqual( + "source=t | stats distinct_count(a)", agg( relation("t"), - exprList( - alias("distinct_count(a)", - distinctAggregate("count", field("a")))), + exprList(alias("distinct_count(a)", distinctAggregate("count", field("a")))), emptyList(), emptyList(), defaultStatsArgs())); @@ -525,168 +371,114 @@ public void testDistinctCount() { @Test public void testTakeAggregationNoArgsShouldPass() { - assertEqual("source=t | stats take(a)", + assertEqual( + "source=t | stats take(a)", agg( relation("t"), - exprList(alias("take(a)", - aggregate("take", field("a"), unresolvedArg("size", intLiteral(10))))), + exprList( + alias( + "take(a)", + aggregate("take", field("a"), unresolvedArg("size", intLiteral(10))))), 
emptyList(), emptyList(), - defaultStatsArgs() - )); + defaultStatsArgs())); } @Test public void testTakeAggregationWithArgsShouldPass() { - assertEqual("source=t | stats take(a, 5)", + assertEqual( + "source=t | stats take(a, 5)", agg( relation("t"), - exprList(alias("take(a, 5)", - aggregate("take", field("a"), unresolvedArg("size", intLiteral(5))))), + exprList( + alias( + "take(a, 5)", + aggregate("take", field("a"), unresolvedArg("size", intLiteral(5))))), emptyList(), emptyList(), - defaultStatsArgs() - )); + defaultStatsArgs())); } - @Test public void testEvalFuncCallExpr() { - assertEqual("source=t | eval f=abs(a)", - eval( - relation("t"), - let( - field("f"), - function("abs", field("a")) - ) - )); + assertEqual( + "source=t | eval f=abs(a)", + eval(relation("t"), let(field("f"), function("abs", field("a"))))); } @Test public void testDataTypeFuncCall() { - assertEqual("source=t | eval f=cast(1 as string)", - eval( - relation("t"), - let( - field("f"), - cast(intLiteral(1), stringLiteral("string")) - ) - )); + assertEqual( + "source=t | eval f=cast(1 as string)", + eval(relation("t"), let(field("f"), cast(intLiteral(1), stringLiteral("string"))))); } @Test public void testNestedFieldName() { - assertEqual("source=t | fields field0.field1.field2", + assertEqual( + "source=t | fields field0.field1.field2", projectWithArg( relation("t"), defaultFieldsArgs(), - field( - qualifiedName("field0", "field1", "field2") - ) - )); + field(qualifiedName("field0", "field1", "field2")))); } @Test public void testFieldNameWithSpecialChars() { - assertEqual("source=t | fields `field-0`", - projectWithArg( - relation("t"), - defaultFieldsArgs(), - field( - qualifiedName("field-0") - ) - )); + assertEqual( + "source=t | fields `field-0`", + projectWithArg(relation("t"), defaultFieldsArgs(), field(qualifiedName("field-0")))); } @Test public void testNestedFieldNameWithSpecialChars() { - assertEqual("source=t | fields `field-0`.`field#1`.`field*2`", + assertEqual( + "source=t 
| fields `field-0`.`field#1`.`field*2`", projectWithArg( relation("t"), defaultFieldsArgs(), - field( - qualifiedName("field-0", "field#1", "field*2") - ) - )); + field(qualifiedName("field-0", "field#1", "field*2")))); } @Test public void testStringLiteralExpr() { - assertEqual("source=t a=\"string\"", - filter( - relation("t"), - compare( - "=", - field("a"), - stringLiteral("string") - ) - )); + assertEqual( + "source=t a=\"string\"", + filter(relation("t"), compare("=", field("a"), stringLiteral("string")))); } @Test public void testIntegerLiteralExpr() { - assertEqual("source=t a=1 b=-1", + assertEqual( + "source=t a=1 b=-1", filter( relation("t"), and( - compare( - "=", - field("a"), - intLiteral(1) - ), - compare( - "=", - field("b"), - intLiteral(-1) - ) - ) - )); + compare("=", field("a"), intLiteral(1)), + compare("=", field("b"), intLiteral(-1))))); } @Test public void testLongLiteralExpr() { - assertEqual("source=t a=1234567890123 b=-1234567890123", + assertEqual( + "source=t a=1234567890123 b=-1234567890123", filter( relation("t"), and( - compare( - "=", - field("a"), - longLiteral(1234567890123L) - ), - compare( - "=", - field("b"), - longLiteral(-1234567890123L) - ) - ) - )); + compare("=", field("a"), longLiteral(1234567890123L)), + compare("=", field("b"), longLiteral(-1234567890123L))))); } @Test public void testDoubleLiteralExpr() { - assertEqual("source=t b=0.1", - filter( - relation("t"), - compare( - "=", - field("b"), - doubleLiteral(0.1) - ) - )); + assertEqual( + "source=t b=0.1", filter(relation("t"), compare("=", field("b"), doubleLiteral(0.1)))); } @Test public void testBooleanLiteralExpr() { - assertEqual("source=t a=true", - filter( - relation("t"), - compare( - "=", - field("a"), - booleanLiteral(true) - ) - )); + assertEqual( + "source=t a=true", filter(relation("t"), compare("=", field("a"), booleanLiteral(true)))); } @Test @@ -694,42 +486,23 @@ public void testIntervalLiteralExpr() { assertEqual( "source=t a = interval 1 day", 
filter( - relation("t"), - compare( - "=", - field("a"), - intervalLiteral(1, DataType.INTEGER, "day") - ) - )); + relation("t"), compare("=", field("a"), intervalLiteral(1, DataType.INTEGER, "day")))); } @Test public void testKeywordsAsIdentifiers() { - assertEqual( - "source=timestamp", - relation("timestamp") - ); + assertEqual("source=timestamp", relation("timestamp")); assertEqual( "source=t | fields timestamp", - projectWithArg( - relation("t"), - defaultFieldsArgs(), - field("timestamp") - ) - ); + projectWithArg(relation("t"), defaultFieldsArgs(), field("timestamp"))); } @Test public void canBuildKeywordsAsIdentInQualifiedName() { assertEqual( "source=test | fields timestamp", - projectWithArg( - relation("test"), - defaultFieldsArgs(), - field("timestamp") - ) - ); + projectWithArg(relation("test"), defaultFieldsArgs(), field("timestamp"))); } @Test @@ -742,10 +515,7 @@ public void canBuildMatchRelevanceFunctionWithArguments() { "match", unresolvedArg("field", qualifiedName("message")), unresolvedArg("query", stringLiteral("test query")), - unresolvedArg("analyzer", stringLiteral("keyword")) - ) - ) - ); + unresolvedArg("analyzer", stringLiteral("keyword"))))); } @Test @@ -757,13 +527,11 @@ public void canBuildMulti_matchRelevanceFunctionWithArguments() { relation("test"), function( "multi_match", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field1", 1.F, "field2", 3.2F))), + unresolvedArg( + "fields", + new RelevanceFieldList(ImmutableMap.of("field1", 1.F, "field2", 3.2F))), unresolvedArg("query", stringLiteral("test query")), - unresolvedArg("analyzer", stringLiteral("keyword")) - ) - ) - ); + unresolvedArg("analyzer", stringLiteral("keyword"))))); } @Test @@ -775,13 +543,11 @@ public void canBuildSimple_query_stringRelevanceFunctionWithArguments() { relation("test"), function( "simple_query_string", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field1", 1.F, "field2", 3.2F))), + unresolvedArg( + "fields", + 
new RelevanceFieldList(ImmutableMap.of("field1", 1.F, "field2", 3.2F))), unresolvedArg("query", stringLiteral("test query")), - unresolvedArg("analyzer", stringLiteral("keyword")) - ) - ) - ); + unresolvedArg("analyzer", stringLiteral("keyword"))))); } @Test @@ -793,13 +559,11 @@ public void canBuildQuery_stringRelevanceFunctionWithArguments() { relation("test"), function( "query_string", - unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of( - "field1", 1.F, "field2", 3.2F))), + unresolvedArg( + "fields", + new RelevanceFieldList(ImmutableMap.of("field1", 1.F, "field2", 3.2F))), unresolvedArg("query", stringLiteral("test query")), - unresolvedArg("analyzer", stringLiteral("keyword")) - ) - ) - ); + unresolvedArg("analyzer", stringLiteral("keyword"))))); } @Test @@ -818,11 +582,10 @@ public void functionNameCanBeUsedAsIdentifier() { + "| TIME_TO_SEC | TIMESTAMP | TO_DAYS | UNIX_TIMESTAMP | WEEK | YEAR"); assertFunctionNameCouldBeId( "SUBSTR | SUBSTRING | TRIM | LTRIM | RTRIM | LOWER | UPPER | CONCAT | CONCAT_WS | LENGTH " - + "| STRCMP | RIGHT | LEFT | ASCII | LOCATE | REPLACE" - ); + + "| STRCMP | RIGHT | LEFT | ASCII | LOCATE | REPLACE"); assertFunctionNameCouldBeId( "ABS | CEIL | CEILING | CONV | CRC32 | E | EXP | FLOOR | LN | LOG" - + " | LOG10 | LOG2 | MOD | PI |POW | POWER | RAND | ROUND | SIGN | SQRT | TRUNCATE " + + " | LOG10 | LOG2 | MOD | PI |POW | POWER | RAND | ROUND | SIGN | SQRT | TRUNCATE " + "| ACOS | ASIN | ATAN | ATAN2 | COS | COT | DEGREES | RADIANS | SIN | TAN"); assertFunctionNameCouldBeId( "SEARCH | DESCRIBE | SHOW | FROM | WHERE | FIELDS | RENAME | STATS " @@ -833,100 +596,79 @@ public void functionNameCanBeUsedAsIdentifier() { void assertFunctionNameCouldBeId(String antlrFunctionName) { List functionList = - Arrays.stream(antlrFunctionName.split("\\|")).map(String::stripLeading) - .map(String::stripTrailing).collect( - Collectors.toList()); + Arrays.stream(antlrFunctionName.split("\\|")) + .map(String::stripLeading) + 
.map(String::stripTrailing) + .collect(Collectors.toList()); assertFalse(functionList.isEmpty()); for (String functionName : functionList) { - assertEqual(String.format(Locale.ROOT, "source=t | fields %s", functionName), - projectWithArg( - relation("t"), - defaultFieldsArgs(), - field( - qualifiedName(functionName) - ) - )); + assertEqual( + String.format(Locale.ROOT, "source=t | fields %s", functionName), + projectWithArg(relation("t"), defaultFieldsArgs(), field(qualifiedName(functionName)))); } } // https://github.com/opensearch-project/sql/issues/1318 @Test public void indexCanBeId() { - assertEqual("source = index | stats count() by index", + assertEqual( + "source = index | stats count() by index", agg( relation("index"), - exprList( - alias( - "count()", - aggregate("count", AllFields.of()) - ) - ), + exprList(alias("count()", aggregate("count", AllFields.of()))), emptyList(), - exprList( - alias( - "index", - field("index") - )), - defaultStatsArgs() - )); + exprList(alias("index", field("index"))), + defaultStatsArgs())); } @Test public void testExtractFunctionExpr() { - assertEqual("source=t | eval f=extract(day from '2001-05-07 10:11:12')", + assertEqual( + "source=t | eval f=extract(day from '2001-05-07 10:11:12')", eval( relation("t"), let( field("f"), - function("extract", - stringLiteral("day"), stringLiteral("2001-05-07 10:11:12")) - ) - )); + function("extract", stringLiteral("day"), stringLiteral("2001-05-07 10:11:12"))))); } - @Test public void testGet_FormatFunctionExpr() { - assertEqual("source=t | eval f=get_format(DATE,'USA')", + assertEqual( + "source=t | eval f=get_format(DATE,'USA')", eval( relation("t"), - let( - field("f"), - function("get_format", - stringLiteral("DATE"), stringLiteral("USA")) - ) - )); + let(field("f"), function("get_format", stringLiteral("DATE"), stringLiteral("USA"))))); } @Test public void testTimeStampAddFunctionExpr() { - assertEqual("source=t | eval f=timestampadd(YEAR, 15, '2001-03-06 00:00:00')", + 
assertEqual( + "source=t | eval f=timestampadd(YEAR, 15, '2001-03-06 00:00:00')", eval( relation("t"), let( field("f"), - function("timestampadd", + function( + "timestampadd", stringLiteral("YEAR"), intLiteral(15), - stringLiteral("2001-03-06 00:00:00")) - ) - )); + stringLiteral("2001-03-06 00:00:00"))))); } @Test public void testTimeStampDiffFunctionExpr() { - assertEqual("source=t | eval f=timestampdiff(" - + "YEAR, '1997-01-01 00:00:00', '2001-03-06 00:00:00')", + assertEqual( + "source=t | eval f=timestampdiff(" + "YEAR, '1997-01-01 00:00:00', '2001-03-06 00:00:00')", eval( relation("t"), let( field("f"), - function("timestampdiff", + function( + "timestampdiff", stringLiteral("YEAR"), stringLiteral("1997-01-01 00:00:00"), - stringLiteral("2001-03-06 00:00:00")) - ) - )); + stringLiteral("2001-03-06 00:00:00"))))); } } diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstNowLikeFunctionTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstNowLikeFunctionTest.java index ddcde513dd..16aa0752e6 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstNowLikeFunctionTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstNowLikeFunctionTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.parser; import static org.junit.Assert.assertEquals; @@ -31,6 +30,7 @@ public class AstNowLikeFunctionTest { /** * Set parameterized values used in test. + * * @param name Function name * @param hasFsp Whether function has fsp argument * @param hasShortcut Whether function has shortcut (call without `()`) @@ -43,24 +43,26 @@ public AstNowLikeFunctionTest(String name, Boolean hasFsp, Boolean hasShortcut) /** * Returns function data to test. + * * @return An iterable. 
*/ @Parameterized.Parameters(name = "{0}") public static Iterable functionNames() { - return List.of(new Object[][]{ - {"now", false, false }, - {"current_timestamp", false, false}, - {"localtimestamp", false, false}, - {"localtime", false, false}, - {"sysdate", true, false}, - {"curtime", false, false}, - {"current_time", false, false}, - {"curdate", false, false}, - {"current_date", false, false}, - {"utc_date", false, false}, - {"utc_time", false, false}, - {"utc_timestamp", false, false} - }); + return List.of( + new Object[][] { + {"now", false, false}, + {"current_timestamp", false, false}, + {"localtimestamp", false, false}, + {"localtime", false, false}, + {"sysdate", true, false}, + {"curtime", false, false}, + {"current_time", false, false}, + {"curdate", false, false}, + {"current_date", false, false}, + {"utc_date", false, false}, + {"utc_time", false, false}, + {"utc_timestamp", false, false} + }); } private final String name; @@ -70,26 +72,20 @@ public static Iterable functionNames() { @Test public void test_function_call_eval() { assertEqual( - eval(relation("t"), let(field("r"), function(name))), - "source=t | eval r=" + name + "()" - ); + eval(relation("t"), let(field("r"), function(name))), "source=t | eval r=" + name + "()"); } @Test public void test_shortcut_eval() { Assume.assumeTrue(hasShortcut); - assertEqual( - eval(relation("t"), let(field("r"), function(name))), - "source=t | eval r=" + name - ); + assertEqual(eval(relation("t"), let(field("r"), function(name))), "source=t | eval r=" + name); } @Test public void test_function_call_where() { assertEqual( filter(relation("t"), compare("=", field("a"), function(name))), - "search source=t | where a=" + name + "()" - ); + "search source=t | where a=" + name + "()"); } @Test @@ -97,18 +93,15 @@ public void test_shortcut_where() { Assume.assumeTrue(hasShortcut); assertEqual( filter(relation("t"), compare("=", field("a"), function(name))), - "search source=t | where a=" + name - ); + "search 
source=t | where a=" + name); } @Test public void test_function_call_fsp() { Assume.assumeTrue(hasFsp); - assertEqual(filter( - relation("t"), - compare("=", field("a"), function(name, intLiteral(0))) - ), "search source=t | where a=" + name + "(0)" - ); + assertEqual( + filter(relation("t"), compare("=", field("a"), function(name, intLiteral(0)))), + "search source=t | where a=" + name + "(0)"); } protected void assertEqual(Node expectedPlan, String query) { diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstStatementBuilderTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstStatementBuilderTest.java index de74e4932f..7d7b31e822 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstStatementBuilderTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstStatementBuilderTest.java @@ -28,8 +28,7 @@ public class AstStatementBuilderTest { - @Rule - public ExpectedException exceptionRule = ExpectedException.none(); + @Rule public ExpectedException exceptionRule = ExpectedException.none(); private PPLSyntaxParser parser = new PPLSyntaxParser(); @@ -38,9 +37,8 @@ public void buildQueryStatement() { assertEqual( "search source=t a=1", new Query( - project( - filter(relation("t"), compare("=", field("a"), - intLiteral(1))), AllFields.of()), 0)); + project(filter(relation("t"), compare("=", field("a"), intLiteral(1))), AllFields.of()), + 0)); } @Test @@ -50,8 +48,8 @@ public void buildExplainStatement() { new Explain( new Query( project( - filter(relation("t"), compare("=", field("a"), intLiteral(1))), - AllFields.of()), 0))); + filter(relation("t"), compare("=", field("a"), intLiteral(1))), AllFields.of()), + 0))); } private void assertEqual(String query, Statement expectedStatement) { @@ -66,7 +64,8 @@ private void assertExplainEqual(String query, Statement expectedStatement) { private Node plan(String query, boolean isExplain) { final AstStatementBuilder builder = - new AstStatementBuilder(new AstBuilder(new 
AstExpressionBuilder(), query), + new AstStatementBuilder( + new AstBuilder(new AstExpressionBuilder(), query), AstStatementBuilder.StatementBuilderContext.builder().isExplain(isExplain).build()); return builder.visit(parser.parse(query)); } diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/utils/ArgumentFactoryTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/utils/ArgumentFactoryTest.java index e18dfbd65c..761dbe2997 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/utils/ArgumentFactoryTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/utils/ArgumentFactoryTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.utils; import static java.util.Collections.emptyList; @@ -28,12 +27,10 @@ public class ArgumentFactoryTest extends AstBuilderTest { @Test public void testFieldsCommandArgument() { - assertEqual("source=t | fields - a", + assertEqual( + "source=t | fields - a", projectWithArg( - relation("t"), - exprList(argument("exclude", booleanLiteral(true))), - field("a") - )); + relation("t"), exprList(argument("exclude", booleanLiteral(true))), field("a"))); } @Test @@ -47,20 +44,14 @@ public void testStatsCommandArgument() { "source=t | stats partitions=1 allnum=false delim=',' avg(a) dedup_splitvalues=true", agg( relation("t"), - exprList( - alias( - "avg(a)", - aggregate("avg", field("a"))) - ), + exprList(alias("avg(a)", aggregate("avg", field("a")))), emptyList(), emptyList(), exprList( argument("partitions", intLiteral(1)), argument("allnum", booleanLiteral(false)), argument("delim", stringLiteral(",")), - argument("dedupsplit", booleanLiteral(true)) - ) - )); + argument("dedupsplit", booleanLiteral(true))))); } @Test @@ -72,52 +63,43 @@ public void testStatsCommandDefaultArgument() { @Test public void testDedupCommandArgument() { - assertEqual("source=t | dedup 3 field0 keepempty=false consecutive=true", + assertEqual( + "source=t | dedup 3 field0 keepempty=false consecutive=true", dedupe( 
relation("t"), exprList( argument("number", intLiteral(3)), argument("keepempty", booleanLiteral(false)), - argument("consecutive", booleanLiteral(true)) - ), - field("field0") - )); + argument("consecutive", booleanLiteral(true))), + field("field0"))); } @Test public void testDedupCommandDefaultArgument() { assertEqual( - "source=t | dedup 1 field0 keepempty=false consecutive=false", - "source=t | dedup field0" - ); + "source=t | dedup 1 field0 keepempty=false consecutive=false", "source=t | dedup field0"); } @Test public void testSortCommandDefaultArgument() { - assertEqual( - "source=t | sort field0", - "source=t | sort field0" - ); + assertEqual("source=t | sort field0", "source=t | sort field0"); } @Test public void testSortFieldArgument() { - assertEqual("source=t | sort - auto(field0)", + assertEqual( + "source=t | sort - auto(field0)", sort( relation("t"), field( "field0", exprList( argument("asc", booleanLiteral(false)), - argument("type", stringLiteral("auto")) - ) - ) - )); + argument("type", stringLiteral("auto")))))); } @Test public void testNoArgConstructorForArgumentFactoryShouldPass() { new ArgumentFactory(); } - } diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/utils/PPLQueryDataAnonymizerTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/utils/PPLQueryDataAnonymizerTest.java index 1998647dba..cd51ea07df 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/utils/PPLQueryDataAnonymizerTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/utils/PPLQueryDataAnonymizerTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.utils; import static org.junit.Assert.assertEquals; @@ -29,166 +28,140 @@ public class PPLQueryDataAnonymizerTest { @Test public void testSearchCommand() { - assertEquals("source=t | where a = ***", - anonymize("search source=t a=1") - ); + assertEquals("source=t | where a = ***", anonymize("search source=t a=1")); } @Test public void testTableFunctionCommand() { - 
assertEquals("source=prometheus.query_range(***,***,***,***)", - anonymize("source=prometheus.query_range('afsd',123,123,3)") - ); + assertEquals( + "source=prometheus.query_range(***,***,***,***)", + anonymize("source=prometheus.query_range('afsd',123,123,3)")); } @Test public void testPrometheusPPLCommand() { - assertEquals("source=prometheus.http_requests_process", - anonymize("source=prometheus.http_requests_process") - ); + assertEquals( + "source=prometheus.http_requests_process", + anonymize("source=prometheus.http_requests_process")); } @Test public void testWhereCommand() { - assertEquals("source=t | where a = ***", - anonymize("search source=t | where a=1") - ); + assertEquals("source=t | where a = ***", anonymize("search source=t | where a=1")); } @Test public void testFieldsCommandWithoutArguments() { - assertEquals("source=t | fields + f,g", - anonymize("source=t | fields f,g")); + assertEquals("source=t | fields + f,g", anonymize("source=t | fields f,g")); } @Test public void testFieldsCommandWithIncludeArguments() { - assertEquals("source=t | fields + f,g", - anonymize("source=t | fields + f,g")); + assertEquals("source=t | fields + f,g", anonymize("source=t | fields + f,g")); } @Test public void testFieldsCommandWithExcludeArguments() { - assertEquals("source=t | fields - f,g", - anonymize("source=t | fields - f,g")); + assertEquals("source=t | fields - f,g", anonymize("source=t | fields - f,g")); } @Test public void testRenameCommandWithMultiFields() { - assertEquals("source=t | rename f as g,h as i,j as k", + assertEquals( + "source=t | rename f as g,h as i,j as k", anonymize("source=t | rename f as g,h as i,j as k")); } @Test public void testStatsCommandWithByClause() { - assertEquals("source=t | stats count(a) by b", - anonymize("source=t | stats count(a) by b")); + assertEquals("source=t | stats count(a) by b", anonymize("source=t | stats count(a) by b")); } @Test public void testStatsCommandWithNestedFunctions() { - assertEquals("source=t | 
stats sum(+(a,b))", - anonymize("source=t | stats sum(a+b)")); + assertEquals("source=t | stats sum(+(a,b))", anonymize("source=t | stats sum(a+b)")); } @Test public void testDedupCommand() { - assertEquals("source=t | dedup f1,f2 1 keepempty=false consecutive=false", + assertEquals( + "source=t | dedup f1,f2 1 keepempty=false consecutive=false", anonymize("source=t | dedup f1, f2")); } @Test public void testHeadCommandWithNumber() { - assertEquals("source=t | head 3", - anonymize("source=t | head 3")); + assertEquals("source=t | head 3", anonymize("source=t | head 3")); } - //todo, sort order is ignored, it doesn't impact the log analysis. + // todo, sort order is ignored, it doesn't impact the log analysis. @Test public void testSortCommandWithOptions() { - assertEquals("source=t | sort f1,f2", - anonymize("source=t | sort - f1, + f2")); + assertEquals("source=t | sort f1,f2", anonymize("source=t | sort - f1, + f2")); } @Test public void testEvalCommand() { - assertEquals("source=t | eval r=abs(f)", - anonymize("source=t | eval r=abs(f)")); + assertEquals("source=t | eval r=abs(f)", anonymize("source=t | eval r=abs(f)")); } @Test public void testRareCommandWithGroupBy() { - assertEquals("source=t | rare 10 a by b", - anonymize("source=t | rare a by b")); + assertEquals("source=t | rare 10 a by b", anonymize("source=t | rare a by b")); } @Test public void testTopCommandWithNAndGroupBy() { - assertEquals("source=t | top 1 a by b", - anonymize("source=t | top 1 a by b")); + assertEquals("source=t | top 1 a by b", anonymize("source=t | top 1 a by b")); } @Test public void testAndExpression() { - assertEquals("source=t | where a = *** and b = ***", - anonymize("source=t | where a=1 and b=2") - ); + assertEquals("source=t | where a = *** and b = ***", anonymize("source=t | where a=1 and b=2")); } @Test public void testOrExpression() { - assertEquals("source=t | where a = *** or b = ***", - anonymize("source=t | where a=1 or b=2") - ); + assertEquals("source=t | where a 
= *** or b = ***", anonymize("source=t | where a=1 or b=2")); } @Test public void testXorExpression() { - assertEquals("source=t | where a = *** xor b = ***", - anonymize("source=t | where a=1 xor b=2") - ); + assertEquals("source=t | where a = *** xor b = ***", anonymize("source=t | where a=1 xor b=2")); } @Test public void testNotExpression() { - assertEquals("source=t | where not a = ***", - anonymize("source=t | where not a=1 ") - ); + assertEquals("source=t | where not a = ***", anonymize("source=t | where not a=1 ")); } @Test public void testQualifiedName() { - assertEquals("source=t | fields + field0", - anonymize("source=t | fields field0") - ); + assertEquals("source=t | fields + field0", anonymize("source=t | fields field0")); } @Test public void testDateFunction() { - assertEquals("source=t | eval date=DATE_ADD(DATE(***),INTERVAL *** HOUR)", - anonymize("source=t | eval date=DATE_ADD(DATE('2020-08-26'),INTERVAL 1 HOUR)") - ); + assertEquals( + "source=t | eval date=DATE_ADD(DATE(***),INTERVAL *** HOUR)", + anonymize("source=t | eval date=DATE_ADD(DATE('2020-08-26'),INTERVAL 1 HOUR)")); } @Test public void testExplain() { - assertEquals("source=t | fields + a", - anonymizeStatement("source=t | fields a", true) - ); + assertEquals("source=t | fields + a", anonymizeStatement("source=t | fields a", true)); } @Test public void testQuery() { - assertEquals("source=t | fields + a", - anonymizeStatement("source=t | fields a", false) - ); + assertEquals("source=t | fields + a", anonymizeStatement("source=t | fields a", false)); } @Test public void anonymizeFieldsNoArg() { - assertEquals("source=t | fields + f", - anonymize(projectWithArg(relation("t"), Collections.emptyList(), field("f"))) - ); + assertEquals( + "source=t | fields + f", + anonymize(projectWithArg(relation("t"), Collections.emptyList(), field("f")))); } private String anonymize(String query) { diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/utils/UnresolvedPlanHelperTest.java 
b/ppl/src/test/java/org/opensearch/sql/ppl/utils/UnresolvedPlanHelperTest.java index d64c8d5db4..7c1264e0b6 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/utils/UnresolvedPlanHelperTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/utils/UnresolvedPlanHelperTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.ppl.utils; import static org.hamcrest.MatcherAssert.assertThat; diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/client/PrometheusClientImpl.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/client/PrometheusClientImpl.java index 9472be7487..2bfaaccd47 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/client/PrometheusClientImpl.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/client/PrometheusClientImpl.java @@ -38,16 +38,18 @@ public PrometheusClientImpl(OkHttpClient okHttpClient, URI uri) { this.uri = uri; } - @Override public JSONObject queryRange(String query, Long start, Long end, String step) throws IOException { - String queryUrl = String.format("%s/api/v1/query_range?query=%s&start=%s&end=%s&step=%s", - uri.toString().replaceAll("/$", ""), URLEncoder.encode(query, StandardCharsets.UTF_8), - start, end, step); + String queryUrl = + String.format( + "%s/api/v1/query_range?query=%s&start=%s&end=%s&step=%s", + uri.toString().replaceAll("/$", ""), + URLEncoder.encode(query, StandardCharsets.UTF_8), + start, + end, + step); logger.debug("queryUrl: " + queryUrl); - Request request = new Request.Builder() - .url(queryUrl) - .build(); + Request request = new Request.Builder().url(queryUrl).build(); Response response = this.okHttpClient.newCall(request).execute(); JSONObject jsonObject = readResponse(response); return jsonObject.getJSONObject("data"); @@ -55,14 +57,14 @@ public JSONObject queryRange(String query, Long start, Long end, String step) th @Override public List getLabels(String metricName) throws IOException { - String queryUrl = 
String.format("%s/api/v1/labels?%s=%s", - uri.toString().replaceAll("/$", ""), - URLEncoder.encode("match[]", StandardCharsets.UTF_8), - URLEncoder.encode(metricName, StandardCharsets.UTF_8)); + String queryUrl = + String.format( + "%s/api/v1/labels?%s=%s", + uri.toString().replaceAll("/$", ""), + URLEncoder.encode("match[]", StandardCharsets.UTF_8), + URLEncoder.encode(metricName, StandardCharsets.UTF_8)); logger.debug("queryUrl: " + queryUrl); - Request request = new Request.Builder() - .url(queryUrl) - .build(); + Request request = new Request.Builder().url(queryUrl).build(); Response response = this.okHttpClient.newCall(request).execute(); JSONObject jsonObject = readResponse(response); return toListOfLabels(jsonObject.getJSONArray("data")); @@ -70,28 +72,26 @@ public List getLabels(String metricName) throws IOException { @Override public Map> getAllMetrics() throws IOException { - String queryUrl = String.format("%s/api/v1/metadata", - uri.toString().replaceAll("/$", "")); + String queryUrl = String.format("%s/api/v1/metadata", uri.toString().replaceAll("/$", "")); logger.debug("queryUrl: " + queryUrl); - Request request = new Request.Builder() - .url(queryUrl) - .build(); + Request request = new Request.Builder().url(queryUrl).build(); Response response = this.okHttpClient.newCall(request).execute(); JSONObject jsonObject = readResponse(response); - TypeReference>> typeRef - = new TypeReference<>() {}; + TypeReference>> typeRef = new TypeReference<>() {}; return new ObjectMapper().readValue(jsonObject.getJSONObject("data").toString(), typeRef); } @Override public JSONArray queryExemplars(String query, Long start, Long end) throws IOException { - String queryUrl = String.format("%s/api/v1/query_exemplars?query=%s&start=%s&end=%s", - uri.toString().replaceAll("/$", ""), URLEncoder.encode(query, StandardCharsets.UTF_8), - start, end); + String queryUrl = + String.format( + "%s/api/v1/query_exemplars?query=%s&start=%s&end=%s", + uri.toString().replaceAll("/$", 
""), + URLEncoder.encode(query, StandardCharsets.UTF_8), + start, + end); logger.debug("queryUrl: " + queryUrl); - Request request = new Request.Builder() - .url(queryUrl) - .build(); + Request request = new Request.Builder().url(queryUrl).build(); Response response = this.okHttpClient.newCall(request).execute(); JSONObject jsonObject = readResponse(response); return jsonObject.getJSONArray("data"); @@ -100,8 +100,8 @@ public JSONArray queryExemplars(String query, Long start, Long end) throws IOExc private List toListOfLabels(JSONArray array) { List result = new ArrayList<>(); for (int i = 0; i < array.length(); i++) { - //__name__ is internal label in prometheus representing the metric name. - //Exempting this from labels list as it is not required in any of the operations. + // __name__ is internal label in prometheus representing the metric name. + // Exempting this from labels list as it is not required in any of the operations. if (!"__name__".equals(array.optString(i))) { result.add(array.optString(i)); } @@ -109,7 +109,6 @@ private List toListOfLabels(JSONArray array) { return result; } - private JSONObject readResponse(Response response) throws IOException { if (response.isSuccessful()) { JSONObject jsonObject = new JSONObject(Objects.requireNonNull(response.body()).string()); @@ -120,10 +119,9 @@ private JSONObject readResponse(Response response) throws IOException { } } else { throw new RuntimeException( - String.format("Request to Prometheus is Unsuccessful with : %s", Objects.requireNonNull( - response.body(), "Response body can't be null").string())); + String.format( + "Request to Prometheus is Unsuccessful with : %s", + Objects.requireNonNull(response.body(), "Response body can't be null").string())); } } - - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/data/constants/PrometheusFieldConstants.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/data/constants/PrometheusFieldConstants.java index 88e9df6a88..0f687b3cd1 
100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/data/constants/PrometheusFieldConstants.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/data/constants/PrometheusFieldConstants.java @@ -18,6 +18,6 @@ public class PrometheusFieldConstants { public static final String EXEMPLARS_KEY = "exemplars"; public static final String TRACE_ID_KEY = "traceID"; public static final String LABELS_KEY = "labels"; - public static final String TIMESTAMP_KEY = "timestamp"; - public static final String VALUE_KEY = "value"; + public static final String TIMESTAMP_KEY = "timestamp"; + public static final String VALUE_KEY = "value"; } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarFunctionImplementation.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarFunctionImplementation.java index 9d455b3cfc..bbd3a36f5f 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarFunctionImplementation.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarFunctionImplementation.java @@ -28,8 +28,8 @@ import org.opensearch.sql.prometheus.storage.QueryExemplarsTable; import org.opensearch.sql.storage.Table; -public class QueryExemplarFunctionImplementation extends FunctionExpression implements - TableFunctionImplementation { +public class QueryExemplarFunctionImplementation extends FunctionExpression + implements TableFunctionImplementation { private final FunctionName functionName; private final List arguments; @@ -39,10 +39,10 @@ public class QueryExemplarFunctionImplementation extends FunctionExpression impl * Required argument constructor. 
* * @param functionName name of the function - * @param arguments a list of arguments provided + * @param arguments a list of arguments provided */ - public QueryExemplarFunctionImplementation(FunctionName functionName, List arguments, - PrometheusClient prometheusClient) { + public QueryExemplarFunctionImplementation( + FunctionName functionName, List arguments, PrometheusClient prometheusClient) { super(functionName, arguments); this.functionName = functionName; this.arguments = arguments; @@ -51,10 +51,11 @@ public QueryExemplarFunctionImplementation(FunctionName functionName, List valueEnv) { - throw new UnsupportedOperationException(String.format( - "Prometheus defined function [%s] is only " - + "supported in SOURCE clause with prometheus connector catalog", - functionName)); + throw new UnsupportedOperationException( + String.format( + "Prometheus defined function [%s] is only " + + "supported in SOURCE clause with prometheus connector catalog", + functionName)); } @Override @@ -64,10 +65,15 @@ public ExprType type() { @Override public String toString() { - List args = arguments.stream() - .map(arg -> String.format("%s=%s", ((NamedArgumentExpression) arg) - .getArgName(), ((NamedArgumentExpression) arg).getValue().toString())) - .collect(Collectors.toList()); + List args = + arguments.stream() + .map( + arg -> + String.format( + "%s=%s", + ((NamedArgumentExpression) arg).getArgName(), + ((NamedArgumentExpression) arg).getValue().toString())) + .collect(Collectors.toList()); return String.format("%s(%s)", functionName, String.join(", ", args)); } @@ -79,27 +85,26 @@ public Table applyArguments() { private PrometheusQueryExemplarsRequest buildExemplarsQueryRequest(List arguments) { PrometheusQueryExemplarsRequest request = new PrometheusQueryExemplarsRequest(); - arguments.forEach(arg -> { - String argName = ((NamedArgumentExpression) arg).getArgName(); - Expression argValue = ((NamedArgumentExpression) arg).getValue(); - ExprValue literalValue = 
argValue.valueOf(); - switch (argName) { - case QUERY: - request - .setQuery((String) literalValue.value()); - break; - case STARTTIME: - request.setStartTime(((Number) literalValue.value()).longValue()); - break; - case ENDTIME: - request.setEndTime(((Number) literalValue.value()).longValue()); - break; - default: - throw new ExpressionEvaluationException( - String.format("Invalid Function Argument:%s", argName)); - } - }); + arguments.forEach( + arg -> { + String argName = ((NamedArgumentExpression) arg).getArgName(); + Expression argValue = ((NamedArgumentExpression) arg).getValue(); + ExprValue literalValue = argValue.valueOf(); + switch (argName) { + case QUERY: + request.setQuery((String) literalValue.value()); + break; + case STARTTIME: + request.setStartTime(((Number) literalValue.value()).longValue()); + break; + case ENDTIME: + request.setEndTime(((Number) literalValue.value()).longValue()); + break; + default: + throw new ExpressionEvaluationException( + String.format("Invalid Function Argument:%s", argName)); + } + }); return request; } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/implementation/QueryRangeFunctionImplementation.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/implementation/QueryRangeFunctionImplementation.java index 2d3710037a..0719bd1525 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/implementation/QueryRangeFunctionImplementation.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/implementation/QueryRangeFunctionImplementation.java @@ -29,8 +29,8 @@ import org.opensearch.sql.prometheus.storage.PrometheusMetricTable; import org.opensearch.sql.storage.Table; -public class QueryRangeFunctionImplementation extends FunctionExpression implements - TableFunctionImplementation { +public class QueryRangeFunctionImplementation extends FunctionExpression + implements TableFunctionImplementation { private final FunctionName 
functionName; private final List arguments; @@ -40,10 +40,10 @@ public class QueryRangeFunctionImplementation extends FunctionExpression impleme * Required argument constructor. * * @param functionName name of the function - * @param arguments a list of expressions + * @param arguments a list of expressions */ - public QueryRangeFunctionImplementation(FunctionName functionName, List arguments, - PrometheusClient prometheusClient) { + public QueryRangeFunctionImplementation( + FunctionName functionName, List arguments, PrometheusClient prometheusClient) { super(functionName, arguments); this.functionName = functionName; this.arguments = arguments; @@ -52,10 +52,11 @@ public QueryRangeFunctionImplementation(FunctionName functionName, List valueEnv) { - throw new UnsupportedOperationException(String.format( - "Prometheus defined function [%s] is only " - + "supported in SOURCE clause with prometheus connector catalog", - functionName)); + throw new UnsupportedOperationException( + String.format( + "Prometheus defined function [%s] is only " + + "supported in SOURCE clause with prometheus connector catalog", + functionName)); } @Override @@ -65,10 +66,15 @@ public ExprType type() { @Override public String toString() { - List args = arguments.stream() - .map(arg -> String.format("%s=%s", ((NamedArgumentExpression) arg) - .getArgName(), ((NamedArgumentExpression) arg).getValue().toString())) - .collect(Collectors.toList()); + List args = + arguments.stream() + .map( + arg -> + String.format( + "%s=%s", + ((NamedArgumentExpression) arg).getArgName(), + ((NamedArgumentExpression) arg).getValue().toString())) + .collect(Collectors.toList()); return String.format("%s(%s)", functionName, String.join(", ", args)); } @@ -80,30 +86,29 @@ public Table applyArguments() { private PrometheusQueryRequest buildQueryFromQueryRangeFunction(List arguments) { PrometheusQueryRequest prometheusQueryRequest = new PrometheusQueryRequest(); - arguments.forEach(arg -> { - String argName = 
((NamedArgumentExpression) arg).getArgName(); - Expression argValue = ((NamedArgumentExpression) arg).getValue(); - ExprValue literalValue = argValue.valueOf(); - switch (argName) { - case QUERY: - prometheusQueryRequest - .setPromQl((String) literalValue.value()); - break; - case STARTTIME: - prometheusQueryRequest.setStartTime(((Number) literalValue.value()).longValue()); - break; - case ENDTIME: - prometheusQueryRequest.setEndTime(((Number) literalValue.value()).longValue()); - break; - case STEP: - prometheusQueryRequest.setStep(literalValue.value().toString()); - break; - default: - throw new ExpressionEvaluationException( - String.format("Invalid Function Argument:%s", argName)); - } - }); + arguments.forEach( + arg -> { + String argName = ((NamedArgumentExpression) arg).getArgName(); + Expression argValue = ((NamedArgumentExpression) arg).getValue(); + ExprValue literalValue = argValue.valueOf(); + switch (argName) { + case QUERY: + prometheusQueryRequest.setPromQl((String) literalValue.value()); + break; + case STARTTIME: + prometheusQueryRequest.setStartTime(((Number) literalValue.value()).longValue()); + break; + case ENDTIME: + prometheusQueryRequest.setEndTime(((Number) literalValue.value()).longValue()); + break; + case STEP: + prometheusQueryRequest.setStep(literalValue.value().toString()); + break; + default: + throw new ExpressionEvaluationException( + String.format("Invalid Function Argument:%s", argName)); + } + }); return prometheusQueryRequest; } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/resolver/QueryExemplarsTableFunctionResolver.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/resolver/QueryExemplarsTableFunctionResolver.java index a82e5a397a..78d87b0a0b 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/resolver/QueryExemplarsTableFunctionResolver.java +++ 
b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/resolver/QueryExemplarsTableFunctionResolver.java @@ -22,9 +22,9 @@ import org.opensearch.sql.prometheus.functions.implementation.QueryExemplarFunctionImplementation; /** - * This class is for query_exemplars table function resolver {@link FunctionResolver}. - * It takes care of validating function arguments and also creating - * required {@link org.opensearch.sql.expression.function.TableFunctionImplementation} Class. + * This class is for query_exemplars table function resolver {@link FunctionResolver}. It takes care + * of validating function arguments and also creating required {@link + * org.opensearch.sql.expression.function.TableFunctionImplementation} Class. */ @RequiredArgsConstructor public class QueryExemplarsTableFunctionResolver implements FunctionResolver { @@ -41,13 +41,15 @@ public Pair resolve(FunctionSignature unreso final FunctionName functionName = FunctionName.of(QUERY_EXEMPLARS); FunctionSignature functionSignature = new FunctionSignature(FunctionName.of(QUERY_EXEMPLARS), List.of(STRING, LONG, LONG)); - FunctionBuilder functionBuilder = (functionProperties, arguments) -> { - final List argumentNames = List.of(QUERY, STARTTIME, ENDTIME); - validatePrometheusTableFunctionArguments(arguments, argumentNames); - List namedArguments = getNamedArgumentsOfTableFunction(arguments, argumentNames); - return new QueryExemplarFunctionImplementation(functionName, - namedArguments, prometheusClient); - }; + FunctionBuilder functionBuilder = + (functionProperties, arguments) -> { + final List argumentNames = List.of(QUERY, STARTTIME, ENDTIME); + validatePrometheusTableFunctionArguments(arguments, argumentNames); + List namedArguments = + getNamedArgumentsOfTableFunction(arguments, argumentNames); + return new QueryExemplarFunctionImplementation( + functionName, namedArguments, prometheusClient); + }; return Pair.of(functionSignature, functionBuilder); } diff --git 
a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/resolver/QueryRangeTableFunctionResolver.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/resolver/QueryRangeTableFunctionResolver.java index 8bb2a2d758..8dfa12134e 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/resolver/QueryRangeTableFunctionResolver.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/resolver/QueryRangeTableFunctionResolver.java @@ -39,11 +39,14 @@ public Pair resolve(FunctionSignature unreso FunctionSignature functionSignature = new FunctionSignature(functionName, List.of(STRING, LONG, LONG, STRING)); final List argumentNames = List.of(QUERY, STARTTIME, ENDTIME, STEP); - FunctionBuilder functionBuilder = (functionProperties, arguments) -> { - validatePrometheusTableFunctionArguments(arguments, argumentNames); - List namedArguments = getNamedArgumentsOfTableFunction(arguments, argumentNames); - return new QueryRangeFunctionImplementation(functionName, namedArguments, prometheusClient); - }; + FunctionBuilder functionBuilder = + (functionProperties, arguments) -> { + validatePrometheusTableFunctionArguments(arguments, argumentNames); + List namedArguments = + getNamedArgumentsOfTableFunction(arguments, argumentNames); + return new QueryRangeFunctionImplementation( + functionName, namedArguments, prometheusClient); + }; return Pair.of(functionSignature, functionBuilder); } @@ -51,5 +54,4 @@ public Pair resolve(FunctionSignature unreso public FunctionName getFunctionName() { return FunctionName.of(QUERY_RANGE); } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/PrometheusFunctionResponseHandle.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/PrometheusFunctionResponseHandle.java index f2cefa85ec..bbc0516df6 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/PrometheusFunctionResponseHandle.java +++ 
b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/PrometheusFunctionResponseHandle.java @@ -8,14 +8,10 @@ import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.executor.ExecutionEngine; -/** - * Handle Prometheus response. - */ +/** Handle Prometheus response. */ public interface PrometheusFunctionResponseHandle { - /** - * Return true if Prometheus response has more result. - */ + /** Return true if Prometheus response has more result. */ boolean hasNext(); /** @@ -24,8 +20,6 @@ public interface PrometheusFunctionResponseHandle { */ ExprValue next(); - /** - * Return ExecutionEngine.Schema of the Prometheus response. - */ + /** Return ExecutionEngine.Schema of the Prometheus response. */ ExecutionEngine.Schema schema(); } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/QueryExemplarsFunctionResponseHandle.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/QueryExemplarsFunctionResponseHandle.java index f734159720..8d1c267a90 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/QueryExemplarsFunctionResponseHandle.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/QueryExemplarsFunctionResponseHandle.java @@ -10,7 +10,6 @@ import static org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.LABELS_KEY; import static org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.SERIES_LABELS_KEY; import static org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.TIMESTAMP_KEY; -import static org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.TRACE_ID_KEY; import static org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.VALUE_KEY; import java.time.Instant; @@ -45,8 +44,8 @@ public QueryExemplarsFunctionResponseHandle(JSONArray responseArray) { private void constructIteratorAndSchema(JSONArray responseArray) { 
List columnList = new ArrayList<>(); columnList.add(new ExecutionEngine.Schema.Column(SERIES_LABELS_KEY, SERIES_LABELS_KEY, STRUCT)); - columnList.add(new ExecutionEngine.Schema.Column(EXEMPLARS_KEY, EXEMPLARS_KEY, - ExprCoreType.ARRAY)); + columnList.add( + new ExecutionEngine.Schema.Column(EXEMPLARS_KEY, EXEMPLARS_KEY, ExprCoreType.ARRAY)); this.schema = new ExecutionEngine.Schema(columnList); List result = new ArrayList<>(); for (int i = 0; i < responseArray.length(); i++) { @@ -63,7 +62,8 @@ private void constructIteratorAndSchema(JSONArray responseArray) { private ExprValue constructSeriesLabels(JSONObject seriesLabels) { LinkedHashMap seriesLabelsMap = new LinkedHashMap<>(); - seriesLabels.keySet() + seriesLabels + .keySet() .forEach(key -> seriesLabelsMap.put(key, new ExprStringValue(seriesLabels.getString(key)))); return new ExprTupleValue(seriesLabelsMap); } @@ -79,13 +79,13 @@ private ExprValue constructExemplarList(JSONArray exemplars) { private ExprValue constructExemplar(JSONObject exemplarsJSONObject) { LinkedHashMap exemplarHashMap = new LinkedHashMap<>(); - exemplarHashMap.put(LABELS_KEY, - constructLabelsInExemplar(exemplarsJSONObject.getJSONObject(LABELS_KEY))); - exemplarHashMap.put(TIMESTAMP_KEY, - new ExprTimestampValue(Instant.ofEpochMilli((long)( - exemplarsJSONObject.getDouble(TIMESTAMP_KEY) * 1000)))); - exemplarHashMap.put(VALUE_KEY, - new ExprDoubleValue(exemplarsJSONObject.getDouble(VALUE_KEY))); + exemplarHashMap.put( + LABELS_KEY, constructLabelsInExemplar(exemplarsJSONObject.getJSONObject(LABELS_KEY))); + exemplarHashMap.put( + TIMESTAMP_KEY, + new ExprTimestampValue( + Instant.ofEpochMilli((long) (exemplarsJSONObject.getDouble(TIMESTAMP_KEY) * 1000)))); + exemplarHashMap.put(VALUE_KEY, new ExprDoubleValue(exemplarsJSONObject.getDouble(VALUE_KEY))); return new ExprTupleValue(exemplarHashMap); } @@ -107,7 +107,6 @@ public ExprValue next() { return responseIterator.next(); } - @Override public ExecutionEngine.Schema schema() { return 
schema; diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/QueryRangeFunctionResponseHandle.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/QueryRangeFunctionResponseHandle.java index a3c68617e8..e10c9d7aff 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/QueryRangeFunctionResponseHandle.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/response/QueryRangeFunctionResponseHandle.java @@ -30,9 +30,7 @@ import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.executor.ExecutionEngine; -/** - * Default implementation of QueryRangeFunctionResponseHandle. - */ +/** Default implementation of QueryRangeFunctionResponseHandle. */ public class QueryRangeFunctionResponseHandle implements PrometheusFunctionResponseHandle { private final JSONObject responseObject; @@ -62,25 +60,26 @@ private void constructIterator() { result.add(new ExprTupleValue(linkedHashMap)); } } else { - throw new RuntimeException(String.format("Unexpected Result Type: %s during Prometheus " - + "Response Parsing. 'matrix' resultType is expected", - responseObject.getString("resultType"))); + throw new RuntimeException( + String.format( + "Unexpected Result Type: %s during Prometheus " + + "Response Parsing. 
'matrix' resultType is expected", + responseObject.getString("resultType"))); } this.responseIterator = result.iterator(); } - private static void extractTimestampAndValues(JSONArray values, - LinkedHashMap linkedHashMap) { + private static void extractTimestampAndValues( + JSONArray values, LinkedHashMap linkedHashMap) { List timestampList = new ArrayList<>(); List valueList = new ArrayList<>(); for (int j = 0; j < values.length(); j++) { JSONArray value = values.getJSONArray(j); - timestampList.add(new ExprTimestampValue( - Instant.ofEpochMilli((long) (value.getDouble(0) * 1000)))); + timestampList.add( + new ExprTimestampValue(Instant.ofEpochMilli((long) (value.getDouble(0) * 1000)))); valueList.add(new ExprDoubleValue(value.getDouble(1))); } - linkedHashMap.put(TIMESTAMP, - new ExprCollectionValue(timestampList)); + linkedHashMap.put(TIMESTAMP, new ExprCollectionValue(timestampList)); linkedHashMap.put(VALUE, new ExprCollectionValue(valueList)); } @@ -90,12 +89,10 @@ private void constructSchema() { private ExprValue extractLabels(JSONObject metric) { LinkedHashMap labelsMap = new LinkedHashMap<>(); - metric.keySet().forEach(key - -> labelsMap.put(key, new ExprStringValue(metric.getString(key)))); + metric.keySet().forEach(key -> labelsMap.put(key, new ExprStringValue(metric.getString(key)))); return new ExprTupleValue(labelsMap); } - private List getColumnList() { List columnList = new ArrayList<>(); columnList.add(new ExecutionEngine.Schema.Column(LABELS, LABELS, ExprCoreType.STRUCT)); diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanBuilder.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanBuilder.java index 8364173889..7e779eb77c 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanBuilder.java +++ 
b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanBuilder.java @@ -12,9 +12,7 @@ import org.opensearch.sql.storage.TableScanOperator; import org.opensearch.sql.storage.read.TableScanBuilder; -/** - * TableScanBuilder for query_exemplars table function of prometheus connector. - */ +/** TableScanBuilder for query_exemplars table function of prometheus connector. */ @AllArgsConstructor public class QueryExemplarsFunctionTableScanBuilder extends TableScanBuilder { @@ -24,8 +22,8 @@ public class QueryExemplarsFunctionTableScanBuilder extends TableScanBuilder { @Override public TableScanOperator build() { - return new QueryExemplarsFunctionTableScanOperator(prometheusClient, - prometheusQueryExemplarsRequest); + return new QueryExemplarsFunctionTableScanOperator( + prometheusClient, prometheusQueryExemplarsRequest); } // Since we are determining the schema after table scan, @@ -34,5 +32,4 @@ public TableScanOperator build() { public boolean pushDownProject(LogicalProject project) { return true; } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanOperator.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanOperator.java index 85ba6c854a..1a58429328 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanOperator.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanOperator.java @@ -22,37 +22,37 @@ import org.opensearch.sql.storage.TableScanOperator; /** - * This class is for QueryExemplars function {@link TableScanOperator}. - * This takes care of getting exemplar data from prometheus by making - * {@link PrometheusQueryExemplarsRequest}. + * This class is for QueryExemplars function {@link TableScanOperator}. 
This takes care of getting + * exemplar data from prometheus by making {@link PrometheusQueryExemplarsRequest}. */ @RequiredArgsConstructor public class QueryExemplarsFunctionTableScanOperator extends TableScanOperator { private final PrometheusClient prometheusClient; - @Getter - private final PrometheusQueryExemplarsRequest request; + @Getter private final PrometheusQueryExemplarsRequest request; private QueryExemplarsFunctionResponseHandle queryExemplarsFunctionResponseHandle; private static final Logger LOG = LogManager.getLogger(); @Override public void open() { super.open(); - this.queryExemplarsFunctionResponseHandle - = AccessController - .doPrivileged((PrivilegedAction) () -> { - try { - JSONArray responseArray = prometheusClient.queryExemplars( - request.getQuery(), - request.getStartTime(), request.getEndTime()); - return new QueryExemplarsFunctionResponseHandle(responseArray); - } catch (IOException e) { - LOG.error(e.getMessage()); - throw new RuntimeException( - String.format("Error fetching data from prometheus server: %s", e.getMessage())); - } - }); + this.queryExemplarsFunctionResponseHandle = + AccessController.doPrivileged( + (PrivilegedAction) + () -> { + try { + JSONArray responseArray = + prometheusClient.queryExemplars( + request.getQuery(), request.getStartTime(), request.getEndTime()); + return new QueryExemplarsFunctionResponseHandle(responseArray); + } catch (IOException e) { + LOG.error(e.getMessage()); + throw new RuntimeException( + String.format( + "Error fetching data from prometheus server: %s", e.getMessage())); + } + }); } @Override @@ -72,7 +72,9 @@ public ExprValue next() { @Override public String explain() { - return String.format(Locale.ROOT, "query_exemplars(%s, %s, %s)", + return String.format( + Locale.ROOT, + "query_exemplars(%s, %s, %s)", request.getQuery(), request.getStartTime(), request.getEndTime()); diff --git 
a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanBuilder.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanBuilder.java index 00e2191d09..2d22c0af69 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanBuilder.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanBuilder.java @@ -15,9 +15,8 @@ import org.opensearch.sql.storage.read.TableScanBuilder; /** - * TableScanBuilder for query_range table function of prometheus connector. - * we can merge this when we refactor for existing - * ppl queries based on prometheus connector. + * TableScanBuilder for query_range table function of prometheus connector. we can merge this when + * we refactor for existing ppl queries based on prometheus connector. */ @AllArgsConstructor public class QueryRangeFunctionTableScanBuilder extends TableScanBuilder { diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanOperator.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanOperator.java index 68b9b60643..fc3f9f9a9b 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanOperator.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanOperator.java @@ -23,9 +23,7 @@ import org.opensearch.sql.prometheus.request.PrometheusQueryRequest; import org.opensearch.sql.storage.TableScanOperator; -/** - * This a table scan operator to handle Query Range table function. - */ +/** This a table scan operator to handle Query Range table function. 
*/ @RequiredArgsConstructor public class QueryRangeFunctionTableScanOperator extends TableScanOperator { @@ -39,19 +37,25 @@ public class QueryRangeFunctionTableScanOperator extends TableScanOperator { @Override public void open() { super.open(); - this.prometheusResponseHandle - = AccessController.doPrivileged((PrivilegedAction) () -> { - try { - JSONObject responseObject = prometheusClient.queryRange( - request.getPromQl(), - request.getStartTime(), request.getEndTime(), request.getStep()); - return new QueryRangeFunctionResponseHandle(responseObject); - } catch (IOException e) { - LOG.error(e.getMessage()); - throw new RuntimeException( - String.format("Error fetching data from prometheus server: %s", e.getMessage())); - } - }); + this.prometheusResponseHandle = + AccessController.doPrivileged( + (PrivilegedAction) + () -> { + try { + JSONObject responseObject = + prometheusClient.queryRange( + request.getPromQl(), + request.getStartTime(), + request.getEndTime(), + request.getStep()); + return new QueryRangeFunctionResponseHandle(responseObject); + } catch (IOException e) { + LOG.error(e.getMessage()); + throw new RuntimeException( + String.format( + "Error fetching data from prometheus server: %s", e.getMessage())); + } + }); } @Override @@ -71,7 +75,9 @@ public ExprValue next() { @Override public String explain() { - return String.format(Locale.ROOT, "query_range(%s, %s, %s, %s)", + return String.format( + Locale.ROOT, + "query_range(%s, %s, %s, %s)", request.getPromQl(), request.getStartTime(), request.getEndTime(), diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalMetricAgg.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalMetricAgg.java index f348c699a1..f7c45f6ad2 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalMetricAgg.java +++ 
b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalMetricAgg.java @@ -20,10 +20,7 @@ import org.opensearch.sql.planner.logical.LogicalPlan; import org.opensearch.sql.planner.logical.LogicalPlanNodeVisitor; - -/** - * Logical Metric Scan along with aggregation Operation. - */ +/** Logical Metric Scan along with aggregation Operation. */ @Getter @ToString @EqualsAndHashCode(callSuper = false) @@ -31,37 +28,29 @@ public class PrometheusLogicalMetricAgg extends LogicalPlan { private final String metricName; - /** - * Filter Condition. - */ - @Setter - private Expression filter; + /** Filter Condition. */ + @Setter private Expression filter; - /** - * Aggregation List. - */ - @Setter - private List aggregatorList; + /** Aggregation List. */ + @Setter private List aggregatorList; - /** - * Group List. - */ - @Setter - private List groupByList; + /** Group List. */ + @Setter private List groupByList; /** * Constructor for LogicalMetricAgg Logical Plan. * - * @param metricName metricName - * @param filter filter + * @param metricName metricName + * @param filter filter * @param aggregatorList aggregatorList - * @param groupByList groupByList. + * @param groupByList groupByList. 
*/ @Builder - public PrometheusLogicalMetricAgg(String metricName, - Expression filter, - List aggregatorList, - List groupByList) { + public PrometheusLogicalMetricAgg( + String metricName, + Expression filter, + List aggregatorList, + List groupByList) { super(ImmutableList.of()); this.metricName = metricName; this.filter = filter; diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalMetricScan.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalMetricScan.java index 5e07d6899f..7b28a8a6c9 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalMetricScan.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalMetricScan.java @@ -17,8 +17,8 @@ import org.opensearch.sql.planner.logical.LogicalPlanNodeVisitor; /** - * Prometheus Logical Metric Scan Operation. - * In an optimized plan this node represents both Relation and Filter Operation. + * Prometheus Logical Metric Scan Operation. In an optimized plan this node represents both Relation + * and Filter Operation. */ @Getter @ToString @@ -27,9 +27,7 @@ public class PrometheusLogicalMetricScan extends LogicalPlan { private final String metricName; - /** - * Filter Condition. - */ + /** Filter Condition. */ private final Expression filter; /** @@ -39,8 +37,7 @@ public class PrometheusLogicalMetricScan extends LogicalPlan { * @param filter filter. 
*/ @Builder - public PrometheusLogicalMetricScan(String metricName, - Expression filter) { + public PrometheusLogicalMetricScan(String metricName, Expression filter) { super(ImmutableList.of()); this.metricName = metricName; this.filter = filter; @@ -50,5 +47,4 @@ public PrometheusLogicalMetricScan(String metricName, public R accept(LogicalPlanNodeVisitor visitor, C context) { return visitor.visitNode(this, context); } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalPlanOptimizerFactory.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalPlanOptimizerFactory.java index 8a365b2786..ea14be0e0a 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalPlanOptimizerFactory.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicalPlanOptimizerFactory.java @@ -5,7 +5,6 @@ package org.opensearch.sql.prometheus.planner.logical; - import java.util.Arrays; import lombok.experimental.UtilityClass; import org.opensearch.sql.planner.optimizer.LogicalPlanOptimizer; @@ -13,20 +12,14 @@ import org.opensearch.sql.prometheus.planner.logical.rules.MergeAggAndRelation; import org.opensearch.sql.prometheus.planner.logical.rules.MergeFilterAndRelation; -/** - * Prometheus storage engine specified logical plan optimizer. - */ +/** Prometheus storage engine specified logical plan optimizer. */ @UtilityClass public class PrometheusLogicalPlanOptimizerFactory { - /** - * Create Prometheus storage specified logical plan optimizer. - */ + /** Create Prometheus storage specified logical plan optimizer. 
*/ public static LogicalPlanOptimizer create() { - return new LogicalPlanOptimizer(Arrays.asList( - new MergeFilterAndRelation(), - new MergeAggAndIndexScan(), - new MergeAggAndRelation() - )); + return new LogicalPlanOptimizer( + Arrays.asList( + new MergeFilterAndRelation(), new MergeAggAndIndexScan(), new MergeAggAndRelation())); } } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeAggAndIndexScan.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeAggAndIndexScan.java index 76bc6cc840..2594b74eb5 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeAggAndIndexScan.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeAggAndIndexScan.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.prometheus.planner.logical.rules; import static com.facebook.presto.matching.Pattern.typeOf; @@ -20,9 +19,7 @@ import org.opensearch.sql.prometheus.planner.logical.PrometheusLogicalMetricAgg; import org.opensearch.sql.prometheus.planner.logical.PrometheusLogicalMetricScan; -/** - * Merge Aggregation -- Relation to MetricScanAggregation. - */ +/** Merge Aggregation -- Relation to MetricScanAggregation. */ public class MergeAggAndIndexScan implements Rule { private final Capture capture; @@ -31,22 +28,18 @@ public class MergeAggAndIndexScan implements Rule { @Getter private final Pattern pattern; - /** - * Constructor of MergeAggAndIndexScan. - */ + /** Constructor of MergeAggAndIndexScan. 
*/ public MergeAggAndIndexScan() { this.capture = Capture.newCapture(); - this.pattern = typeOf(LogicalAggregation.class) - .with(source().matching(typeOf(PrometheusLogicalMetricScan.class) - .capturedAs(capture))); + this.pattern = + typeOf(LogicalAggregation.class) + .with(source().matching(typeOf(PrometheusLogicalMetricScan.class).capturedAs(capture))); } @Override - public LogicalPlan apply(LogicalAggregation aggregation, - Captures captures) { + public LogicalPlan apply(LogicalAggregation aggregation, Captures captures) { PrometheusLogicalMetricScan indexScan = captures.get(capture); - return PrometheusLogicalMetricAgg - .builder() + return PrometheusLogicalMetricAgg.builder() .metricName(indexScan.getMetricName()) .filter(indexScan.getFilter()) .aggregatorList(aggregation.getAggregatorList()) diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeAggAndRelation.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeAggAndRelation.java index fa9b0c7206..e6170e41f9 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeAggAndRelation.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeAggAndRelation.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.prometheus.planner.logical.rules; import static com.facebook.presto.matching.Pattern.typeOf; @@ -20,9 +19,7 @@ import org.opensearch.sql.planner.optimizer.Rule; import org.opensearch.sql.prometheus.planner.logical.PrometheusLogicalMetricAgg; -/** - * Merge Aggregation -- Relation to IndexScanAggregation. - */ +/** Merge Aggregation -- Relation to IndexScanAggregation. */ public class MergeAggAndRelation implements Rule { private final Capture relationCapture; @@ -31,21 +28,18 @@ public class MergeAggAndRelation implements Rule { @Getter private final Pattern pattern; - /** - * Constructor of MergeAggAndRelation. 
- */ + /** Constructor of MergeAggAndRelation. */ public MergeAggAndRelation() { this.relationCapture = Capture.newCapture(); - this.pattern = typeOf(LogicalAggregation.class) - .with(source().matching(typeOf(LogicalRelation.class).capturedAs(relationCapture))); + this.pattern = + typeOf(LogicalAggregation.class) + .with(source().matching(typeOf(LogicalRelation.class).capturedAs(relationCapture))); } @Override - public LogicalPlan apply(LogicalAggregation aggregation, - Captures captures) { + public LogicalPlan apply(LogicalAggregation aggregation, Captures captures) { LogicalRelation relation = captures.get(relationCapture); - return PrometheusLogicalMetricAgg - .builder() + return PrometheusLogicalMetricAgg.builder() .metricName(relation.getRelationName()) .aggregatorList(aggregation.getAggregatorList()) .groupByList(aggregation.getGroupByList()) diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeFilterAndRelation.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeFilterAndRelation.java index a99eb695be..2013938d73 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeFilterAndRelation.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/planner/logical/rules/MergeFilterAndRelation.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.prometheus.planner.logical.rules; import static com.facebook.presto.matching.Pattern.typeOf; @@ -18,21 +17,18 @@ import org.opensearch.sql.planner.optimizer.Rule; import org.opensearch.sql.prometheus.planner.logical.PrometheusLogicalMetricScan; -/** - * Merge Filter -- Relation to LogicalMetricScan. - */ +/** Merge Filter -- Relation to LogicalMetricScan. */ public class MergeFilterAndRelation implements Rule { private final Capture relationCapture; private final Pattern pattern; - /** - * Constructor of MergeFilterAndRelation. 
- */ + /** Constructor of MergeFilterAndRelation. */ public MergeFilterAndRelation() { this.relationCapture = Capture.newCapture(); - this.pattern = typeOf(LogicalFilter.class) - .with(source().matching(typeOf(LogicalRelation.class).capturedAs(relationCapture))); + this.pattern = + typeOf(LogicalFilter.class) + .with(source().matching(typeOf(LogicalRelation.class).capturedAs(relationCapture))); } @Override @@ -41,11 +37,9 @@ public Pattern pattern() { } @Override - public LogicalPlan apply(LogicalFilter filter, - Captures captures) { + public LogicalPlan apply(LogicalFilter filter, Captures captures) { LogicalRelation relation = captures.get(relationCapture); - return PrometheusLogicalMetricScan - .builder() + return PrometheusLogicalMetricScan.builder() .metricName(relation.getRelationName()) .filter(filter.getCondition()) .build(); diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/PrometheusQueryExemplarsRequest.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/PrometheusQueryExemplarsRequest.java index 9cf3d41522..d4eea97c48 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/PrometheusQueryExemplarsRequest.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/PrometheusQueryExemplarsRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.prometheus.request; import lombok.AllArgsConstructor; @@ -12,9 +11,7 @@ import lombok.NoArgsConstructor; import lombok.ToString; -/** - * Prometheus metric query request. - */ +/** Prometheus metric query request. */ @EqualsAndHashCode @Data @ToString @@ -22,19 +19,12 @@ @NoArgsConstructor public class PrometheusQueryExemplarsRequest { - /** - * PromQL. - */ + /** PromQL. */ private String query; - /** - * startTime of the query. - */ + /** startTime of the query. */ private Long startTime; - /** - * endTime of the query. - */ + /** endTime of the query. 
*/ private Long endTime; - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/PrometheusQueryRequest.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/PrometheusQueryRequest.java index 176a52a1d9..e24c27c52a 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/PrometheusQueryRequest.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/PrometheusQueryRequest.java @@ -3,20 +3,15 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.prometheus.request; import lombok.AllArgsConstructor; import lombok.Data; import lombok.EqualsAndHashCode; -import lombok.Getter; import lombok.NoArgsConstructor; -import lombok.Setter; import lombok.ToString; -/** - * Prometheus metric query request. - */ +/** Prometheus metric query request. */ @EqualsAndHashCode @Data @ToString @@ -24,24 +19,15 @@ @NoArgsConstructor public class PrometheusQueryRequest { - /** - * PromQL. - */ + /** PromQL. */ private String promQl; - /** - * startTime of the query. - */ + /** startTime of the query. */ private Long startTime; - /** - * endTime of the query. - */ + /** endTime of the query. */ private Long endTime; - /** - * step is the resolution required between startTime and endTime. - */ + /** step is the resolution required between startTime and endTime. 
*/ private String step; - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusDescribeMetricRequest.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusDescribeMetricRequest.java index 2e0d46b3e8..b6a4e3c49c 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusDescribeMetricRequest.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusDescribeMetricRequest.java @@ -5,7 +5,6 @@ * */ - package org.opensearch.sql.prometheus.request.system; import static org.opensearch.sql.data.model.ExprValueUtils.stringValue; @@ -31,60 +30,63 @@ import org.opensearch.sql.prometheus.storage.PrometheusMetricDefaultSchema; /** - * Describe Metric metadata request. - * This is triggered in case of both query range table function and relation. - * In case of table function metric name is null. + * Describe Metric metadata request. This is triggered in case of both query range table function + * and relation. In case of table function metric name is null. */ @ToString(onlyExplicitlyIncluded = true) public class PrometheusDescribeMetricRequest implements PrometheusSystemRequest { private final PrometheusClient prometheusClient; - @ToString.Include - private final String metricName; + @ToString.Include private final String metricName; private final DataSourceSchemaName dataSourceSchemaName; private static final Logger LOG = LogManager.getLogger(); /** - * Constructor for Prometheus Describe Metric Request. - * In case of pass through queries like query_range function, - * metric names are optional. + * Constructor for Prometheus Describe Metric Request. In case of pass through queries like + * query_range function, metric names are optional. * - * @param prometheusClient prometheusClient. + * @param prometheusClient prometheusClient. * @param dataSourceSchemaName dataSourceSchemaName. - * @param metricName metricName. 
+ * @param metricName metricName. */ - public PrometheusDescribeMetricRequest(PrometheusClient prometheusClient, - DataSourceSchemaName dataSourceSchemaName, - @NonNull String metricName) { + public PrometheusDescribeMetricRequest( + PrometheusClient prometheusClient, + DataSourceSchemaName dataSourceSchemaName, + @NonNull String metricName) { this.prometheusClient = prometheusClient; this.metricName = metricName; this.dataSourceSchemaName = dataSourceSchemaName; } - /** - * Get the mapping of field and type. - * Returns labels and default schema fields. + * Get the mapping of field and type. Returns labels and default schema fields. * * @return mapping of field and type. */ public Map getFieldTypes() { Map fieldTypes = new HashMap<>(); - AccessController.doPrivileged((PrivilegedAction>) () -> { - try { - prometheusClient.getLabels(metricName) - .forEach(label -> fieldTypes.put(label, ExprCoreType.STRING)); - } catch (IOException e) { - LOG.error("Error while fetching labels for {} from prometheus: {}", - metricName, e.getMessage()); - throw new RuntimeException(String.format("Error while fetching labels " - + "for %s from prometheus: %s", metricName, e.getMessage())); - } - return null; - }); + AccessController.doPrivileged( + (PrivilegedAction>) + () -> { + try { + prometheusClient + .getLabels(metricName) + .forEach(label -> fieldTypes.put(label, ExprCoreType.STRING)); + } catch (IOException e) { + LOG.error( + "Error while fetching labels for {} from prometheus: {}", + metricName, + e.getMessage()); + throw new RuntimeException( + String.format( + "Error while fetching labels " + "for %s from prometheus: %s", + metricName, e.getMessage())); + } + return null; + }); fieldTypes.putAll(PrometheusMetricDefaultSchema.DEFAULT_MAPPING.getMapping()); return fieldTypes; } @@ -93,14 +95,17 @@ public Map getFieldTypes() { public List search() { List results = new ArrayList<>(); for (Map.Entry entry : getFieldTypes().entrySet()) { - results.add(row(entry.getKey(), 
entry.getValue().legacyTypeName().toLowerCase(), - dataSourceSchemaName)); + results.add( + row( + entry.getKey(), + entry.getValue().legacyTypeName().toLowerCase(), + dataSourceSchemaName)); } return results; } - private ExprTupleValue row(String fieldName, String fieldType, - DataSourceSchemaName dataSourceSchemaName) { + private ExprTupleValue row( + String fieldName, String fieldType, DataSourceSchemaName dataSourceSchemaName) { LinkedHashMap valueMap = new LinkedHashMap<>(); valueMap.put("TABLE_CATALOG", stringValue(dataSourceSchemaName.getDataSourceName())); valueMap.put("TABLE_SCHEMA", stringValue(dataSourceSchemaName.getSchemaName())); diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusListMetricsRequest.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusListMetricsRequest.java index f5d2a44340..0e6c2bb2c6 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusListMetricsRequest.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusListMetricsRequest.java @@ -34,28 +34,33 @@ public class PrometheusListMetricsRequest implements PrometheusSystemRequest { private static final Logger LOG = LogManager.getLogger(); - @Override public List search() { - return AccessController.doPrivileged((PrivilegedAction>) () -> { - try { - Map> result = prometheusClient.getAllMetrics(); - return result.keySet() - .stream() - .map(x -> { - MetricMetadata metricMetadata = result.get(x).get(0); - return row(x, metricMetadata.getType(), - metricMetadata.getUnit(), metricMetadata.getHelp()); - }) - .collect(Collectors.toList()); - } catch (IOException e) { - LOG.error("Error while fetching metric list for from prometheus: {}", - e.getMessage()); - throw new RuntimeException(String.format("Error while fetching metric list " - + "for from prometheus: %s", e.getMessage())); - } - }); - + return AccessController.doPrivileged( + 
(PrivilegedAction>) + () -> { + try { + Map> result = prometheusClient.getAllMetrics(); + return result.keySet().stream() + .map( + x -> { + MetricMetadata metricMetadata = result.get(x).get(0); + return row( + x, + metricMetadata.getType(), + metricMetadata.getUnit(), + metricMetadata.getHelp()); + }) + .collect(Collectors.toList()); + } catch (IOException e) { + LOG.error( + "Error while fetching metric list for from prometheus: {}", e.getMessage()); + throw new RuntimeException( + String.format( + "Error while fetching metric list " + "for from prometheus: %s", + e.getMessage())); + } + }); } private ExprTupleValue row(String metricName, String tableType, String unit, String help) { diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusSystemRequest.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusSystemRequest.java index e68ad22c30..6972a9390c 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusSystemRequest.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/request/system/PrometheusSystemRequest.java @@ -10,9 +10,7 @@ import java.util.List; import org.opensearch.sql.data.model.ExprValue; -/** - * Prometheus system request query to get metadata Info. - */ +/** Prometheus system request query to get metadata Info. */ public interface PrometheusSystemRequest { /** @@ -21,5 +19,4 @@ public interface PrometheusSystemRequest { * @return list of ExprValue. 
*/ List search(); - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/response/PrometheusResponse.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/response/PrometheusResponse.java index 2c75588e4c..339d882f5a 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/response/PrometheusResponse.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/response/PrometheusResponse.java @@ -7,7 +7,6 @@ import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; import static org.opensearch.sql.data.type.ExprCoreType.LONG; -import static org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.LABELS; import static org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.MATRIX_KEY; import static org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.METRIC_KEY; import static org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.RESULT_KEY; @@ -43,13 +42,12 @@ public class PrometheusResponse implements Iterable { /** * Constructor. * - * @param responseObject Prometheus responseObject. - * @param prometheusResponseFieldNames data model which - * contains field names for the metric measurement - * and timestamp fieldName. + * @param responseObject Prometheus responseObject. + * @param prometheusResponseFieldNames data model which contains field names for the metric + * measurement and timestamp fieldName. 
*/ - public PrometheusResponse(JSONObject responseObject, - PrometheusResponseFieldNames prometheusResponseFieldNames) { + public PrometheusResponse( + JSONObject responseObject, PrometheusResponseFieldNames prometheusResponseFieldNames) { this.responseObject = responseObject; this.prometheusResponseFieldNames = prometheusResponseFieldNames; } @@ -67,18 +65,22 @@ public Iterator iterator() { for (int j = 0; j < values.length(); j++) { LinkedHashMap linkedHashMap = new LinkedHashMap<>(); JSONArray val = values.getJSONArray(j); - linkedHashMap.put(prometheusResponseFieldNames.getTimestampFieldName(), + linkedHashMap.put( + prometheusResponseFieldNames.getTimestampFieldName(), new ExprTimestampValue(Instant.ofEpochMilli((long) (val.getDouble(0) * 1000)))); - linkedHashMap.put(prometheusResponseFieldNames.getValueFieldName(), getValue(val, 1, - prometheusResponseFieldNames.getValueType())); + linkedHashMap.put( + prometheusResponseFieldNames.getValueFieldName(), + getValue(val, 1, prometheusResponseFieldNames.getValueType())); insertLabels(linkedHashMap, metric); result.add(new ExprTupleValue(linkedHashMap)); } } } else { - throw new RuntimeException(String.format("Unexpected Result Type: %s during Prometheus " - + "Response Parsing. 'matrix' resultType is expected", - responseObject.getString(RESULT_TYPE_KEY))); + throw new RuntimeException( + String.format( + "Unexpected Result Type: %s during Prometheus " + + "Response Parsing. 
'matrix' resultType is expected", + responseObject.getString(RESULT_TYPE_KEY))); } return result.iterator(); } @@ -104,12 +106,11 @@ private String getKey(String key) { } else { return this.prometheusResponseFieldNames.getGroupByList().stream() .filter(expression -> expression.getDelegated() instanceof ReferenceExpression) - .filter(expression - -> ((ReferenceExpression) expression.getDelegated()).getAttr().equals(key)) + .filter( + expression -> ((ReferenceExpression) expression.getDelegated()).getAttr().equals(key)) .findFirst() .map(NamedExpression::getName) .orElse(key); } } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricDefaultSchema.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricDefaultSchema.java index 790189d903..f0933eee9d 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricDefaultSchema.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricDefaultSchema.java @@ -20,12 +20,11 @@ @Getter @RequiredArgsConstructor public enum PrometheusMetricDefaultSchema { - - DEFAULT_MAPPING(new ImmutableMap.Builder() - .put(TIMESTAMP, ExprCoreType.TIMESTAMP) - .put(VALUE, ExprCoreType.DOUBLE) - .build()); + DEFAULT_MAPPING( + new ImmutableMap.Builder() + .put(TIMESTAMP, ExprCoreType.TIMESTAMP) + .put(VALUE, ExprCoreType.DOUBLE) + .build()); private final Map mapping; - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricScan.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricScan.java index 7f75cb3c07..598e388914 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricScan.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricScan.java @@ -23,26 +23,19 @@ import org.opensearch.sql.prometheus.storage.model.PrometheusResponseFieldNames; import 
org.opensearch.sql.storage.TableScanOperator; -/** - * Prometheus metric scan operator. - */ +/** Prometheus metric scan operator. */ @EqualsAndHashCode(onlyExplicitlyIncluded = true, callSuper = false) @ToString(onlyExplicitlyIncluded = true) public class PrometheusMetricScan extends TableScanOperator { private final PrometheusClient prometheusClient; - @EqualsAndHashCode.Include - @Getter - @Setter - @ToString.Include + @EqualsAndHashCode.Include @Getter @Setter @ToString.Include private PrometheusQueryRequest request; private Iterator iterator; - @Setter - private PrometheusResponseFieldNames prometheusResponseFieldNames; - + @Setter private PrometheusResponseFieldNames prometheusResponseFieldNames; private static final Logger LOG = LogManager.getLogger(); @@ -60,17 +53,25 @@ public PrometheusMetricScan(PrometheusClient prometheusClient) { @Override public void open() { super.open(); - this.iterator = AccessController.doPrivileged((PrivilegedAction>) () -> { - try { - JSONObject responseObject = prometheusClient.queryRange( - request.getPromQl(), - request.getStartTime(), request.getEndTime(), request.getStep()); - return new PrometheusResponse(responseObject, prometheusResponseFieldNames).iterator(); - } catch (IOException e) { - LOG.error(e.getMessage()); - throw new RuntimeException("Error fetching data from prometheus server. " + e.getMessage()); - } - }); + this.iterator = + AccessController.doPrivileged( + (PrivilegedAction>) + () -> { + try { + JSONObject responseObject = + prometheusClient.queryRange( + request.getPromQl(), + request.getStartTime(), + request.getEndTime(), + request.getStep()); + return new PrometheusResponse(responseObject, prometheusResponseFieldNames) + .iterator(); + } catch (IOException e) { + LOG.error(e.getMessage()); + throw new RuntimeException( + "Error fetching data from prometheus server. 
" + e.getMessage()); + } + }); } @Override diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricTable.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricTable.java index 4844e1f6db..1124e93608 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricTable.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusMetricTable.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.prometheus.storage; import static org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.LABELS; @@ -26,40 +25,30 @@ import org.opensearch.sql.storage.read.TableScanBuilder; /** - * Prometheus table (metric) implementation. - * This can be constructed from a metric Name - * or from PrometheusQueryRequest In case of query_range table function. + * Prometheus table (metric) implementation. This can be constructed from a metric Name or from + * PrometheusQueryRequest In case of query_range table function. */ public class PrometheusMetricTable implements Table { private final PrometheusClient prometheusClient; - @Getter - private final String metricName; - - @Getter - private final PrometheusQueryRequest prometheusQueryRequest; + @Getter private final String metricName; + @Getter private final PrometheusQueryRequest prometheusQueryRequest; - /** - * The cached mapping of field and type in index. - */ + /** The cached mapping of field and type in index. */ private Map cachedFieldTypes = null; - /** - * Constructor only with metric name. - */ + /** Constructor only with metric name. */ public PrometheusMetricTable(PrometheusClient prometheusService, @Nonnull String metricName) { this.prometheusClient = prometheusService; this.metricName = metricName; this.prometheusQueryRequest = null; } - /** - * Constructor for entire promQl Request. 
- */ - public PrometheusMetricTable(PrometheusClient prometheusService, - @Nonnull PrometheusQueryRequest prometheusQueryRequest) { + /** Constructor for entire promQl Request. */ + public PrometheusMetricTable( + PrometheusClient prometheusService, @Nonnull PrometheusQueryRequest prometheusQueryRequest) { this.prometheusClient = prometheusService; this.metricName = null; this.prometheusQueryRequest = prometheusQueryRequest; @@ -67,14 +56,12 @@ public PrometheusMetricTable(PrometheusClient prometheusService, @Override public boolean exists() { - throw new UnsupportedOperationException( - "Prometheus metric exists operation is not supported"); + throw new UnsupportedOperationException("Prometheus metric exists operation is not supported"); } @Override public void create(Map schema) { - throw new UnsupportedOperationException( - "Prometheus metric create operation is not supported"); + throw new UnsupportedOperationException("Prometheus metric create operation is not supported"); } @Override @@ -82,11 +69,10 @@ public Map getFieldTypes() { if (cachedFieldTypes == null) { if (metricName != null) { cachedFieldTypes = - new PrometheusDescribeMetricRequest(prometheusClient, null, - metricName).getFieldTypes(); + new PrometheusDescribeMetricRequest(prometheusClient, null, metricName).getFieldTypes(); } else { - cachedFieldTypes = new HashMap<>(PrometheusMetricDefaultSchema.DEFAULT_MAPPING - .getMapping()); + cachedFieldTypes = + new HashMap<>(PrometheusMetricDefaultSchema.DEFAULT_MAPPING.getMapping()); cachedFieldTypes.put(LABELS, ExprCoreType.STRING); } } @@ -95,8 +81,7 @@ public Map getFieldTypes() { @Override public PhysicalPlan implement(LogicalPlan plan) { - PrometheusMetricScan metricScan = - new PrometheusMetricScan(prometheusClient); + PrometheusMetricScan metricScan = new PrometheusMetricScan(prometheusClient); return plan.accept(new PrometheusDefaultImplementor(), metricScan); } @@ -105,8 +90,8 @@ public LogicalPlan optimize(LogicalPlan plan) { return 
PrometheusLogicalPlanOptimizerFactory.create().optimize(plan); } - //Only handling query_range function for now. - //we need to move PPL implementations to ScanBuilder in future. + // Only handling query_range function for now. + // we need to move PPL implementations to ScanBuilder in future. @Override public TableScanBuilder createScanBuilder() { if (metricName == null) { diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageEngine.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageEngine.java index e19b369a97..29fc15e2d0 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageEngine.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageEngine.java @@ -12,7 +12,6 @@ import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import lombok.RequiredArgsConstructor; import org.opensearch.sql.DataSourceSchemaName; import org.opensearch.sql.exception.SemanticCheckException; @@ -25,10 +24,7 @@ import org.opensearch.sql.storage.Table; import org.opensearch.sql.utils.SystemIndexUtils; - -/** - * Prometheus storage engine implementation. - */ +/** Prometheus storage engine implementation. 
*/ @RequiredArgsConstructor public class PrometheusStorageEngine implements StorageEngine { @@ -53,16 +49,14 @@ public Table getTable(DataSourceSchemaName dataSourceSchemaName, String tableNam } } - private Table resolveInformationSchemaTable(DataSourceSchemaName dataSourceSchemaName, - String tableName) { + private Table resolveInformationSchemaTable( + DataSourceSchemaName dataSourceSchemaName, String tableName) { if (SystemIndexUtils.TABLE_NAME_FOR_TABLES_INFO.equals(tableName)) { - return new PrometheusSystemTable(prometheusClient, - dataSourceSchemaName, SystemIndexUtils.TABLE_INFO); + return new PrometheusSystemTable( + prometheusClient, dataSourceSchemaName, SystemIndexUtils.TABLE_INFO); } else { throw new SemanticCheckException( String.format("Information Schema doesn't contain %s table", tableName)); } } - - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactory.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactory.java index b3ecd25af3..edae263ce3 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactory.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactory.java @@ -56,23 +56,20 @@ public DataSourceType getDataSourceType() { @Override public DataSource createDataSource(DataSourceMetadata metadata) { return new DataSource( - metadata.getName(), - DataSourceType.PROMETHEUS, - getStorageEngine(metadata.getProperties())); + metadata.getName(), DataSourceType.PROMETHEUS, getStorageEngine(metadata.getProperties())); } - - //Need to refactor to a separate Validator class. + // Need to refactor to a separate Validator class. 
private void validateDataSourceConfigProperties(Map dataSourceMetadataConfig) throws URISyntaxException { if (dataSourceMetadataConfig.get(AUTH_TYPE) != null) { - AuthenticationType authenticationType - = AuthenticationType.get(dataSourceMetadataConfig.get(AUTH_TYPE)); + AuthenticationType authenticationType = + AuthenticationType.get(dataSourceMetadataConfig.get(AUTH_TYPE)); if (AuthenticationType.BASICAUTH.equals(authenticationType)) { validateMissingFields(dataSourceMetadataConfig, Set.of(URI, USERNAME, PASSWORD)); } else if (AuthenticationType.AWSSIGV4AUTH.equals(authenticationType)) { - validateMissingFields(dataSourceMetadataConfig, Set.of(URI, ACCESS_KEY, SECRET_KEY, - REGION)); + validateMissingFields( + dataSourceMetadataConfig, Set.of(URI, ACCESS_KEY, SECRET_KEY, REGION)); } } else { validateMissingFields(dataSourceMetadataConfig, Set.of(URI)); @@ -83,20 +80,21 @@ private void validateDataSourceConfigProperties(Map dataSourceMe StorageEngine getStorageEngine(Map requiredConfig) { PrometheusClient prometheusClient; prometheusClient = - AccessController.doPrivileged((PrivilegedAction) () -> { - try { - validateDataSourceConfigProperties(requiredConfig); - return new PrometheusClientImpl(getHttpClient(requiredConfig), - new URI(requiredConfig.get(URI))); - } catch (URISyntaxException e) { - throw new IllegalArgumentException( - String.format("Invalid URI in prometheus properties: %s", e.getMessage())); - } - }); + AccessController.doPrivileged( + (PrivilegedAction) + () -> { + try { + validateDataSourceConfigProperties(requiredConfig); + return new PrometheusClientImpl( + getHttpClient(requiredConfig), new URI(requiredConfig.get(URI))); + } catch (URISyntaxException e) { + throw new IllegalArgumentException( + String.format("Invalid URI in prometheus properties: %s", e.getMessage())); + } + }); return new PrometheusStorageEngine(prometheusClient); } - private OkHttpClient getHttpClient(Map config) { OkHttpClient.Builder okHttpClient = new 
OkHttpClient.Builder(); okHttpClient.callTimeout(1, TimeUnit.MINUTES); @@ -104,16 +102,19 @@ private OkHttpClient getHttpClient(Map config) { if (config.get(AUTH_TYPE) != null) { AuthenticationType authenticationType = AuthenticationType.get(config.get(AUTH_TYPE)); if (AuthenticationType.BASICAUTH.equals(authenticationType)) { - okHttpClient.addInterceptor(new BasicAuthenticationInterceptor(config.get(USERNAME), - config.get(PASSWORD))); + okHttpClient.addInterceptor( + new BasicAuthenticationInterceptor(config.get(USERNAME), config.get(PASSWORD))); } else if (AuthenticationType.AWSSIGV4AUTH.equals(authenticationType)) { - okHttpClient.addInterceptor(new AwsSigningInterceptor( - new AWSStaticCredentialsProvider( - new BasicAWSCredentials(config.get(ACCESS_KEY), config.get(SECRET_KEY))), - config.get(REGION), "aps")); + okHttpClient.addInterceptor( + new AwsSigningInterceptor( + new AWSStaticCredentialsProvider( + new BasicAWSCredentials(config.get(ACCESS_KEY), config.get(SECRET_KEY))), + config.get(REGION), + "aps")); } else { throw new IllegalArgumentException( - String.format("AUTH Type : %s is not supported with Prometheus Connector", + String.format( + "AUTH Type : %s is not supported with Prometheus Connector", config.get(AUTH_TYPE))); } } @@ -132,13 +133,14 @@ private void validateMissingFields(Map config, Set field } StringBuilder errorStringBuilder = new StringBuilder(); if (missingFields.size() > 0) { - errorStringBuilder.append(String.format( - "Missing %s fields in the Prometheus connector properties.", missingFields)); + errorStringBuilder.append( + String.format( + "Missing %s fields in the Prometheus connector properties.", missingFields)); } if (invalidLengthFields.size() > 0) { - errorStringBuilder.append(String.format( - "Fields %s exceeds more than 1000 characters.", invalidLengthFields)); + errorStringBuilder.append( + String.format("Fields %s exceeds more than 1000 characters.", invalidLengthFields)); } if (errorStringBuilder.length() > 0) { 
throw new IllegalArgumentException(errorStringBuilder.toString()); @@ -148,8 +150,9 @@ private void validateMissingFields(Map config, Set field private void validateURI(Map config) throws URISyntaxException { URI uri = new URI(config.get(URI)); String host = uri.getHost(); - if (host == null || (!(DomainValidator.getInstance().isValid(host) - || DomainValidator.getInstance().isValidLocalTld(host)))) { + if (host == null + || (!(DomainValidator.getInstance().isValid(host) + || DomainValidator.getInstance().isValidLocalTld(host)))) { throw new IllegalArgumentException( String.format("Invalid hostname in the uri: %s", config.get(URI))); } else { @@ -158,10 +161,10 @@ private void validateURI(Map config) throws URISyntaxException { Matcher matcher = allowHostsPattern.matcher(host); if (!matcher.matches()) { throw new IllegalArgumentException( - String.format("Disallowed hostname in the uri: %s. Validate with %s config", + String.format( + "Disallowed hostname in the uri: %s. Validate with %s config", config.get(URI), Settings.Key.DATASOURCES_URI_ALLOWHOSTS.getKeyValue())); } } } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/QueryExemplarsTable.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/QueryExemplarsTable.java index dcb87c2cce..9ce8ae85fb 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/QueryExemplarsTable.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/QueryExemplarsTable.java @@ -20,20 +20,16 @@ import org.opensearch.sql.storage.read.TableScanBuilder; /** - * This is {@link Table} for querying exemplars in prometheus Table. - * Since {@link PrometheusMetricTable} is overloaded with query_range and normal - * PPL metric queries. Created a separate table for handling - * {@link PrometheusQueryExemplarsRequest} + * This is {@link Table} for querying exemplars in prometheus Table. 
Since {@link + * PrometheusMetricTable} is overloaded with query_range and normal PPL metric queries. Created a + * separate table for handling {@link PrometheusQueryExemplarsRequest} */ @RequiredArgsConstructor public class QueryExemplarsTable implements Table { - @Getter - private final PrometheusClient prometheusClient; - - @Getter - private final PrometheusQueryExemplarsRequest exemplarsRequest; + @Getter private final PrometheusClient prometheusClient; + @Getter private final PrometheusQueryExemplarsRequest exemplarsRequest; @Override public Map getFieldTypes() { @@ -49,5 +45,4 @@ public PhysicalPlan implement(LogicalPlan plan) { public TableScanBuilder createScanBuilder() { return new QueryExemplarsFunctionTableScanBuilder(prometheusClient, exemplarsRequest); } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/implementor/PrometheusDefaultImplementor.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/implementor/PrometheusDefaultImplementor.java index 6d426d13c8..f83a97dc06 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/implementor/PrometheusDefaultImplementor.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/implementor/PrometheusDefaultImplementor.java @@ -29,13 +29,9 @@ import org.opensearch.sql.prometheus.storage.querybuilder.StepParameterResolver; import org.opensearch.sql.prometheus.storage.querybuilder.TimeRangeParametersResolver; -/** - * Default Implementor of Logical plan for prometheus. - */ +/** Default Implementor of Logical plan for prometheus. 
*/ @RequiredArgsConstructor -public class PrometheusDefaultImplementor - extends DefaultImplementor { - +public class PrometheusDefaultImplementor extends DefaultImplementor { @Override public PhysicalPlan visitNode(LogicalPlan plan, PrometheusMetricScan context) { @@ -44,62 +40,64 @@ public PhysicalPlan visitNode(LogicalPlan plan, PrometheusMetricScan context) { } else if (plan instanceof PrometheusLogicalMetricAgg) { return visitIndexAggregation((PrometheusLogicalMetricAgg) plan, context); } else { - throw new IllegalStateException(StringUtils.format("unexpected plan node type %s", - plan.getClass())); + throw new IllegalStateException( + StringUtils.format("unexpected plan node type %s", plan.getClass())); } } - /** - * Implement PrometheusLogicalMetricScan. - */ - public PhysicalPlan visitIndexScan(PrometheusLogicalMetricScan node, - PrometheusMetricScan context) { + /** Implement PrometheusLogicalMetricScan. */ + public PhysicalPlan visitIndexScan( + PrometheusLogicalMetricScan node, PrometheusMetricScan context) { String query = SeriesSelectionQueryBuilder.build(node.getMetricName(), node.getFilter()); context.getRequest().setPromQl(query); setTimeRangeParameters(node.getFilter(), context); - context.getRequest() - .setStep(StepParameterResolver.resolve(context.getRequest().getStartTime(), - context.getRequest().getEndTime(), null)); + context + .getRequest() + .setStep( + StepParameterResolver.resolve( + context.getRequest().getStartTime(), context.getRequest().getEndTime(), null)); return context; } - /** - * Implement PrometheusLogicalMetricAgg. - */ - public PhysicalPlan visitIndexAggregation(PrometheusLogicalMetricAgg node, - PrometheusMetricScan context) { + /** Implement PrometheusLogicalMetricAgg. 
*/ + public PhysicalPlan visitIndexAggregation( + PrometheusLogicalMetricAgg node, PrometheusMetricScan context) { setTimeRangeParameters(node.getFilter(), context); - context.getRequest() - .setStep(StepParameterResolver.resolve(context.getRequest().getStartTime(), - context.getRequest().getEndTime(), node.getGroupByList())); + context + .getRequest() + .setStep( + StepParameterResolver.resolve( + context.getRequest().getStartTime(), + context.getRequest().getEndTime(), + node.getGroupByList())); String step = context.getRequest().getStep(); - String seriesSelectionQuery - = SeriesSelectionQueryBuilder.build(node.getMetricName(), node.getFilter()); + String seriesSelectionQuery = + SeriesSelectionQueryBuilder.build(node.getMetricName(), node.getFilter()); - String aggregateQuery - = AggregationQueryBuilder.build(node.getAggregatorList(), - node.getGroupByList()); + String aggregateQuery = + AggregationQueryBuilder.build(node.getAggregatorList(), node.getGroupByList()); String finalQuery = String.format(aggregateQuery, seriesSelectionQuery + "[" + step + "]"); context.getRequest().setPromQl(finalQuery); - //Since prometheus response doesn't have any fieldNames in its output. - //the field names are sent to PrometheusResponse constructor via context. + // Since prometheus response doesn't have any fieldNames in its output. + // the field names are sent to PrometheusResponse constructor via context. 
setPrometheusResponseFieldNames(node, context); return context; } @Override - public PhysicalPlan visitRelation(LogicalRelation node, - PrometheusMetricScan context) { + public PhysicalPlan visitRelation(LogicalRelation node, PrometheusMetricScan context) { PrometheusMetricTable prometheusMetricTable = (PrometheusMetricTable) node.getTable(); String query = SeriesSelectionQueryBuilder.build(node.getRelationName(), null); context.getRequest().setPromQl(query); setTimeRangeParameters(null, context); - context.getRequest() - .setStep(StepParameterResolver.resolve(context.getRequest().getStartTime(), - context.getRequest().getEndTime(), null)); + context + .getRequest() + .setStep( + StepParameterResolver.resolve( + context.getRequest().getStartTime(), context.getRequest().getEndTime(), null)); return context; } @@ -110,8 +108,8 @@ private void setTimeRangeParameters(Expression filter, PrometheusMetricScan cont context.getRequest().setEndTime(timeRange.getSecond()); } - private void setPrometheusResponseFieldNames(PrometheusLogicalMetricAgg node, - PrometheusMetricScan context) { + private void setPrometheusResponseFieldNames( + PrometheusLogicalMetricAgg node, PrometheusMetricScan context) { Optional spanExpression = getSpanExpression(node.getGroupByList()); if (spanExpression.isEmpty()) { throw new RuntimeException( @@ -133,6 +131,4 @@ private Optional getSpanExpression(List namedE .filter(expression -> expression.getDelegated() instanceof SpanExpression) .findFirst(); } - - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/model/PrometheusResponseFieldNames.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/model/PrometheusResponseFieldNames.java index d3a6ef184f..303ace7906 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/model/PrometheusResponseFieldNames.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/model/PrometheusResponseFieldNames.java @@ -17,7 +17,6 @@ import 
org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.expression.NamedExpression; - @Getter @Setter public class PrometheusResponseFieldNames { @@ -26,5 +25,4 @@ public class PrometheusResponseFieldNames { private ExprType valueType = DOUBLE; private String timestampFieldName = TIMESTAMP; private List groupByList; - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/model/QueryRangeParameters.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/model/QueryRangeParameters.java index 86ca99cea8..02187c5662 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/model/QueryRangeParameters.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/model/QueryRangeParameters.java @@ -21,5 +21,4 @@ public class QueryRangeParameters { private Long start; private Long end; private String step; - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/AggregationQueryBuilder.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/AggregationQueryBuilder.java index 76c8c6872e..540e2d8cf4 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/AggregationQueryBuilder.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/AggregationQueryBuilder.java @@ -7,12 +7,10 @@ package org.opensearch.sql.prometheus.storage.querybuilder; -import java.sql.Ref; import java.util.List; import java.util.Set; import java.util.stream.Collectors; import lombok.NoArgsConstructor; -import org.apache.commons.lang3.StringUtils; import org.opensearch.sql.expression.NamedExpression; import org.opensearch.sql.expression.ReferenceExpression; import org.opensearch.sql.expression.aggregation.NamedAggregator; @@ -20,49 +18,51 @@ import org.opensearch.sql.expression.span.SpanExpression; /** - * This class builds aggregation query for the given stats commands. 
- * In the generated query a placeholder(%s) is added in place of metric selection query - * and later replaced by metric selection query. + * This class builds aggregation query for the given stats commands. In the generated query a + * placeholder(%s) is added in place of metric selection query and later replaced by metric + * selection query. */ @NoArgsConstructor public class AggregationQueryBuilder { - private static final Set allowedStatsFunctions = Set.of( - BuiltinFunctionName.MAX.getName().getFunctionName(), - BuiltinFunctionName.MIN.getName().getFunctionName(), - BuiltinFunctionName.COUNT.getName().getFunctionName(), - BuiltinFunctionName.SUM.getName().getFunctionName(), - BuiltinFunctionName.AVG.getName().getFunctionName() - ); - + private static final Set allowedStatsFunctions = + Set.of( + BuiltinFunctionName.MAX.getName().getFunctionName(), + BuiltinFunctionName.MIN.getName().getFunctionName(), + BuiltinFunctionName.COUNT.getName().getFunctionName(), + BuiltinFunctionName.SUM.getName().getFunctionName(), + BuiltinFunctionName.AVG.getName().getFunctionName()); /** * Build Aggregation query from series selector query from expression. * * @return query string. 
*/ - public static String build(List namedAggregatorList, - List groupByList) { + public static String build( + List namedAggregatorList, List groupByList) { if (namedAggregatorList.size() > 1) { throw new RuntimeException( "Prometheus Catalog doesn't multiple aggregations in stats command"); } - if (!allowedStatsFunctions - .contains(namedAggregatorList.get(0).getFunctionName().getFunctionName())) { - throw new RuntimeException(String.format( - "Prometheus Catalog only supports %s aggregations.", allowedStatsFunctions)); + if (!allowedStatsFunctions.contains( + namedAggregatorList.get(0).getFunctionName().getFunctionName())) { + throw new RuntimeException( + String.format( + "Prometheus Catalog only supports %s aggregations.", allowedStatsFunctions)); } StringBuilder aggregateQuery = new StringBuilder(); - aggregateQuery.append(namedAggregatorList.get(0).getFunctionName().getFunctionName()) + aggregateQuery + .append(namedAggregatorList.get(0).getFunctionName().getFunctionName()) .append(" "); if (groupByList != null && !groupByList.isEmpty()) { - groupByList = groupByList.stream() - .filter(expression -> !(expression.getDelegated() instanceof SpanExpression)) - .collect(Collectors.toList()); + groupByList = + groupByList.stream() + .filter(expression -> !(expression.getDelegated() instanceof SpanExpression)) + .collect(Collectors.toList()); if (groupByList.size() > 0) { aggregateQuery.append("by("); aggregateQuery.append( @@ -80,5 +80,4 @@ public static String build(List namedAggregatorList, .append("(%s))"); return aggregateQuery.toString(); } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/SeriesSelectionQueryBuilder.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/SeriesSelectionQueryBuilder.java index 461b5341f8..d824fcb5b3 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/SeriesSelectionQueryBuilder.java +++ 
b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/SeriesSelectionQueryBuilder.java @@ -7,7 +7,6 @@ package org.opensearch.sql.prometheus.storage.querybuilder; - import static org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.TIMESTAMP; import java.util.stream.Collectors; @@ -19,14 +18,10 @@ import org.opensearch.sql.expression.ReferenceExpression; import org.opensearch.sql.expression.function.BuiltinFunctionName; -/** - * This class builds metric selection query from the filter condition - * and metric name. - */ +/** This class builds metric selection query from the filter condition and metric name. */ @NoArgsConstructor public class SeriesSelectionQueryBuilder { - /** * Build Prometheus series selector query from expression. * @@ -35,8 +30,8 @@ public class SeriesSelectionQueryBuilder { */ public static String build(String metricName, Expression filterCondition) { if (filterCondition != null) { - SeriesSelectionExpressionNodeVisitor seriesSelectionExpressionNodeVisitor - = new SeriesSelectionExpressionNodeVisitor(); + SeriesSelectionExpressionNodeVisitor seriesSelectionExpressionNodeVisitor = + new SeriesSelectionExpressionNodeVisitor(); String selectorQuery = filterCondition.accept(seriesSelectionExpressionNodeVisitor, null); if (selectorQuery != null) { return metricName + "{" + selectorQuery + "}"; @@ -54,9 +49,9 @@ public String visitFunction(FunctionExpression func, Object context) { .filter(StringUtils::isNotEmpty) .collect(Collectors.joining(" , ")); } else if ((BuiltinFunctionName.LTE.getName().equals(func.getFunctionName()) - || BuiltinFunctionName.GTE.getName().equals(func.getFunctionName()) - || BuiltinFunctionName.LESS.getName().equals(func.getFunctionName()) - || BuiltinFunctionName.GREATER.getName().equals(func.getFunctionName())) + || BuiltinFunctionName.GTE.getName().equals(func.getFunctionName()) + || BuiltinFunctionName.LESS.getName().equals(func.getFunctionName()) + || 
BuiltinFunctionName.GREATER.getName().equals(func.getFunctionName())) && ((ReferenceExpression) func.getArguments().get(0)).getAttr().equals(TIMESTAMP)) { return null; } else if (BuiltinFunctionName.EQUAL.getName().equals(func.getFunctionName())) { @@ -65,11 +60,10 @@ public String visitFunction(FunctionExpression func, Object context) { + func.getArguments().get(1); } else { throw new RuntimeException( - String.format("Prometheus Datasource doesn't support %s " - + "in where command.", + String.format( + "Prometheus Datasource doesn't support %s " + "in where command.", func.getFunctionName().getFunctionName())); } } } - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/StepParameterResolver.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/StepParameterResolver.java index 2078950a5d..4c23ea9086 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/StepParameterResolver.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/StepParameterResolver.java @@ -15,25 +15,20 @@ import org.opensearch.sql.expression.NamedExpression; import org.opensearch.sql.expression.span.SpanExpression; -/** - * This class resolves step parameter required for - * query_range api of prometheus. - */ +/** This class resolves step parameter required for query_range api of prometheus. */ @NoArgsConstructor public class StepParameterResolver { /** - * Extract step from groupByList or apply heuristic arithmetic - * on endTime and startTime. - * + * Extract step from groupByList or apply heuristic arithmetic on endTime and startTime. * * @param startTime startTime. * @param endTime endTime. * @param groupByList groupByList. * @return Step String. 
*/ - public static String resolve(@NonNull Long startTime, @NonNull Long endTime, - List groupByList) { + public static String resolve( + @NonNull Long startTime, @NonNull Long endTime, List groupByList) { Optional spanExpression = getSpanExpression(groupByList); if (spanExpression.isPresent()) { if (StringUtils.isEmpty(spanExpression.get().getUnit().getName())) { @@ -48,7 +43,7 @@ public static String resolve(@NonNull Long startTime, @NonNull Long endTime, } private static Optional getSpanExpression( - List namedExpressionList) { + List namedExpressionList) { if (namedExpressionList == null) { return Optional.empty(); } @@ -57,7 +52,4 @@ private static Optional getSpanExpression( .map(expression -> (SpanExpression) expression.getDelegated()) .findFirst(); } - - - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/TimeRangeParametersResolver.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/TimeRangeParametersResolver.java index b462f6bafe..c7766f22d6 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/TimeRangeParametersResolver.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/querybuilder/TimeRangeParametersResolver.java @@ -22,16 +22,14 @@ @NoArgsConstructor public class TimeRangeParametersResolver extends ExpressionNodeVisitor { - private Long startTime; private Long endTime; /** - * Build Range Query Parameters from filter expression. - * If the filter condition consists of @timestamp, startTime and - * endTime are derived. or else it will be defaulted to now() and now()-1hr. - * If one of starttime and endtime are provided, the other will be derived from them - * by fixing the time range duration to 1hr. + * Build Range Query Parameters from filter expression. If the filter condition consists + * of @timestamp, startTime and endTime are derived. or else it will be defaulted to now() and + * now()-1hr. 
If one of starttime and endtime are provided, the other will be derived from them by + * fixing the time range duration to 1hr. * * @param filterCondition expression. * @return query string @@ -72,13 +70,10 @@ public Void visitFunction(FunctionExpression func, Object context) { } } } else { - func.getArguments() - .stream() + func.getArguments().stream() .filter(arg -> arg instanceof FunctionExpression) .forEach(arg -> visitFunction((FunctionExpression) arg, context)); } return null; } - - } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTable.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTable.java index dca946da57..b5557e7298 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTable.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTable.java @@ -5,7 +5,6 @@ package org.opensearch.sql.prometheus.storage.system; - import static org.opensearch.sql.utils.SystemIndexUtils.systemTable; import com.google.common.annotations.VisibleForTesting; @@ -25,13 +24,9 @@ import org.opensearch.sql.storage.Table; import org.opensearch.sql.utils.SystemIndexUtils; -/** - * Prometheus System Table Implementation. - */ +/** Prometheus System Table Implementation. */ public class PrometheusSystemTable implements Table { - /** - * System Index Name. - */ + /** System Index Name. 
*/ private final Pair systemIndexBundle; private final DataSourceSchemaName dataSourceSchemaName; @@ -54,8 +49,7 @@ public PhysicalPlan implement(LogicalPlan plan) { @VisibleForTesting @RequiredArgsConstructor - public class PrometheusSystemTableDefaultImplementor - extends DefaultImplementor { + public class PrometheusSystemTableDefaultImplementor extends DefaultImplementor { @Override public PhysicalPlan visitRelation(LogicalRelation node, Object context) { @@ -67,12 +61,14 @@ private Pair buildIndexBun PrometheusClient client, String indexName) { SystemIndexUtils.SystemTable systemTable = systemTable(indexName); if (systemTable.isSystemInfoTable()) { - return Pair.of(PrometheusSystemTableSchema.SYS_TABLE_TABLES, + return Pair.of( + PrometheusSystemTableSchema.SYS_TABLE_TABLES, new PrometheusListMetricsRequest(client, dataSourceSchemaName)); } else { - return Pair.of(PrometheusSystemTableSchema.SYS_TABLE_MAPPINGS, - new PrometheusDescribeMetricRequest(client, - dataSourceSchemaName, systemTable.getTableName())); + return Pair.of( + PrometheusSystemTableSchema.SYS_TABLE_MAPPINGS, + new PrometheusDescribeMetricRequest( + client, dataSourceSchemaName, systemTable.getTableName())); } } } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableScan.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableScan.java index 5c0bc656fe..907e8a0c15 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableScan.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableScan.java @@ -13,16 +13,13 @@ import org.opensearch.sql.prometheus.request.system.PrometheusSystemRequest; import org.opensearch.sql.storage.TableScanOperator; -/** - * Prometheus table scan operator. - */ +/** Prometheus table scan operator. 
*/ @RequiredArgsConstructor @EqualsAndHashCode(onlyExplicitlyIncluded = true, callSuper = false) @ToString(onlyExplicitlyIncluded = true) public class PrometheusSystemTableScan extends TableScanOperator { - @EqualsAndHashCode.Include - private final PrometheusSystemRequest request; + @EqualsAndHashCode.Include private final PrometheusSystemRequest request; private Iterator iterator; diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableSchema.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableSchema.java index 668a208c79..9272731dce 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableSchema.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableSchema.java @@ -18,22 +18,23 @@ @Getter @RequiredArgsConstructor public enum PrometheusSystemTableSchema { - - SYS_TABLE_TABLES(new ImmutableMap.Builder() - .put("TABLE_CATALOG", STRING) - .put("TABLE_SCHEMA", STRING) - .put("TABLE_NAME", STRING) - .put("TABLE_TYPE", STRING) - .put("UNIT", STRING) - .put("REMARKS", STRING) - .build()), - SYS_TABLE_MAPPINGS(new ImmutableMap.Builder() - .put("TABLE_CATALOG", STRING) - .put("TABLE_SCHEMA", STRING) - .put("TABLE_NAME", STRING) - .put("COLUMN_NAME", STRING) - .put("DATA_TYPE", STRING) - .build()); + SYS_TABLE_TABLES( + new ImmutableMap.Builder() + .put("TABLE_CATALOG", STRING) + .put("TABLE_SCHEMA", STRING) + .put("TABLE_NAME", STRING) + .put("TABLE_TYPE", STRING) + .put("UNIT", STRING) + .put("REMARKS", STRING) + .build()), + SYS_TABLE_MAPPINGS( + new ImmutableMap.Builder() + .put("TABLE_CATALOG", STRING) + .put("TABLE_SCHEMA", STRING) + .put("TABLE_NAME", STRING) + .put("COLUMN_NAME", STRING) + .put("DATA_TYPE", STRING) + .build()); private final Map mapping; } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/utils/TableFunctionUtils.java 
b/prometheus/src/main/java/org/opensearch/sql/prometheus/utils/TableFunctionUtils.java index 35edc83614..24bec1ede3 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/utils/TableFunctionUtils.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/utils/TableFunctionUtils.java @@ -16,52 +16,54 @@ import org.opensearch.sql.expression.Expression; import org.opensearch.sql.expression.NamedArgumentExpression; -/** - * Utility class for common table function methods. - */ +/** Utility class for common table function methods. */ @UtilityClass public class TableFunctionUtils { /** - * Validates if function arguments are valid - * in both the cases when the arguments are passed by position or name. + * Validates if function arguments are valid in both the cases when the arguments are passed by + * position or name. * * @param arguments arguments of function provided in the input order. * @param argumentNames ordered argument names of the function. */ - public static void validatePrometheusTableFunctionArguments(List arguments, - List argumentNames) { - Boolean argumentsPassedByName = arguments.stream() - .noneMatch(arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); - Boolean argumentsPassedByPosition = arguments.stream() - .allMatch(arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); + public static void validatePrometheusTableFunctionArguments( + List arguments, List argumentNames) { + Boolean argumentsPassedByName = + arguments.stream() + .noneMatch(arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); + Boolean argumentsPassedByPosition = + arguments.stream() + .allMatch(arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); if (!(argumentsPassedByName || argumentsPassedByPosition)) { throw new SemanticCheckException("Arguments should be either passed by name or position"); } if (arguments.size() != argumentNames.size()) { throw new SemanticCheckException( 
- generateErrorMessageForMissingArguments(argumentsPassedByPosition, arguments, - argumentNames)); + generateErrorMessageForMissingArguments( + argumentsPassedByPosition, arguments, argumentNames)); } } /** - * Get Named Arguments of Table Function Arguments. - * If they are passed by position create new ones or else return the same arguments passed. + * Get Named Arguments of Table Function Arguments. If they are passed by position create new ones + * or else return the same arguments passed. * * @param arguments arguments of function provided in the input order. * @param argumentNames ordered argument names of the function. */ - public static List getNamedArgumentsOfTableFunction(List arguments, - List argumentNames) { - boolean argumentsPassedByPosition = arguments.stream() - .allMatch(arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); + public static List getNamedArgumentsOfTableFunction( + List arguments, List argumentNames) { + boolean argumentsPassedByPosition = + arguments.stream() + .allMatch(arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); if (argumentsPassedByPosition) { List namedArguments = new ArrayList<>(); for (int i = 0; i < arguments.size(); i++) { - namedArguments.add(new NamedArgumentExpression(argumentNames.get(i), - ((NamedArgumentExpression) arguments.get(i)).getValue())); + namedArguments.add( + new NamedArgumentExpression( + argumentNames.get(i), ((NamedArgumentExpression) arguments.get(i)).getValue())); } return namedArguments; } @@ -73,17 +75,17 @@ private static String generateErrorMessageForMissingArguments( List arguments, List argumentNames) { if (areArgumentsPassedByPosition) { - return String.format("Missing arguments:[%s]", + return String.format( + "Missing arguments:[%s]", String.join(",", argumentNames.subList(arguments.size(), argumentNames.size()))); } else { Set requiredArguments = new HashSet<>(argumentNames); Set providedArguments = - arguments.stream().map(expression -> 
((NamedArgumentExpression) expression).getArgName()) + arguments.stream() + .map(expression -> ((NamedArgumentExpression) expression).getArgName()) .collect(Collectors.toSet()); requiredArguments.removeAll(providedArguments); return String.format("Missing arguments:[%s]", String.join(",", requiredArguments)); } } - - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/client/PrometheusClientImplTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/client/PrometheusClientImplTest.java index b26a45e301..735a1a1052 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/client/PrometheusClientImplTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/client/PrometheusClientImplTest.java @@ -43,7 +43,6 @@ public class PrometheusClientImplTest { private MockWebServer mockWebServer; private PrometheusClient prometheusClient; - @BeforeEach void setUp() throws IOException { this.mockWebServer = new MockWebServer(); @@ -52,13 +51,13 @@ void setUp() throws IOException { new PrometheusClientImpl(new OkHttpClient(), mockWebServer.url("").uri().normalize()); } - @Test @SneakyThrows void testQueryRange() { - MockResponse mockResponse = new MockResponse() - .addHeader("Content-Type", "application/json; charset=utf-8") - .setBody(getJson("query_range_response.json")); + MockResponse mockResponse = + new MockResponse() + .addHeader("Content-Type", "application/json; charset=utf-8") + .setBody(getJson("query_range_response.json")); mockWebServer.enqueue(mockResponse); JSONObject jsonObject = prometheusClient.queryRange(QUERY, STARTTIME, ENDTIME, STEP); assertTrue(new JSONObject(getJson("query_range_result.json")).similar(jsonObject)); @@ -69,13 +68,15 @@ void testQueryRange() { @Test @SneakyThrows void testQueryRangeWith2xxStatusAndError() { - MockResponse mockResponse = new MockResponse() - .addHeader("Content-Type", "application/json; charset=utf-8") - .setBody(getJson("error_response.json")); + MockResponse mockResponse = 
+ new MockResponse() + .addHeader("Content-Type", "application/json; charset=utf-8") + .setBody(getJson("error_response.json")); mockWebServer.enqueue(mockResponse); - RuntimeException runtimeException - = assertThrows(RuntimeException.class, - () -> prometheusClient.queryRange(QUERY, STARTTIME, ENDTIME, STEP)); + RuntimeException runtimeException = + assertThrows( + RuntimeException.class, + () -> prometheusClient.queryRange(QUERY, STARTTIME, ENDTIME, STEP)); assertEquals("Error", runtimeException.getMessage()); RecordedRequest recordedRequest = mockWebServer.takeRequest(); verifyQueryRangeCall(recordedRequest); @@ -84,13 +85,15 @@ void testQueryRangeWith2xxStatusAndError() { @Test @SneakyThrows void testQueryRangeWithNon2xxError() { - MockResponse mockResponse = new MockResponse() - .addHeader("Content-Type", "application/json; charset=utf-8") - .setResponseCode(400); + MockResponse mockResponse = + new MockResponse() + .addHeader("Content-Type", "application/json; charset=utf-8") + .setResponseCode(400); mockWebServer.enqueue(mockResponse); - RuntimeException runtimeException - = assertThrows(RuntimeException.class, - () -> prometheusClient.queryRange(QUERY, STARTTIME, ENDTIME, STEP)); + RuntimeException runtimeException = + assertThrows( + RuntimeException.class, + () -> prometheusClient.queryRange(QUERY, STARTTIME, ENDTIME, STEP)); assertTrue( runtimeException.getMessage().contains("Request to Prometheus is Unsuccessful with :")); RecordedRequest recordedRequest = mockWebServer.takeRequest(); @@ -100,16 +103,20 @@ void testQueryRangeWithNon2xxError() { @Test @SneakyThrows void testGetLabel() { - MockResponse mockResponse = new MockResponse() - .addHeader("Content-Type", "application/json; charset=utf-8") - .setBody(getJson("get_labels_response.json")); + MockResponse mockResponse = + new MockResponse() + .addHeader("Content-Type", "application/json; charset=utf-8") + .setBody(getJson("get_labels_response.json")); mockWebServer.enqueue(mockResponse); List 
response = prometheusClient.getLabels(METRIC_NAME); - assertEquals(new ArrayList() {{ - add("call"); - add("code"); - } - }, response); + assertEquals( + new ArrayList() { + { + add("call"); + add("code"); + } + }, + response); RecordedRequest recordedRequest = mockWebServer.takeRequest(); verifyGetLabelsCall(recordedRequest); } @@ -117,30 +124,34 @@ void testGetLabel() { @Test @SneakyThrows void testGetAllMetrics() { - MockResponse mockResponse = new MockResponse() - .addHeader("Content-Type", "application/json; charset=utf-8") - .setBody(getJson("all_metrics_response.json")); + MockResponse mockResponse = + new MockResponse() + .addHeader("Content-Type", "application/json; charset=utf-8") + .setBody(getJson("all_metrics_response.json")); mockWebServer.enqueue(mockResponse); Map> response = prometheusClient.getAllMetrics(); Map> expected = new HashMap<>(); - expected.put("go_gc_duration_seconds", - Collections.singletonList(new MetricMetadata("summary", - "A summary of the pause duration of garbage collection cycles.", ""))); - expected.put("go_goroutines", - Collections.singletonList(new MetricMetadata("gauge", - "Number of goroutines that currently exist.", ""))); + expected.put( + "go_gc_duration_seconds", + Collections.singletonList( + new MetricMetadata( + "summary", "A summary of the pause duration of garbage collection cycles.", ""))); + expected.put( + "go_goroutines", + Collections.singletonList( + new MetricMetadata("gauge", "Number of goroutines that currently exist.", ""))); assertEquals(expected, response); RecordedRequest recordedRequest = mockWebServer.takeRequest(); verifyGetAllMetricsCall(recordedRequest); } - @Test @SneakyThrows void testQueryExemplars() { - MockResponse mockResponse = new MockResponse() - .addHeader("Content-Type", "application/json; charset=utf-8") - .setBody(getJson("query_exemplars_response.json")); + MockResponse mockResponse = + new MockResponse() + .addHeader("Content-Type", "application/json; charset=utf-8") + 
.setBody(getJson("query_exemplars_response.json")); mockWebServer.enqueue(mockResponse); JSONArray jsonArray = prometheusClient.queryExemplars(QUERY, STARTTIME, ENDTIME); assertTrue(new JSONArray(getJson("query_exemplars_result.json")).similar(jsonArray)); diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarsFunctionImplementationTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarsFunctionImplementationTest.java index 025e3bde06..6009d3229c 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarsFunctionImplementationTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryExemplarsFunctionImplementationTest.java @@ -22,33 +22,34 @@ import org.opensearch.sql.expression.Expression; import org.opensearch.sql.expression.function.FunctionName; import org.opensearch.sql.prometheus.client.PrometheusClient; -import org.opensearch.sql.prometheus.functions.implementation.QueryExemplarFunctionImplementation; import org.opensearch.sql.prometheus.request.PrometheusQueryExemplarsRequest; import org.opensearch.sql.prometheus.storage.QueryExemplarsTable; - @ExtendWith(MockitoExtension.class) class QueryExemplarsFunctionImplementationTest { - @Mock - private PrometheusClient client; - + @Mock private PrometheusClient client; @Test void testValueOfAndTypeAndToString() { FunctionName functionName = new FunctionName("query_exemplars"); - List namedArgumentExpressionList - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("starttime", DSL.literal(12345)), - DSL.namedArgument("endtime", DSL.literal(12345))); - QueryExemplarFunctionImplementation queryExemplarFunctionImplementation - = + List namedArgumentExpressionList = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("starttime", DSL.literal(12345)), + 
DSL.namedArgument("endtime", DSL.literal(12345))); + QueryExemplarFunctionImplementation queryExemplarFunctionImplementation = new QueryExemplarFunctionImplementation(functionName, namedArgumentExpressionList, client); - UnsupportedOperationException exception = assertThrows(UnsupportedOperationException.class, - () -> queryExemplarFunctionImplementation.valueOf()); - assertEquals("Prometheus defined function [query_exemplars] is only " - + "supported in SOURCE clause with prometheus connector catalog", exception.getMessage()); - assertEquals("query_exemplars(query=\"http_latency\", starttime=12345, endtime=12345)", + UnsupportedOperationException exception = + assertThrows( + UnsupportedOperationException.class, + () -> queryExemplarFunctionImplementation.valueOf()); + assertEquals( + "Prometheus defined function [query_exemplars] is only " + + "supported in SOURCE clause with prometheus connector catalog", + exception.getMessage()); + assertEquals( + "query_exemplars(query=\"http_latency\", starttime=12345, endtime=12345)", queryExemplarFunctionImplementation.toString()); assertEquals(ExprCoreType.STRUCT, queryExemplarFunctionImplementation.type()); } @@ -56,15 +57,15 @@ void testValueOfAndTypeAndToString() { @Test void testApplyArguments() { FunctionName functionName = new FunctionName("query_exemplars"); - List namedArgumentExpressionList - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("starttime", DSL.literal(12345)), - DSL.namedArgument("endtime", DSL.literal(1234))); - QueryExemplarFunctionImplementation queryExemplarFunctionImplementation - = + List namedArgumentExpressionList = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("starttime", DSL.literal(12345)), + DSL.namedArgument("endtime", DSL.literal(1234))); + QueryExemplarFunctionImplementation queryExemplarFunctionImplementation = new QueryExemplarFunctionImplementation(functionName, namedArgumentExpressionList, client); 
- QueryExemplarsTable queryExemplarsTable - = (QueryExemplarsTable) queryExemplarFunctionImplementation.applyArguments(); + QueryExemplarsTable queryExemplarsTable = + (QueryExemplarsTable) queryExemplarFunctionImplementation.applyArguments(); assertNotNull(queryExemplarsTable.getExemplarsRequest()); PrometheusQueryExemplarsRequest request = queryExemplarsTable.getExemplarsRequest(); assertEquals("http_latency", request.getQuery()); @@ -75,17 +76,17 @@ void testApplyArguments() { @Test void testApplyArgumentsException() { FunctionName functionName = new FunctionName("query_exemplars"); - List namedArgumentExpressionList - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("starttime", DSL.literal(12345)), - DSL.namedArgument("end_time", DSL.literal(1234))); - QueryExemplarFunctionImplementation queryExemplarFunctionImplementation - = + List namedArgumentExpressionList = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("starttime", DSL.literal(12345)), + DSL.namedArgument("end_time", DSL.literal(1234))); + QueryExemplarFunctionImplementation queryExemplarFunctionImplementation = new QueryExemplarFunctionImplementation(functionName, namedArgumentExpressionList, client); - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> queryExemplarFunctionImplementation.applyArguments()); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, + () -> queryExemplarFunctionImplementation.applyArguments()); assertEquals("Invalid Function Argument:end_time", exception.getMessage()); } - - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryRangeFunctionImplementationTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryRangeFunctionImplementationTest.java index 9732999a92..288bc35b0f 100644 --- 
a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryRangeFunctionImplementationTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/implementation/QueryRangeFunctionImplementationTest.java @@ -8,11 +8,9 @@ package org.opensearch.sql.prometheus.functions.implementation; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.List; import org.junit.jupiter.api.Test; @@ -25,33 +23,34 @@ import org.opensearch.sql.expression.Expression; import org.opensearch.sql.expression.function.FunctionName; import org.opensearch.sql.prometheus.client.PrometheusClient; -import org.opensearch.sql.prometheus.functions.implementation.QueryRangeFunctionImplementation; import org.opensearch.sql.prometheus.request.PrometheusQueryRequest; import org.opensearch.sql.prometheus.storage.PrometheusMetricTable; - @ExtendWith(MockitoExtension.class) class QueryRangeFunctionImplementationTest { - @Mock - private PrometheusClient client; - + @Mock private PrometheusClient client; @Test void testValueOfAndTypeAndToString() { FunctionName functionName = new FunctionName("query_range"); - List namedArgumentExpressionList - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("starttime", DSL.literal(12345)), - DSL.namedArgument("endtime", DSL.literal(12345)), - DSL.namedArgument("step", DSL.literal(14))); - QueryRangeFunctionImplementation queryRangeFunctionImplementation - = new QueryRangeFunctionImplementation(functionName, namedArgumentExpressionList, client); - UnsupportedOperationException exception = assertThrows(UnsupportedOperationException.class, - () -> 
queryRangeFunctionImplementation.valueOf()); - assertEquals("Prometheus defined function [query_range] is only " - + "supported in SOURCE clause with prometheus connector catalog", exception.getMessage()); - assertEquals("query_range(query=\"http_latency\", starttime=12345, endtime=12345, step=14)", + List namedArgumentExpressionList = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("starttime", DSL.literal(12345)), + DSL.namedArgument("endtime", DSL.literal(12345)), + DSL.namedArgument("step", DSL.literal(14))); + QueryRangeFunctionImplementation queryRangeFunctionImplementation = + new QueryRangeFunctionImplementation(functionName, namedArgumentExpressionList, client); + UnsupportedOperationException exception = + assertThrows( + UnsupportedOperationException.class, () -> queryRangeFunctionImplementation.valueOf()); + assertEquals( + "Prometheus defined function [query_range] is only " + + "supported in SOURCE clause with prometheus connector catalog", + exception.getMessage()); + assertEquals( + "query_range(query=\"http_latency\", starttime=12345, endtime=12345, step=14)", queryRangeFunctionImplementation.toString()); assertEquals(ExprCoreType.STRUCT, queryRangeFunctionImplementation.type()); } @@ -59,19 +58,20 @@ void testValueOfAndTypeAndToString() { @Test void testApplyArguments() { FunctionName functionName = new FunctionName("query_range"); - List namedArgumentExpressionList - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("starttime", DSL.literal(12345)), - DSL.namedArgument("endtime", DSL.literal(1234)), - DSL.namedArgument("step", DSL.literal(14))); - QueryRangeFunctionImplementation queryRangeFunctionImplementation - = new QueryRangeFunctionImplementation(functionName, namedArgumentExpressionList, client); - PrometheusMetricTable prometheusMetricTable - = (PrometheusMetricTable) queryRangeFunctionImplementation.applyArguments(); + List namedArgumentExpressionList = + 
List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("starttime", DSL.literal(12345)), + DSL.namedArgument("endtime", DSL.literal(1234)), + DSL.namedArgument("step", DSL.literal(14))); + QueryRangeFunctionImplementation queryRangeFunctionImplementation = + new QueryRangeFunctionImplementation(functionName, namedArgumentExpressionList, client); + PrometheusMetricTable prometheusMetricTable = + (PrometheusMetricTable) queryRangeFunctionImplementation.applyArguments(); assertNull(prometheusMetricTable.getMetricName()); assertNotNull(prometheusMetricTable.getPrometheusQueryRequest()); - PrometheusQueryRequest prometheusQueryRequest - = prometheusMetricTable.getPrometheusQueryRequest(); + PrometheusQueryRequest prometheusQueryRequest = + prometheusMetricTable.getPrometheusQueryRequest(); assertEquals("http_latency", prometheusQueryRequest.getPromQl().toString()); assertEquals(12345, prometheusQueryRequest.getStartTime()); assertEquals(1234, prometheusQueryRequest.getEndTime()); @@ -81,17 +81,18 @@ void testApplyArguments() { @Test void testApplyArgumentsException() { FunctionName functionName = new FunctionName("query_range"); - List namedArgumentExpressionList - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("starttime", DSL.literal(12345)), - DSL.namedArgument("end_time", DSL.literal(1234)), - DSL.namedArgument("step", DSL.literal(14))); - QueryRangeFunctionImplementation queryRangeFunctionImplementation - = new QueryRangeFunctionImplementation(functionName, namedArgumentExpressionList, client); - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> queryRangeFunctionImplementation.applyArguments()); + List namedArgumentExpressionList = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("starttime", DSL.literal(12345)), + DSL.namedArgument("end_time", DSL.literal(1234)), + DSL.namedArgument("step", 
DSL.literal(14))); + QueryRangeFunctionImplementation queryRangeFunctionImplementation = + new QueryRangeFunctionImplementation(functionName, namedArgumentExpressionList, client); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, + () -> queryRangeFunctionImplementation.applyArguments()); assertEquals("Invalid Function Argument:end_time", exception.getMessage()); } - - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryExemplarsTableFunctionResolverTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryExemplarsTableFunctionResolverTest.java index 3e26b46c8f..af8ebf48e2 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryExemplarsTableFunctionResolverTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryExemplarsTableFunctionResolverTest.java @@ -35,34 +35,34 @@ @ExtendWith(MockitoExtension.class) class QueryExemplarsTableFunctionResolverTest { - @Mock - private PrometheusClient client; + @Mock private PrometheusClient client; - @Mock - private FunctionProperties functionProperties; + @Mock private FunctionProperties functionProperties; @Test void testResolve() { - QueryExemplarsTableFunctionResolver queryExemplarsTableFunctionResolver - = new QueryExemplarsTableFunctionResolver(client); + QueryExemplarsTableFunctionResolver queryExemplarsTableFunctionResolver = + new QueryExemplarsTableFunctionResolver(client); FunctionName functionName = FunctionName.of("query_exemplars"); - List expressions - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("starttime", DSL.literal(12345)), - DSL.namedArgument("endtime", DSL.literal(12345))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - Pair resolution - = 
queryExemplarsTableFunctionResolver.resolve(functionSignature); + List expressions = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("starttime", DSL.literal(12345)), + DSL.namedArgument("endtime", DSL.literal(12345))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); + Pair resolution = + queryExemplarsTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, queryExemplarsTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING, LONG, LONG), resolution.getKey().getParamTypeList()); FunctionBuilder functionBuilder = resolution.getValue(); - TableFunctionImplementation functionImplementation - = (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); + TableFunctionImplementation functionImplementation = + (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); assertTrue(functionImplementation instanceof QueryExemplarFunctionImplementation); - QueryExemplarsTable queryExemplarsTable - = (QueryExemplarsTable) functionImplementation.applyArguments(); + QueryExemplarsTable queryExemplarsTable = + (QueryExemplarsTable) functionImplementation.applyArguments(); assertNotNull(queryExemplarsTable.getExemplarsRequest()); PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest = queryExemplarsTable.getExemplarsRequest(); @@ -70,5 +70,4 @@ void testResolve() { assertEquals(12345L, prometheusQueryExemplarsRequest.getStartTime()); assertEquals(12345L, prometheusQueryExemplarsRequest.getEndTime()); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryRangeTableFunctionResolverTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryRangeTableFunctionResolverTest.java index 
0f7aa91abc..48050bcb15 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryRangeTableFunctionResolverTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/resolver/QueryRangeTableFunctionResolverTest.java @@ -31,42 +31,41 @@ import org.opensearch.sql.expression.function.TableFunctionImplementation; import org.opensearch.sql.prometheus.client.PrometheusClient; import org.opensearch.sql.prometheus.functions.implementation.QueryRangeFunctionImplementation; -import org.opensearch.sql.prometheus.functions.resolver.QueryRangeTableFunctionResolver; import org.opensearch.sql.prometheus.request.PrometheusQueryRequest; import org.opensearch.sql.prometheus.storage.PrometheusMetricTable; @ExtendWith(MockitoExtension.class) class QueryRangeTableFunctionResolverTest { - @Mock - private PrometheusClient client; + @Mock private PrometheusClient client; - @Mock - private FunctionProperties functionProperties; + @Mock private FunctionProperties functionProperties; @Test void testResolve() { - QueryRangeTableFunctionResolver queryRangeTableFunctionResolver - = new QueryRangeTableFunctionResolver(client); + QueryRangeTableFunctionResolver queryRangeTableFunctionResolver = + new QueryRangeTableFunctionResolver(client); FunctionName functionName = FunctionName.of("query_range"); - List expressions - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("starttime", DSL.literal(12345)), - DSL.namedArgument("endtime", DSL.literal(12345)), - DSL.namedArgument("step", DSL.literal(14))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - Pair resolution - = queryRangeTableFunctionResolver.resolve(functionSignature); + List expressions = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("starttime", DSL.literal(12345)), + DSL.namedArgument("endtime", 
DSL.literal(12345)), + DSL.namedArgument("step", DSL.literal(14))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); + Pair resolution = + queryRangeTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, queryRangeTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING, LONG, LONG, STRING), resolution.getKey().getParamTypeList()); FunctionBuilder functionBuilder = resolution.getValue(); - TableFunctionImplementation functionImplementation - = (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); + TableFunctionImplementation functionImplementation = + (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); assertTrue(functionImplementation instanceof QueryRangeFunctionImplementation); - PrometheusMetricTable prometheusMetricTable - = (PrometheusMetricTable) functionImplementation.applyArguments(); + PrometheusMetricTable prometheusMetricTable = + (PrometheusMetricTable) functionImplementation.applyArguments(); assertNotNull(prometheusMetricTable.getPrometheusQueryRequest()); PrometheusQueryRequest prometheusQueryRequest = prometheusMetricTable.getPrometheusQueryRequest(); @@ -78,29 +77,31 @@ void testResolve() { @Test void testArgumentsPassedByPosition() { - QueryRangeTableFunctionResolver queryRangeTableFunctionResolver - = new QueryRangeTableFunctionResolver(client); + QueryRangeTableFunctionResolver queryRangeTableFunctionResolver = + new QueryRangeTableFunctionResolver(client); FunctionName functionName = FunctionName.of("query_range"); - List expressions - = List.of(DSL.namedArgument(null, DSL.literal("http_latency")), - DSL.namedArgument(null, DSL.literal(12345)), - DSL.namedArgument(null, DSL.literal(12345)), - DSL.namedArgument(null, DSL.literal(14))); - FunctionSignature functionSignature = 
new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - - Pair resolution - = queryRangeTableFunctionResolver.resolve(functionSignature); + List expressions = + List.of( + DSL.namedArgument(null, DSL.literal("http_latency")), + DSL.namedArgument(null, DSL.literal(12345)), + DSL.namedArgument(null, DSL.literal(12345)), + DSL.namedArgument(null, DSL.literal(14))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); + + Pair resolution = + queryRangeTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, queryRangeTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING, LONG, LONG, STRING), resolution.getKey().getParamTypeList()); FunctionBuilder functionBuilder = resolution.getValue(); - TableFunctionImplementation functionImplementation - = (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); + TableFunctionImplementation functionImplementation = + (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); assertTrue(functionImplementation instanceof QueryRangeFunctionImplementation); - PrometheusMetricTable prometheusMetricTable - = (PrometheusMetricTable) functionImplementation.applyArguments(); + PrometheusMetricTable prometheusMetricTable = + (PrometheusMetricTable) functionImplementation.applyArguments(); assertNotNull(prometheusMetricTable.getPrometheusQueryRequest()); PrometheusQueryRequest prometheusQueryRequest = prometheusMetricTable.getPrometheusQueryRequest(); @@ -110,32 +111,33 @@ void testArgumentsPassedByPosition() { assertEquals("14", prometheusQueryRequest.getStep()); } - @Test void testArgumentsPassedByNameWithDifferentOrder() { - QueryRangeTableFunctionResolver queryRangeTableFunctionResolver - = new 
QueryRangeTableFunctionResolver(client); + QueryRangeTableFunctionResolver queryRangeTableFunctionResolver = + new QueryRangeTableFunctionResolver(client); FunctionName functionName = FunctionName.of("query_range"); - List expressions - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("endtime", DSL.literal(12345)), - DSL.namedArgument("step", DSL.literal(14)), - DSL.namedArgument("starttime", DSL.literal(12345))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - - Pair resolution - = queryRangeTableFunctionResolver.resolve(functionSignature); + List expressions = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("endtime", DSL.literal(12345)), + DSL.namedArgument("step", DSL.literal(14)), + DSL.namedArgument("starttime", DSL.literal(12345))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); + + Pair resolution = + queryRangeTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, queryRangeTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING, LONG, LONG, STRING), resolution.getKey().getParamTypeList()); FunctionBuilder functionBuilder = resolution.getValue(); - TableFunctionImplementation functionImplementation - = (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); + TableFunctionImplementation functionImplementation = + (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); assertTrue(functionImplementation instanceof QueryRangeFunctionImplementation); - PrometheusMetricTable prometheusMetricTable - = (PrometheusMetricTable) functionImplementation.applyArguments(); + PrometheusMetricTable prometheusMetricTable = 
+ (PrometheusMetricTable) functionImplementation.applyArguments(); assertNotNull(prometheusMetricTable.getPrometheusQueryRequest()); PrometheusQueryRequest prometheusQueryRequest = prometheusMetricTable.getPrometheusQueryRequest(); @@ -147,70 +149,81 @@ void testArgumentsPassedByNameWithDifferentOrder() { @Test void testMixedArgumentTypes() { - QueryRangeTableFunctionResolver queryRangeTableFunctionResolver - = new QueryRangeTableFunctionResolver(client); + QueryRangeTableFunctionResolver queryRangeTableFunctionResolver = + new QueryRangeTableFunctionResolver(client); FunctionName functionName = FunctionName.of("query_range"); - List expressions - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument(null, DSL.literal(12345)), - DSL.namedArgument(null, DSL.literal(12345)), - DSL.namedArgument(null, DSL.literal(14))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - Pair resolution - = queryRangeTableFunctionResolver.resolve(functionSignature); + List expressions = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument(null, DSL.literal(12345)), + DSL.namedArgument(null, DSL.literal(12345)), + DSL.namedArgument(null, DSL.literal(14))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); + Pair resolution = + queryRangeTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, queryRangeTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING, LONG, LONG, STRING), resolution.getKey().getParamTypeList()); - SemanticCheckException exception = assertThrows(SemanticCheckException.class, - () -> resolution.getValue().apply(functionProperties, expressions)); + SemanticCheckException exception = + 
assertThrows( + SemanticCheckException.class, + () -> resolution.getValue().apply(functionProperties, expressions)); assertEquals("Arguments should be either passed by name or position", exception.getMessage()); } @Test void testWrongArgumentsSizeWhenPassedByName() { - QueryRangeTableFunctionResolver queryRangeTableFunctionResolver - = new QueryRangeTableFunctionResolver(client); + QueryRangeTableFunctionResolver queryRangeTableFunctionResolver = + new QueryRangeTableFunctionResolver(client); FunctionName functionName = FunctionName.of("query_range"); - List expressions - = List.of(DSL.namedArgument("query", DSL.literal("http_latency")), - DSL.namedArgument("step", DSL.literal(12345))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - Pair resolution - = queryRangeTableFunctionResolver.resolve(functionSignature); + List expressions = + List.of( + DSL.namedArgument("query", DSL.literal("http_latency")), + DSL.namedArgument("step", DSL.literal(12345))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); + Pair resolution = + queryRangeTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, queryRangeTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING, LONG, LONG, STRING), resolution.getKey().getParamTypeList()); - SemanticCheckException exception = assertThrows(SemanticCheckException.class, - () -> resolution.getValue().apply(functionProperties, expressions)); + SemanticCheckException exception = + assertThrows( + SemanticCheckException.class, + () -> resolution.getValue().apply(functionProperties, expressions)); assertEquals("Missing arguments:[endtime,starttime]", exception.getMessage()); } @Test void testWrongArgumentsSizeWhenPassedByPosition() { - 
QueryRangeTableFunctionResolver queryRangeTableFunctionResolver - = new QueryRangeTableFunctionResolver(client); + QueryRangeTableFunctionResolver queryRangeTableFunctionResolver = + new QueryRangeTableFunctionResolver(client); FunctionName functionName = FunctionName.of("query_range"); - List expressions - = List.of(DSL.namedArgument(null, DSL.literal("http_latency")), - DSL.namedArgument(null, DSL.literal(12345))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - Pair resolution - = queryRangeTableFunctionResolver.resolve(functionSignature); + List expressions = + List.of( + DSL.namedArgument(null, DSL.literal("http_latency")), + DSL.namedArgument(null, DSL.literal(12345))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); + Pair resolution = + queryRangeTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, queryRangeTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING, LONG, LONG, STRING), resolution.getKey().getParamTypeList()); - SemanticCheckException exception = assertThrows(SemanticCheckException.class, - () -> resolution.getValue().apply(functionProperties, expressions)); + SemanticCheckException exception = + assertThrows( + SemanticCheckException.class, + () -> resolution.getValue().apply(functionProperties, expressions)); assertEquals("Missing arguments:[endtime,step]", exception.getMessage()); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanBuilderTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanBuilderTest.java index 6fd782b417..bb7806f824 100644 --- 
a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanBuilderTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanBuilderTest.java @@ -7,7 +7,6 @@ package org.opensearch.sql.prometheus.functions.scan; - import static org.opensearch.sql.prometheus.constants.TestConstants.ENDTIME; import static org.opensearch.sql.prometheus.constants.TestConstants.QUERY; import static org.opensearch.sql.prometheus.constants.TestConstants.STARTTIME; @@ -22,40 +21,35 @@ public class QueryExemplarsFunctionTableScanBuilderTest { - @Mock - private PrometheusClient prometheusClient; + @Mock private PrometheusClient prometheusClient; - @Mock - private LogicalProject logicalProject; + @Mock private LogicalProject logicalProject; @Test void testBuild() { - PrometheusQueryExemplarsRequest exemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest exemplarsRequest = new PrometheusQueryExemplarsRequest(); exemplarsRequest.setQuery(QUERY); exemplarsRequest.setStartTime(STARTTIME); exemplarsRequest.setEndTime(ENDTIME); - QueryExemplarsFunctionTableScanBuilder queryExemplarsFunctionTableScanBuilder - = new QueryExemplarsFunctionTableScanBuilder(prometheusClient, exemplarsRequest); - TableScanOperator queryExemplarsFunctionTableScanOperator - = queryExemplarsFunctionTableScanBuilder.build(); + QueryExemplarsFunctionTableScanBuilder queryExemplarsFunctionTableScanBuilder = + new QueryExemplarsFunctionTableScanBuilder(prometheusClient, exemplarsRequest); + TableScanOperator queryExemplarsFunctionTableScanOperator = + queryExemplarsFunctionTableScanBuilder.build(); Assertions.assertNotNull(queryExemplarsFunctionTableScanOperator); - Assertions.assertTrue(queryExemplarsFunctionTableScanOperator - instanceof QueryExemplarsFunctionTableScanOperator); + Assertions.assertTrue( + queryExemplarsFunctionTableScanOperator instanceof 
QueryExemplarsFunctionTableScanOperator); } @Test void testPushProject() { - PrometheusQueryExemplarsRequest exemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest exemplarsRequest = new PrometheusQueryExemplarsRequest(); exemplarsRequest.setQuery(QUERY); exemplarsRequest.setStartTime(STARTTIME); exemplarsRequest.setEndTime(ENDTIME); - QueryExemplarsFunctionTableScanBuilder queryExemplarsFunctionTableScanBuilder - = new QueryExemplarsFunctionTableScanBuilder(prometheusClient, exemplarsRequest); - Assertions.assertTrue(queryExemplarsFunctionTableScanBuilder - .pushDownProject(logicalProject)); + QueryExemplarsFunctionTableScanBuilder queryExemplarsFunctionTableScanBuilder = + new QueryExemplarsFunctionTableScanBuilder(prometheusClient, exemplarsRequest); + Assertions.assertTrue(queryExemplarsFunctionTableScanBuilder.pushDownProject(logicalProject)); } } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanOperatorTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanOperatorTest.java index d4e31d4d1e..5b8cf34fc2 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanOperatorTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryExemplarsFunctionTableScanOperatorTest.java @@ -41,22 +41,21 @@ @ExtendWith(MockitoExtension.class) public class QueryExemplarsFunctionTableScanOperatorTest { - @Mock - private PrometheusClient prometheusClient; + @Mock private PrometheusClient prometheusClient; @Test @SneakyThrows void testQueryResponseIterator() { - PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest = + new PrometheusQueryExemplarsRequest(); prometheusQueryExemplarsRequest.setQuery(QUERY); 
prometheusQueryExemplarsRequest.setStartTime(STARTTIME); prometheusQueryExemplarsRequest.setEndTime(ENDTIME); - QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator - = new QueryExemplarsFunctionTableScanOperator(prometheusClient, - prometheusQueryExemplarsRequest); + QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator = + new QueryExemplarsFunctionTableScanOperator( + prometheusClient, prometheusQueryExemplarsRequest); when(prometheusClient.queryExemplars(any(), any(), any())) .thenReturn(new JSONArray(getJson("query_exemplars_result.json"))); @@ -68,24 +67,28 @@ void testQueryResponseIterator() { seriesLabelsHashMap.put("service", new ExprStringValue("bar")); seriesLabelsHashMap.put("job", new ExprStringValue("prometheus")); LinkedHashMap exemplarMap = new LinkedHashMap<>(); - exemplarMap.put("labels", new ExprTupleValue(new LinkedHashMap<>() { - { - put("traceID", new ExprStringValue("EpTxMJ40fUus7aGY")); - } - }) - ); + exemplarMap.put( + "labels", + new ExprTupleValue( + new LinkedHashMap<>() { + { + put("traceID", new ExprStringValue("EpTxMJ40fUus7aGY")); + } + })); exemplarMap.put("timestamp", new ExprTimestampValue(Instant.ofEpochMilli(1600096945479L))); exemplarMap.put("value", new ExprDoubleValue(6)); List exprValueList = new ArrayList<>(); exprValueList.add(new ExprTupleValue(exemplarMap)); ExprCollectionValue exemplars = new ExprCollectionValue(exprValueList); ExprTupleValue seriesLabels = new ExprTupleValue(seriesLabelsHashMap); - ExprTupleValue firstRow = new ExprTupleValue(new LinkedHashMap<>() { - { - put("seriesLabels", seriesLabels); - put("exemplars", exemplars); - } - }); + ExprTupleValue firstRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put("seriesLabels", seriesLabels); + put("exemplars", exemplars); + } + }); assertEquals(firstRow, queryExemplarsFunctionTableScanOperator.next()); } @@ -93,15 +96,15 @@ void testQueryResponseIterator() { @Test @SneakyThrows void 
testEmptyQueryWithNoMatrixKeyInResultJson() { - PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest = + new PrometheusQueryExemplarsRequest(); prometheusQueryExemplarsRequest.setQuery(QUERY); prometheusQueryExemplarsRequest.setStartTime(STARTTIME); prometheusQueryExemplarsRequest.setEndTime(ENDTIME); - QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator - = new QueryExemplarsFunctionTableScanOperator(prometheusClient, - prometheusQueryExemplarsRequest); + QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator = + new QueryExemplarsFunctionTableScanOperator( + prometheusClient, prometheusQueryExemplarsRequest); when(prometheusClient.queryExemplars(any(), any(), any())) .thenReturn(new JSONArray(getJson("query_exemplars_empty_result.json"))); @@ -113,15 +116,15 @@ void testEmptyQueryWithNoMatrixKeyInResultJson() { @SneakyThrows void testQuerySchema() { - PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest = + new PrometheusQueryExemplarsRequest(); prometheusQueryExemplarsRequest.setQuery(QUERY); prometheusQueryExemplarsRequest.setStartTime(STARTTIME); prometheusQueryExemplarsRequest.setEndTime(ENDTIME); - QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator - = new QueryExemplarsFunctionTableScanOperator(prometheusClient, - prometheusQueryExemplarsRequest); + QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator = + new QueryExemplarsFunctionTableScanOperator( + prometheusClient, prometheusQueryExemplarsRequest); when(prometheusClient.queryExemplars(any(), any(), any())) .thenReturn(new JSONArray(getJson("query_exemplars_result.json"))); @@ -140,53 +143,53 @@ void testQuerySchema() { @SneakyThrows void 
testEmptyQueryWithException() { - PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest = + new PrometheusQueryExemplarsRequest(); prometheusQueryExemplarsRequest.setQuery(QUERY); prometheusQueryExemplarsRequest.setStartTime(STARTTIME); prometheusQueryExemplarsRequest.setEndTime(ENDTIME); - QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator - = new QueryExemplarsFunctionTableScanOperator(prometheusClient, - prometheusQueryExemplarsRequest); + QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator = + new QueryExemplarsFunctionTableScanOperator( + prometheusClient, prometheusQueryExemplarsRequest); when(prometheusClient.queryExemplars(any(), any(), any())) .thenThrow(new IOException("Error Message")); - RuntimeException runtimeException - = assertThrows(RuntimeException.class, queryExemplarsFunctionTableScanOperator::open); - assertEquals("Error fetching data from prometheus server: Error Message", - runtimeException.getMessage()); + RuntimeException runtimeException = + assertThrows(RuntimeException.class, queryExemplarsFunctionTableScanOperator::open); + assertEquals( + "Error fetching data from prometheus server: Error Message", runtimeException.getMessage()); } - @Test @SneakyThrows void testExplain() { - PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest = + new PrometheusQueryExemplarsRequest(); prometheusQueryExemplarsRequest.setQuery(QUERY); prometheusQueryExemplarsRequest.setStartTime(STARTTIME); prometheusQueryExemplarsRequest.setEndTime(ENDTIME); - QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator - = new QueryExemplarsFunctionTableScanOperator(prometheusClient, - prometheusQueryExemplarsRequest); - 
Assertions.assertEquals("query_exemplars(test_query, 1664767694133, 1664771294133)", + QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator = + new QueryExemplarsFunctionTableScanOperator( + prometheusClient, prometheusQueryExemplarsRequest); + Assertions.assertEquals( + "query_exemplars(test_query, 1664767694133, 1664771294133)", queryExemplarsFunctionTableScanOperator.explain()); } @Test @SneakyThrows void testClose() { - PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest prometheusQueryExemplarsRequest = + new PrometheusQueryExemplarsRequest(); prometheusQueryExemplarsRequest.setQuery(QUERY); prometheusQueryExemplarsRequest.setStartTime(STARTTIME); prometheusQueryExemplarsRequest.setEndTime(ENDTIME); - QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator - = new QueryExemplarsFunctionTableScanOperator(prometheusClient, - prometheusQueryExemplarsRequest); + QueryExemplarsFunctionTableScanOperator queryExemplarsFunctionTableScanOperator = + new QueryExemplarsFunctionTableScanOperator( + prometheusClient, prometheusQueryExemplarsRequest); queryExemplarsFunctionTableScanOperator.close(); } } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanBuilderTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanBuilderTest.java index 8532a35395..dca79d6905 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanBuilderTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanBuilderTest.java @@ -7,7 +7,6 @@ package org.opensearch.sql.prometheus.functions.scan; - import static org.opensearch.sql.prometheus.constants.TestConstants.ENDTIME; import static org.opensearch.sql.prometheus.constants.TestConstants.QUERY; 
import static org.opensearch.sql.prometheus.constants.TestConstants.STARTTIME; @@ -23,11 +22,9 @@ public class QueryRangeFunctionTableScanBuilderTest { - @Mock - private PrometheusClient prometheusClient; + @Mock private PrometheusClient prometheusClient; - @Mock - private LogicalProject logicalProject; + @Mock private LogicalProject logicalProject; @Test void testBuild() { @@ -37,13 +34,13 @@ void testBuild() { prometheusQueryRequest.setEndTime(ENDTIME); prometheusQueryRequest.setStep(STEP); - QueryRangeFunctionTableScanBuilder queryRangeFunctionTableScanBuilder - = new QueryRangeFunctionTableScanBuilder(prometheusClient, prometheusQueryRequest); - TableScanOperator queryRangeFunctionTableScanOperator - = queryRangeFunctionTableScanBuilder.build(); + QueryRangeFunctionTableScanBuilder queryRangeFunctionTableScanBuilder = + new QueryRangeFunctionTableScanBuilder(prometheusClient, prometheusQueryRequest); + TableScanOperator queryRangeFunctionTableScanOperator = + queryRangeFunctionTableScanBuilder.build(); Assertions.assertNotNull(queryRangeFunctionTableScanOperator); - Assertions.assertTrue(queryRangeFunctionTableScanOperator - instanceof QueryRangeFunctionTableScanOperator); + Assertions.assertTrue( + queryRangeFunctionTableScanOperator instanceof QueryRangeFunctionTableScanOperator); } @Test @@ -54,8 +51,8 @@ void testPushProject() { prometheusQueryRequest.setEndTime(ENDTIME); prometheusQueryRequest.setStep(STEP); - QueryRangeFunctionTableScanBuilder queryRangeFunctionTableScanBuilder - = new QueryRangeFunctionTableScanBuilder(prometheusClient, prometheusQueryRequest); + QueryRangeFunctionTableScanBuilder queryRangeFunctionTableScanBuilder = + new QueryRangeFunctionTableScanBuilder(prometheusClient, prometheusQueryRequest); Assertions.assertTrue(queryRangeFunctionTableScanBuilder.pushDownProject(logicalProject)); } } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanOperatorTest.java 
b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanOperatorTest.java index b476471153..e59a2bf7c4 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanOperatorTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/functions/scan/QueryRangeFunctionTableScanOperatorTest.java @@ -45,8 +45,7 @@ @ExtendWith(MockitoExtension.class) class QueryRangeFunctionTableScanOperatorTest { - @Mock - private PrometheusClient prometheusClient; + @Mock private PrometheusClient prometheusClient; @Test @SneakyThrows @@ -58,41 +57,63 @@ void testQueryResponseIterator() { prometheusQueryRequest.setEndTime(ENDTIME); prometheusQueryRequest.setStep(STEP); - QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator - = new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); + QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator = + new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); when(prometheusClient.queryRange(any(), any(), any(), any())) .thenReturn(new JSONObject(getJson("query_range_result.json"))); queryRangeFunctionTableScanOperator.open(); Assertions.assertTrue(queryRangeFunctionTableScanOperator.hasNext()); - LinkedHashMap labelsMap = new LinkedHashMap<>() {{ - put("instance", new ExprStringValue("localhost:9090")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("prometheus")); - }}; - ExprTupleValue firstRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put(LABELS, new ExprTupleValue(labelsMap)); - put(TIMESTAMP, new ExprCollectionValue(Collections - .singletonList(new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))))); - put(VALUE, new ExprCollectionValue(Collections.singletonList(new ExprDoubleValue(1)))); - } - }); + LinkedHashMap labelsMap = + new LinkedHashMap<>() { + { + put("instance", new 
ExprStringValue("localhost:9090")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("prometheus")); + } + }; + ExprTupleValue firstRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put(LABELS, new ExprTupleValue(labelsMap)); + put( + TIMESTAMP, + new ExprCollectionValue( + Collections.singletonList( + new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))))); + put( + VALUE, + new ExprCollectionValue(Collections.singletonList(new ExprDoubleValue(1)))); + } + }); assertEquals(firstRow, queryRangeFunctionTableScanOperator.next()); Assertions.assertTrue(queryRangeFunctionTableScanOperator.hasNext()); - LinkedHashMap labelsMap2 = new LinkedHashMap<>() {{ - put("instance", new ExprStringValue("localhost:9091")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("node")); - }}; - ExprTupleValue secondRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put(LABELS, new ExprTupleValue(labelsMap2)); - put(TIMESTAMP, new ExprCollectionValue(Collections - .singletonList(new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))))); - put(VALUE, new ExprCollectionValue(Collections.singletonList(new ExprDoubleValue(0)))); - } - }); + LinkedHashMap labelsMap2 = + new LinkedHashMap<>() { + { + put("instance", new ExprStringValue("localhost:9091")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("node")); + } + }; + ExprTupleValue secondRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put(LABELS, new ExprTupleValue(labelsMap2)); + put( + TIMESTAMP, + new ExprCollectionValue( + Collections.singletonList( + new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))))); + put( + VALUE, + new ExprCollectionValue(Collections.singletonList(new ExprDoubleValue(0)))); + } + }); assertEquals(secondRow, queryRangeFunctionTableScanOperator.next()); Assertions.assertFalse(queryRangeFunctionTableScanOperator.hasNext()); } @@ -106,16 +127,17 @@ void 
testEmptyQueryWithNoMatrixKeyInResultJson() { prometheusQueryRequest.setEndTime(ENDTIME); prometheusQueryRequest.setStep(STEP); - QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator - = new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); + QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator = + new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); when(prometheusClient.queryRange(any(), any(), any(), any())) .thenReturn(new JSONObject(getJson("no_matrix_query_range_result.json"))); - RuntimeException runtimeException - = assertThrows(RuntimeException.class, queryRangeFunctionTableScanOperator::open); + RuntimeException runtimeException = + assertThrows(RuntimeException.class, queryRangeFunctionTableScanOperator::open); assertEquals( "Unexpected Result Type: vector during Prometheus Response Parsing. " - + "'matrix' resultType is expected", runtimeException.getMessage()); + + "'matrix' resultType is expected", + runtimeException.getMessage()); } @Test @@ -127,8 +149,8 @@ void testQuerySchema() { prometheusQueryRequest.setEndTime(ENDTIME); prometheusQueryRequest.setStep(STEP); - QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator - = new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); + QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator = + new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); when(prometheusClient.queryRange(any(), any(), any(), any())) .thenReturn(new JSONObject(getJson("query_range_result.json"))); @@ -150,18 +172,17 @@ void testEmptyQueryWithException() { prometheusQueryRequest.setEndTime(ENDTIME); prometheusQueryRequest.setStep(STEP); - QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator - = new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); + QueryRangeFunctionTableScanOperator 
queryRangeFunctionTableScanOperator = + new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); when(prometheusClient.queryRange(any(), any(), any(), any())) .thenThrow(new IOException("Error Message")); - RuntimeException runtimeException - = assertThrows(RuntimeException.class, queryRangeFunctionTableScanOperator::open); - assertEquals("Error fetching data from prometheus server: Error Message", - runtimeException.getMessage()); + RuntimeException runtimeException = + assertThrows(RuntimeException.class, queryRangeFunctionTableScanOperator::open); + assertEquals( + "Error fetching data from prometheus server: Error Message", runtimeException.getMessage()); } - @Test @SneakyThrows void testExplain() { @@ -171,10 +192,11 @@ void testExplain() { prometheusQueryRequest.setEndTime(ENDTIME); prometheusQueryRequest.setStep(STEP); - QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator - = new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); + QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator = + new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); - Assertions.assertEquals("query_range(test_query, 1664767694133, 1664771294133, 14)", + Assertions.assertEquals( + "query_range(test_query, 1664767694133, 1664771294133, 14)", queryRangeFunctionTableScanOperator.explain()); } @@ -187,8 +209,8 @@ void testClose() { prometheusQueryRequest.setEndTime(ENDTIME); prometheusQueryRequest.setStep(STEP); - QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator - = new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); + QueryRangeFunctionTableScanOperator queryRangeFunctionTableScanOperator = + new QueryRangeFunctionTableScanOperator(prometheusClient, prometheusQueryRequest); queryRangeFunctionTableScanOperator.close(); } } diff --git 
a/prometheus/src/test/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicOptimizerTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicOptimizerTest.java index a1d1cef91d..33c48e2f2d 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicOptimizerTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/planner/logical/PrometheusLogicOptimizerTest.java @@ -32,60 +32,50 @@ @ExtendWith(MockitoExtension.class) public class PrometheusLogicOptimizerTest { - @Mock - private Table table; + @Mock private Table table; @Test void project_filter_merge_with_relation() { assertEquals( project( - indexScan("prometheus_http_total_requests", - DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200")))) - ), + indexScan( + "prometheus_http_total_requests", + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))))), optimize( project( filter( relation("prometheus_http_total_requests", table), - DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))) - )) - ) - ); + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))))))); } @Test void aggregation_merge_relation() { assertEquals( project( - indexScanAgg("prometheus_http_total_requests", ImmutableList - .of(DSL.named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), + indexScanAgg( + "prometheus_http_total_requests", + ImmutableList.of(DSL.named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), ImmutableList.of(DSL.named("code", DSL.ref("code", STRING)))), DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))), optimize( project( aggregation( relation("prometheus_http_total_requests", table), - ImmutableList - .of(DSL.named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(DSL.named("code", - DSL.ref("code", STRING)))), - DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))) - ) - ); + ImmutableList.of(DSL.named("AVG(@value)", 
DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of(DSL.named("code", DSL.ref("code", STRING)))), + DSL.named("AVG(intV)", DSL.ref("AVG(intV)", DOUBLE))))); } - @Test void aggregation_merge_filter_relation() { assertEquals( project( - indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + indexScanAgg( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), - ImmutableList - .of(DSL.named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of(DSL.named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), ImmutableList.of(DSL.named("job", DSL.ref("job", STRING)))), DSL.named("AVG(@value)", DSL.ref("AVG(@value)", DOUBLE))), optimize( @@ -94,25 +84,16 @@ void aggregation_merge_filter_relation() { filter( relation("prometheus_http_total_requests", table), DSL.and( - DSL.equal(DSL.ref("code", STRING), - DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), - DSL.literal(stringValue("/ready/")))) - ), - ImmutableList - .of(DSL.named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(DSL.named("job", - DSL.ref("job", STRING)))), - DSL.named("AVG(@value)", DSL.ref("AVG(@value)", DOUBLE))) - ) - ); + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal( + DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))))), + ImmutableList.of(DSL.named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of(DSL.named("job", DSL.ref("job", STRING)))), + DSL.named("AVG(@value)", DSL.ref("AVG(@value)", DOUBLE))))); } - private LogicalPlan optimize(LogicalPlan plan) { final LogicalPlanOptimizer optimizer = PrometheusLogicalPlanOptimizerFactory.create(); return optimizer.optimize(plan); } - } diff --git 
a/prometheus/src/test/java/org/opensearch/sql/prometheus/request/PrometheusDescribeMetricRequestTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/request/PrometheusDescribeMetricRequestTest.java index dfc9aee7dc..9add7896cf 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/request/PrometheusDescribeMetricRequestTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/request/PrometheusDescribeMetricRequestTest.java @@ -37,54 +37,61 @@ @ExtendWith(MockitoExtension.class) public class PrometheusDescribeMetricRequestTest { - @Mock - private PrometheusClient prometheusClient; + @Mock private PrometheusClient prometheusClient; @Test @SneakyThrows void testGetFieldTypes() { - when(prometheusClient.getLabels(METRIC_NAME)).thenReturn(new ArrayList() {{ - add("call"); - add("code"); - } - }); - Map expected = new HashMap<>() {{ - put("call", ExprCoreType.STRING); - put("code", ExprCoreType.STRING); - put(VALUE, ExprCoreType.DOUBLE); - put(TIMESTAMP, ExprCoreType.TIMESTAMP); - }}; - PrometheusDescribeMetricRequest prometheusDescribeMetricRequest - = new PrometheusDescribeMetricRequest(prometheusClient, - new DataSourceSchemaName("prometheus", "default"), METRIC_NAME); + when(prometheusClient.getLabels(METRIC_NAME)) + .thenReturn( + new ArrayList() { + { + add("call"); + add("code"); + } + }); + Map expected = + new HashMap<>() { + { + put("call", ExprCoreType.STRING); + put("code", ExprCoreType.STRING); + put(VALUE, ExprCoreType.DOUBLE); + put(TIMESTAMP, ExprCoreType.TIMESTAMP); + } + }; + PrometheusDescribeMetricRequest prometheusDescribeMetricRequest = + new PrometheusDescribeMetricRequest( + prometheusClient, new DataSourceSchemaName("prometheus", "default"), METRIC_NAME); assertEquals(expected, prometheusDescribeMetricRequest.getFieldTypes()); verify(prometheusClient, times(1)).getLabels(METRIC_NAME); } - @Test @SneakyThrows void testGetFieldTypesWithEmptyMetricName() { - Map expected = new HashMap<>() {{ - put(VALUE, 
ExprCoreType.DOUBLE); - put(TIMESTAMP, ExprCoreType.TIMESTAMP); - }}; - assertThrows(NullPointerException.class, - () -> new PrometheusDescribeMetricRequest(prometheusClient, - new DataSourceSchemaName("prometheus", "default"), - null)); + Map expected = + new HashMap<>() { + { + put(VALUE, ExprCoreType.DOUBLE); + put(TIMESTAMP, ExprCoreType.TIMESTAMP); + } + }; + assertThrows( + NullPointerException.class, + () -> + new PrometheusDescribeMetricRequest( + prometheusClient, new DataSourceSchemaName("prometheus", "default"), null)); } - @Test @SneakyThrows void testGetFieldTypesWhenException() { when(prometheusClient.getLabels(METRIC_NAME)).thenThrow(new RuntimeException("ERROR Message")); - PrometheusDescribeMetricRequest prometheusDescribeMetricRequest - = new PrometheusDescribeMetricRequest(prometheusClient, - new DataSourceSchemaName("prometheus", "default"), METRIC_NAME); - RuntimeException exception = assertThrows(RuntimeException.class, - prometheusDescribeMetricRequest::getFieldTypes); + PrometheusDescribeMetricRequest prometheusDescribeMetricRequest = + new PrometheusDescribeMetricRequest( + prometheusClient, new DataSourceSchemaName("prometheus", "default"), METRIC_NAME); + RuntimeException exception = + assertThrows(RuntimeException.class, prometheusDescribeMetricRequest::getFieldTypes); verify(prometheusClient, times(1)).getLabels(METRIC_NAME); assertEquals("ERROR Message", exception.getMessage()); } @@ -93,27 +100,30 @@ void testGetFieldTypesWhenException() { @SneakyThrows void testGetFieldTypesWhenIOException() { when(prometheusClient.getLabels(METRIC_NAME)).thenThrow(new IOException("ERROR Message")); - PrometheusDescribeMetricRequest prometheusDescribeMetricRequest - = new PrometheusDescribeMetricRequest(prometheusClient, - new DataSourceSchemaName("prometheus", "default"), METRIC_NAME); - RuntimeException exception = assertThrows(RuntimeException.class, - prometheusDescribeMetricRequest::getFieldTypes); - assertEquals("Error while fetching labels for 
http_requests_total" - + " from prometheus: ERROR Message", exception.getMessage()); + PrometheusDescribeMetricRequest prometheusDescribeMetricRequest = + new PrometheusDescribeMetricRequest( + prometheusClient, new DataSourceSchemaName("prometheus", "default"), METRIC_NAME); + RuntimeException exception = + assertThrows(RuntimeException.class, prometheusDescribeMetricRequest::getFieldTypes); + assertEquals( + "Error while fetching labels for http_requests_total" + " from prometheus: ERROR Message", + exception.getMessage()); verify(prometheusClient, times(1)).getLabels(METRIC_NAME); } @Test @SneakyThrows void testSearch() { - when(prometheusClient.getLabels(METRIC_NAME)).thenReturn(new ArrayList<>() { - { - add("call"); - } - }); - PrometheusDescribeMetricRequest prometheusDescribeMetricRequest - = new PrometheusDescribeMetricRequest(prometheusClient, - new DataSourceSchemaName("test", "default"), METRIC_NAME); + when(prometheusClient.getLabels(METRIC_NAME)) + .thenReturn( + new ArrayList<>() { + { + add("call"); + } + }); + PrometheusDescribeMetricRequest prometheusDescribeMetricRequest = + new PrometheusDescribeMetricRequest( + prometheusClient, new DataSourceSchemaName("test", "default"), METRIC_NAME); List result = prometheusDescribeMetricRequest.search(); assertEquals(3, result.size()); assertEquals(expectedRow(), result.get(0)); @@ -129,5 +139,4 @@ private ExprValue expectedRow() { valueMap.put("DATA_TYPE", stringValue(ExprCoreType.STRING.legacyTypeName().toLowerCase())); return new ExprTupleValue(valueMap); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/request/PrometheusListMetricsRequestTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/request/PrometheusListMetricsRequestTest.java index bf5bb22e96..09f63463b5 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/request/PrometheusListMetricsRequestTest.java +++ 
b/prometheus/src/test/java/org/opensearch/sql/prometheus/request/PrometheusListMetricsRequestTest.java @@ -35,45 +35,46 @@ @ExtendWith(MockitoExtension.class) public class PrometheusListMetricsRequestTest { - @Mock - private PrometheusClient prometheusClient; + @Mock private PrometheusClient prometheusClient; @Test @SneakyThrows void testSearch() { Map> metricsResult = new HashMap<>(); - metricsResult.put("go_gc_duration_seconds", - Collections.singletonList(new MetricMetadata("summary", - "A summary of the pause duration of garbage collection cycles.", ""))); - metricsResult.put("go_goroutines", - Collections.singletonList(new MetricMetadata("gauge", - "Number of goroutines that currently exist.", ""))); + metricsResult.put( + "go_gc_duration_seconds", + Collections.singletonList( + new MetricMetadata( + "summary", "A summary of the pause duration of garbage collection cycles.", ""))); + metricsResult.put( + "go_goroutines", + Collections.singletonList( + new MetricMetadata("gauge", "Number of goroutines that currently exist.", ""))); when(prometheusClient.getAllMetrics()).thenReturn(metricsResult); - PrometheusListMetricsRequest prometheusListMetricsRequest - = new PrometheusListMetricsRequest(prometheusClient, - new DataSourceSchemaName("prometheus", "information_schema")); + PrometheusListMetricsRequest prometheusListMetricsRequest = + new PrometheusListMetricsRequest( + prometheusClient, new DataSourceSchemaName("prometheus", "information_schema")); List result = prometheusListMetricsRequest.search(); assertEquals(expectedRow(), result.get(0)); assertEquals(2, result.size()); verify(prometheusClient, times(1)).getAllMetrics(); } - @Test @SneakyThrows void testSearchWhenIOException() { when(prometheusClient.getAllMetrics()).thenThrow(new IOException("ERROR Message")); - PrometheusListMetricsRequest prometheusListMetricsRequest - = new PrometheusListMetricsRequest(prometheusClient, - new DataSourceSchemaName("prometheus", "information_schema")); - 
RuntimeException exception = assertThrows(RuntimeException.class, - prometheusListMetricsRequest::search); - assertEquals("Error while fetching metric list for from prometheus: ERROR Message", + PrometheusListMetricsRequest prometheusListMetricsRequest = + new PrometheusListMetricsRequest( + prometheusClient, new DataSourceSchemaName("prometheus", "information_schema")); + RuntimeException exception = + assertThrows(RuntimeException.class, prometheusListMetricsRequest::search); + assertEquals( + "Error while fetching metric list for from prometheus: ERROR Message", exception.getMessage()); verify(prometheusClient, times(1)).getAllMetrics(); } - private ExprTupleValue expectedRow() { LinkedHashMap valueMap = new LinkedHashMap<>(); valueMap.put("TABLE_CATALOG", stringValue("prometheus")); @@ -81,9 +82,8 @@ private ExprTupleValue expectedRow() { valueMap.put("TABLE_NAME", stringValue("go_gc_duration_seconds")); valueMap.put("TABLE_TYPE", stringValue("summary")); valueMap.put("UNIT", stringValue("")); - valueMap.put("REMARKS", - stringValue("A summary of the pause duration of garbage collection cycles.")); + valueMap.put( + "REMARKS", stringValue("A summary of the pause duration of garbage collection cycles.")); return new ExprTupleValue(valueMap); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusMetricScanTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusMetricScanTest.java index 68e03c758c..00ddc973bc 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusMetricScanTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusMetricScanTest.java @@ -16,7 +16,6 @@ import static org.opensearch.sql.prometheus.constants.TestConstants.QUERY; import static org.opensearch.sql.prometheus.constants.TestConstants.STARTTIME; import static org.opensearch.sql.prometheus.constants.TestConstants.STEP; -import static 
org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.LABELS; import static org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.TIMESTAMP; import static org.opensearch.sql.prometheus.data.constants.PrometheusFieldConstants.VALUE; import static org.opensearch.sql.prometheus.utils.TestUtils.getJson; @@ -45,8 +44,7 @@ @ExtendWith(MockitoExtension.class) public class PrometheusMetricScanTest { - @Mock - private PrometheusClient prometheusClient; + @Mock private PrometheusClient prometheusClient; @Test @SneakyThrows @@ -61,24 +59,30 @@ void testQueryResponseIterator() { .thenReturn(new JSONObject(getJson("query_range_result.json"))); prometheusMetricScan.open(); Assertions.assertTrue(prometheusMetricScan.hasNext()); - ExprTupleValue firstRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); - put(VALUE, new ExprDoubleValue(1)); - put("instance", new ExprStringValue("localhost:9090")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("prometheus")); - } - }); + ExprTupleValue firstRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); + put(VALUE, new ExprDoubleValue(1)); + put("instance", new ExprStringValue("localhost:9090")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("prometheus")); + } + }); assertEquals(firstRow, prometheusMetricScan.next()); Assertions.assertTrue(prometheusMetricScan.hasNext()); - ExprTupleValue secondRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put("@timestamp", new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); - put("@value", new ExprDoubleValue(0)); - put("instance", new ExprStringValue("localhost:9091")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("node")); - } - }); + ExprTupleValue secondRow = + new ExprTupleValue( + new 
LinkedHashMap<>() { + { + put("@timestamp", new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); + put("@value", new ExprDoubleValue(0)); + put("instance", new ExprStringValue("localhost:9091")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("node")); + } + }); assertEquals(secondRow, prometheusMetricScan.next()); Assertions.assertFalse(prometheusMetricScan.hasNext()); } @@ -86,8 +90,7 @@ void testQueryResponseIterator() { @Test @SneakyThrows void testQueryResponseIteratorWithGivenPrometheusResponseFieldNames() { - PrometheusResponseFieldNames prometheusResponseFieldNames - = new PrometheusResponseFieldNames(); + PrometheusResponseFieldNames prometheusResponseFieldNames = new PrometheusResponseFieldNames(); prometheusResponseFieldNames.setValueFieldName("count()"); prometheusResponseFieldNames.setValueType(INTEGER); prometheusResponseFieldNames.setTimestampFieldName(TIMESTAMP); @@ -102,34 +105,38 @@ void testQueryResponseIteratorWithGivenPrometheusResponseFieldNames() { .thenReturn(new JSONObject(getJson("query_range_result.json"))); prometheusMetricScan.open(); Assertions.assertTrue(prometheusMetricScan.hasNext()); - ExprTupleValue firstRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); - put("count()", new ExprIntegerValue(1)); - put("instance", new ExprStringValue("localhost:9090")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("prometheus")); - } - }); + ExprTupleValue firstRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); + put("count()", new ExprIntegerValue(1)); + put("instance", new ExprStringValue("localhost:9090")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("prometheus")); + } + }); assertEquals(firstRow, prometheusMetricScan.next()); 
Assertions.assertTrue(prometheusMetricScan.hasNext()); - ExprTupleValue secondRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); - put("count()", new ExprIntegerValue(0)); - put("instance", new ExprStringValue("localhost:9091")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("node")); - } - }); + ExprTupleValue secondRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); + put("count()", new ExprIntegerValue(0)); + put("instance", new ExprStringValue("localhost:9091")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("node")); + } + }); assertEquals(secondRow, prometheusMetricScan.next()); Assertions.assertFalse(prometheusMetricScan.hasNext()); } - @Test @SneakyThrows void testQueryResponseIteratorWithGivenPrometheusResponseWithLongInAggType() { - PrometheusResponseFieldNames prometheusResponseFieldNames - = new PrometheusResponseFieldNames(); + PrometheusResponseFieldNames prometheusResponseFieldNames = new PrometheusResponseFieldNames(); prometheusResponseFieldNames.setValueFieldName("testAgg"); prometheusResponseFieldNames.setValueType(LONG); prometheusResponseFieldNames.setTimestampFieldName(TIMESTAMP); @@ -144,24 +151,30 @@ void testQueryResponseIteratorWithGivenPrometheusResponseWithLongInAggType() { .thenReturn(new JSONObject(getJson("query_range_result.json"))); prometheusMetricScan.open(); Assertions.assertTrue(prometheusMetricScan.hasNext()); - ExprTupleValue firstRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); - put("testAgg", new ExprLongValue(1)); - put("instance", new ExprStringValue("localhost:9090")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("prometheus")); - } - }); + ExprTupleValue firstRow = + new 
ExprTupleValue( + new LinkedHashMap<>() { + { + put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); + put("testAgg", new ExprLongValue(1)); + put("instance", new ExprStringValue("localhost:9090")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("prometheus")); + } + }); assertEquals(firstRow, prometheusMetricScan.next()); Assertions.assertTrue(prometheusMetricScan.hasNext()); - ExprTupleValue secondRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); - put("testAgg", new ExprLongValue(0)); - put("instance", new ExprStringValue("localhost:9091")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("node")); - } - }); + ExprTupleValue secondRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); + put("testAgg", new ExprLongValue(0)); + put("instance", new ExprStringValue("localhost:9091")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("node")); + } + }); assertEquals(secondRow, prometheusMetricScan.next()); Assertions.assertFalse(prometheusMetricScan.hasNext()); } @@ -169,8 +182,7 @@ void testQueryResponseIteratorWithGivenPrometheusResponseWithLongInAggType() { @Test @SneakyThrows void testQueryResponseIteratorWithGivenPrometheusResponseWithBackQuotedFieldNames() { - PrometheusResponseFieldNames prometheusResponseFieldNames - = new PrometheusResponseFieldNames(); + PrometheusResponseFieldNames prometheusResponseFieldNames = new PrometheusResponseFieldNames(); prometheusResponseFieldNames.setValueFieldName("testAgg"); prometheusResponseFieldNames.setValueType(LONG); prometheusResponseFieldNames.setTimestampFieldName(TIMESTAMP); @@ -187,29 +199,34 @@ void testQueryResponseIteratorWithGivenPrometheusResponseWithBackQuotedFieldName .thenReturn(new JSONObject(getJson("query_range_result.json"))); 
prometheusMetricScan.open(); Assertions.assertTrue(prometheusMetricScan.hasNext()); - ExprTupleValue firstRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); - put("testAgg", new ExprLongValue(1)); - put("`instance`", new ExprStringValue("localhost:9090")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("prometheus")); - } - }); + ExprTupleValue firstRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); + put("testAgg", new ExprLongValue(1)); + put("`instance`", new ExprStringValue("localhost:9090")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("prometheus")); + } + }); assertEquals(firstRow, prometheusMetricScan.next()); Assertions.assertTrue(prometheusMetricScan.hasNext()); - ExprTupleValue secondRow = new ExprTupleValue(new LinkedHashMap<>() {{ - put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); - put("testAgg", new ExprLongValue(0)); - put("`instance`", new ExprStringValue("localhost:9091")); - put("__name__", new ExprStringValue("up")); - put("job", new ExprStringValue("node")); - } - }); + ExprTupleValue secondRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put(TIMESTAMP, new ExprTimestampValue(Instant.ofEpochMilli(1435781430781L))); + put("testAgg", new ExprLongValue(0)); + put("`instance`", new ExprStringValue("localhost:9091")); + put("__name__", new ExprStringValue("up")); + put("job", new ExprStringValue("node")); + } + }); assertEquals(secondRow, prometheusMetricScan.next()); Assertions.assertFalse(prometheusMetricScan.hasNext()); } - @Test @SneakyThrows void testEmptyQueryResponseIterator() { @@ -236,11 +253,12 @@ void testEmptyQueryWithNoMatrixKeyInResultJson() { when(prometheusClient.queryRange(any(), any(), any(), any())) .thenReturn(new 
JSONObject(getJson("no_matrix_query_range_result.json"))); - RuntimeException runtimeException - = Assertions.assertThrows(RuntimeException.class, prometheusMetricScan::open); + RuntimeException runtimeException = + Assertions.assertThrows(RuntimeException.class, prometheusMetricScan::open); assertEquals( "Unexpected Result Type: vector during Prometheus Response Parsing. " - + "'matrix' resultType is expected", runtimeException.getMessage()); + + "'matrix' resultType is expected", + runtimeException.getMessage()); } @Test @@ -254,13 +272,12 @@ void testEmptyQueryWithException() { when(prometheusClient.queryRange(any(), any(), any(), any())) .thenThrow(new IOException("Error Message")); - RuntimeException runtimeException - = assertThrows(RuntimeException.class, prometheusMetricScan::open); - assertEquals("Error fetching data from prometheus server. Error Message", - runtimeException.getMessage()); + RuntimeException runtimeException = + assertThrows(RuntimeException.class, prometheusMetricScan::open); + assertEquals( + "Error fetching data from prometheus server. 
Error Message", runtimeException.getMessage()); } - @Test @SneakyThrows void testExplain() { @@ -274,5 +291,4 @@ void testExplain() { + "endTime=1664771294133, step=14)", prometheusMetricScan.explain()); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusMetricTableTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusMetricTableTest.java index d43c38fc68..8bdab9244b 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusMetricTableTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusMetricTableTest.java @@ -62,15 +62,14 @@ @ExtendWith(MockitoExtension.class) class PrometheusMetricTableTest { - @Mock - private PrometheusClient client; + @Mock private PrometheusClient client; @Test @SneakyThrows void testGetFieldTypesFromMetric() { when(client.getLabels(TestConstants.METRIC_NAME)).thenReturn(List.of("label1", "label2")); - PrometheusMetricTable prometheusMetricTable - = new PrometheusMetricTable(client, TestConstants.METRIC_NAME); + PrometheusMetricTable prometheusMetricTable = + new PrometheusMetricTable(client, TestConstants.METRIC_NAME); Map expectedFieldTypes = new HashMap<>(); expectedFieldTypes.put("label1", ExprCoreType.STRING); expectedFieldTypes.put("label2", ExprCoreType.STRING); @@ -84,7 +83,7 @@ void testGetFieldTypesFromMetric() { assertNull(prometheusMetricTable.getPrometheusQueryRequest()); assertNotNull(prometheusMetricTable.getMetricName()); - //testing Caching + // testing Caching fieldTypes = prometheusMetricTable.getFieldTypes(); assertEquals(expectedFieldTypes, fieldTypes); @@ -96,8 +95,8 @@ void testGetFieldTypesFromMetric() { @Test @SneakyThrows void testGetFieldTypesFromPrometheusQueryRequest() { - PrometheusMetricTable prometheusMetricTable - = new PrometheusMetricTable(client, new PrometheusQueryRequest()); + PrometheusMetricTable prometheusMetricTable = + new PrometheusMetricTable(client, new 
PrometheusQueryRequest()); Map expectedFieldTypes = new HashMap<>(); expectedFieldTypes.put(VALUE, ExprCoreType.DOUBLE); expectedFieldTypes.put(TIMESTAMP, ExprCoreType.TIMESTAMP); @@ -117,14 +116,17 @@ void testImplementWithBasicMetricQuery() { new PrometheusMetricTable(client, "prometheus_http_requests_total"); List finalProjectList = new ArrayList<>(); finalProjectList.add(named("@value", ref("@value", ExprCoreType.DOUBLE))); - PhysicalPlan plan = prometheusMetricTable.implement( - project(relation("prometheus_http_requests_total", prometheusMetricTable), - finalProjectList, null)); + PhysicalPlan plan = + prometheusMetricTable.implement( + project( + relation("prometheus_http_requests_total", prometheusMetricTable), + finalProjectList, + null)); assertTrue(plan instanceof ProjectOperator); List projectList = ((ProjectOperator) plan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE), outputFields); assertTrue(((ProjectOperator) plan).getInput() instanceof PrometheusMetricScan); PrometheusMetricScan prometheusMetricScan = @@ -133,7 +135,6 @@ void testImplementWithBasicMetricQuery() { assertEquals(3600 / 250 + "s", prometheusMetricScan.getRequest().getStep()); } - @Test void testImplementPrometheusQueryWithStatsQueryAndNoFilter() { @@ -141,16 +142,23 @@ void testImplementPrometheusQueryWithStatsQueryAndNoFilter() { new PrometheusMetricTable(client, "prometheus_http_total_requests"); // IndexScanAgg without Filter - PhysicalPlan plan = prometheusMetricTable.implement( - filter( - indexScanAgg("prometheus_http_total_requests", ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("code", DSL.ref("code", STRING)), - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s")))), - 
DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))))); + PhysicalPlan plan = + prometheusMetricTable.implement( + filter( + indexScanAgg( + "prometheus_http_total_requests", + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + named("code", DSL.ref("code", STRING)), + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s")))), + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))))); assertTrue(plan.getChild().get(0) instanceof PrometheusMetricScan); PrometheusQueryRequest prometheusQueryRequest = @@ -166,28 +174,31 @@ void testImplementPrometheusQueryWithStatsQueryAndFilter() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - // IndexScanAgg with Filter - PhysicalPlan plan = prometheusMetricTable.implement( - indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("job", DSL.ref("job", STRING)), - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s"))))); + PhysicalPlan plan = + prometheusMetricTable.implement( + indexScanAgg( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + named("job", DSL.ref("job", STRING)), + named( + "span", + DSL.span( + 
DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s"))))); assertTrue(plan instanceof PrometheusMetricScan); PrometheusQueryRequest prometheusQueryRequest = ((PrometheusMetricScan) plan).getRequest(); assertEquals( "avg by(job) (avg_over_time" + "(prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}[40s]))", prometheusQueryRequest.getPromQl()); - } - @Test void testImplementPrometheusQueryWithStatsQueryAndFilterAndProject() { @@ -198,77 +209,99 @@ void testImplementPrometheusQueryWithStatsQueryAndFilterAndProject() { List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); finalProjectList.add(DSL.named(TIMESTAMP, DSL.ref(TIMESTAMP, ExprCoreType.TIMESTAMP))); - PhysicalPlan plan = prometheusMetricTable.implement( - project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), - ImmutableList - .of(DSL.named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(DSL.named("job", DSL.ref("job", STRING)), - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s")))), - finalProjectList, null)); + PhysicalPlan plan = + prometheusMetricTable.implement( + project( + indexScanAgg( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), + ImmutableList.of(DSL.named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + DSL.named("job", DSL.ref("job", STRING)), + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s")))), + finalProjectList, + null)); assertTrue(plan instanceof ProjectOperator); assertTrue(((ProjectOperator) plan).getInput() instanceof PrometheusMetricScan); - 
PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); assertEquals(request.getStep(), "40s"); - assertEquals("avg by(job) (avg_over_time" + assertEquals( + "avg by(job) (avg_over_time" + "(prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}[40s]))", request.getPromQl()); List projectList = ((ProjectOperator) plan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } - @Test void testTimeRangeResolver() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - - //Both endTime and startTime are set. + // Both endTime and startTime are set. 
List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); finalProjectList.add(DSL.named(TIMESTAMP, DSL.ref(TIMESTAMP, ExprCoreType.TIMESTAMP))); Long endTime = new Date(System.currentTimeMillis()).getTime(); Long startTime = new Date(System.currentTimeMillis() - 4800 * 1000).getTime(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - PhysicalPlan plan = prometheusMetricTable.implement( - project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + PhysicalPlan plan = + prometheusMetricTable.implement( + project( + indexScanAgg( + "prometheus_http_total_requests", DSL.and( - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), - DSL.and(DSL.gte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(startTime)), - ExprCoreType.TIMESTAMP))), - DSL.lte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), - ExprCoreType.TIMESTAMP)))))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("job", DSL.ref("job", STRING)), - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s")))), - finalProjectList, null)); + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.and( + DSL.equal( + DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), + DSL.and( + DSL.gte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(startTime)), + ExprCoreType.TIMESTAMP))), + DSL.lte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(endTime)), + ExprCoreType.TIMESTAMP)))))), + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + 
ImmutableList.of( + named("job", DSL.ref("job", STRING)), + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s")))), + finalProjectList, + null)); assertTrue(plan instanceof ProjectOperator); assertTrue(((ProjectOperator) plan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); assertEquals("40s", request.getStep()); - assertEquals("avg by(job) (avg_over_time" + assertEquals( + "avg by(job) (avg_over_time" + "(prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}[40s]))", request.getPromQl()); List projectList = ((ProjectOperator) plan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } @@ -278,40 +311,51 @@ void testTimeRangeResolverWithOutEndTimeInFilter() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - - //Only endTime is set. + // Only endTime is set. 
List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); finalProjectList.add(DSL.named(TIMESTAMP, DSL.ref(TIMESTAMP, ExprCoreType.TIMESTAMP))); Long startTime = new Date(System.currentTimeMillis() - 4800 * 1000).getTime(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - PhysicalPlan plan = prometheusMetricTable.implement( - project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + PhysicalPlan plan = + prometheusMetricTable.implement( + project( + indexScanAgg( + "prometheus_http_total_requests", DSL.and( - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), - DSL.gte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(startTime)), - ExprCoreType.TIMESTAMP))))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("job", DSL.ref("job", STRING)), - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s")))), - finalProjectList, null)); + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.and( + DSL.equal( + DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), + DSL.gte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(startTime)), + ExprCoreType.TIMESTAMP))))), + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + named("job", DSL.ref("job", STRING)), + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s")))), + finalProjectList, + null)); assertTrue(plan instanceof ProjectOperator); assertTrue(((ProjectOperator) plan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) 
plan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); assertEquals("40s", request.getStep()); - assertEquals("avg by(job) (avg_over_time" + assertEquals( + "avg by(job) (avg_over_time" + "(prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}[40s]))", request.getPromQl()); List projectList = ((ProjectOperator) plan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } @@ -321,78 +365,95 @@ void testTimeRangeResolverWithOutStartTimeInFilter() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - - //Both endTime and startTime are set. + // Both endTime and startTime are set. List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); finalProjectList.add(DSL.named(TIMESTAMP, DSL.ref(TIMESTAMP, ExprCoreType.TIMESTAMP))); Long endTime = new Date(System.currentTimeMillis() - 4800 * 1000).getTime(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - PhysicalPlan plan = prometheusMetricTable.implement( - project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + PhysicalPlan plan = + prometheusMetricTable.implement( + project( + indexScanAgg( + "prometheus_http_total_requests", DSL.and( - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), - DSL.lte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), - ExprCoreType.TIMESTAMP))))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("job", 
DSL.ref("job", STRING)), - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s")))), - finalProjectList, null)); + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.and( + DSL.equal( + DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), + DSL.lte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(endTime)), + ExprCoreType.TIMESTAMP))))), + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + named("job", DSL.ref("job", STRING)), + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s")))), + finalProjectList, + null)); assertTrue(plan instanceof ProjectOperator); assertTrue(((ProjectOperator) plan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); assertEquals("40s", request.getStep()); - assertEquals("avg by(job) (avg_over_time" + assertEquals( + "avg by(job) (avg_over_time" + "(prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}[40s]))", request.getPromQl()); List projectList = ((ProjectOperator) plan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } - @Test void testSpanResolverWithoutSpanExpression() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); Long endTime = new 
Date(System.currentTimeMillis()).getTime(); Long startTime = new Date(System.currentTimeMillis() - 4800 * 1000).getTime(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - LogicalPlan plan = project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + LogicalPlan plan = + project( + indexScanAgg( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), DSL.and( DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), - DSL.and(DSL.gte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.and( + DSL.gte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), DSL.literal( - fromObjectValue(dateFormat.format(new Date(startTime)), + fromObjectValue( + dateFormat.format(new Date(startTime)), ExprCoreType.TIMESTAMP))), - DSL.lte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.lte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), + fromObjectValue( + dateFormat.format(new Date(endTime)), ExprCoreType.TIMESTAMP)))))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), null), - finalProjectList, null); - RuntimeException runtimeException - = Assertions.assertThrows(RuntimeException.class, - () -> prometheusMetricTable.implement(plan)); - Assertions.assertEquals("Prometheus Catalog doesn't support " - + "aggregations without span expression", + finalProjectList, + null); + RuntimeException runtimeException = + Assertions.assertThrows( + RuntimeException.class, () -> prometheusMetricTable.implement(plan)); + Assertions.assertEquals( + "Prometheus Catalog doesn't support " + "aggregations without span expression", runtimeException.getMessage()); } @@ -402,34 +463,41 @@ void testSpanResolverWithEmptyGroupByList() { 
PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); Long endTime = new Date(System.currentTimeMillis()).getTime(); Long startTime = new Date(System.currentTimeMillis() - 4800 * 1000).getTime(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - LogicalPlan plan = project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + LogicalPlan plan = + project( + indexScanAgg( + "prometheus_http_total_requests", DSL.and( - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), - DSL.and(DSL.gte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(startTime)), - ExprCoreType.TIMESTAMP))), - DSL.lte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), - ExprCoreType.TIMESTAMP)))))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of()), - finalProjectList, null); - RuntimeException runtimeException - = Assertions.assertThrows(RuntimeException.class, - () -> prometheusMetricTable.implement(plan)); - Assertions.assertEquals("Prometheus Catalog doesn't support " - + "aggregations without span expression", + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.and( + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), + DSL.and( + DSL.gte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(startTime)), + ExprCoreType.TIMESTAMP))), + DSL.lte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(endTime)), + ExprCoreType.TIMESTAMP)))))), + 
ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of()), + finalProjectList, + null); + RuntimeException runtimeException = + Assertions.assertThrows( + RuntimeException.class, () -> prometheusMetricTable.implement(plan)); + Assertions.assertEquals( + "Prometheus Catalog doesn't support " + "aggregations without span expression", runtimeException.getMessage()); } @@ -439,44 +507,58 @@ void testSpanResolverWithSpanExpression() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); finalProjectList.add(DSL.named(TIMESTAMP, DSL.ref(TIMESTAMP, ExprCoreType.TIMESTAMP))); Long endTime = new Date(System.currentTimeMillis()).getTime(); Long startTime = new Date(System.currentTimeMillis() - 4800 * 1000).getTime(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - PhysicalPlan plan = prometheusMetricTable.implement( - project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + PhysicalPlan plan = + prometheusMetricTable.implement( + project( + indexScanAgg( + "prometheus_http_total_requests", DSL.and( - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), - DSL.and(DSL.gte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(startTime)), - ExprCoreType.TIMESTAMP))), - DSL.lte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), - ExprCoreType.TIMESTAMP)))))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("job", DSL.ref("job", STRING)), - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s")))), - finalProjectList, null)); + 
DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.and( + DSL.equal( + DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), + DSL.and( + DSL.gte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(startTime)), + ExprCoreType.TIMESTAMP))), + DSL.lte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(endTime)), + ExprCoreType.TIMESTAMP)))))), + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + named("job", DSL.ref("job", STRING)), + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s")))), + finalProjectList, + null)); assertTrue(plan instanceof ProjectOperator); assertTrue(((ProjectOperator) plan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); assertEquals("40s", request.getStep()); - assertEquals("avg by(job) (avg_over_time" + assertEquals( + "avg by(job) (avg_over_time" + "(prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}[40s]))", request.getPromQl()); List projectList = ((ProjectOperator) plan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } @@ -486,35 +568,45 @@ void testExpressionWithMissingTimeUnitInSpanExpression() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); 
finalProjectList.add(DSL.named(TIMESTAMP, DSL.ref(TIMESTAMP, ExprCoreType.TIMESTAMP))); Long endTime = new Date(System.currentTimeMillis()).getTime(); Long startTime = new Date(System.currentTimeMillis() - 4800 * 1000).getTime(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - LogicalPlan logicalPlan = project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + LogicalPlan logicalPlan = + project( + indexScanAgg( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), DSL.and( DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), - DSL.and(DSL.gte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.and( + DSL.gte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), DSL.literal( - fromObjectValue(dateFormat.format(new Date(startTime)), + fromObjectValue( + dateFormat.format(new Date(startTime)), ExprCoreType.TIMESTAMP))), - DSL.lte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.lte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), + fromObjectValue( + dateFormat.format(new Date(endTime)), ExprCoreType.TIMESTAMP)))))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("job", DSL.ref("job", STRING)), - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "")))), - finalProjectList, null); + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + named("job", DSL.ref("job", STRING)), + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), DSL.literal(40), "")))), + finalProjectList, + null); RuntimeException exception = - Assertions.assertThrows(RuntimeException.class, - () -> prometheusMetricTable.implement(logicalPlan)); + Assertions.assertThrows( + 
RuntimeException.class, () -> prometheusMetricTable.implement(logicalPlan)); assertEquals("Missing TimeUnit in the span expression", exception.getMessage()); } @@ -524,44 +616,57 @@ void testPrometheusQueryWithOnlySpanExpressionInGroupByList() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); finalProjectList.add(DSL.named(TIMESTAMP, DSL.ref(TIMESTAMP, ExprCoreType.TIMESTAMP))); Long endTime = new Date(System.currentTimeMillis()).getTime(); Long startTime = new Date(System.currentTimeMillis() - 4800 * 1000).getTime(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - PhysicalPlan plan = prometheusMetricTable.implement( - project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + PhysicalPlan plan = + prometheusMetricTable.implement( + project( + indexScanAgg( + "prometheus_http_total_requests", DSL.and( - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), - DSL.and(DSL.gte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(startTime)), - ExprCoreType.TIMESTAMP))), - DSL.lte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), - ExprCoreType.TIMESTAMP)))))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of( - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s")))), - finalProjectList, null)); + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.and( + DSL.equal( + DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), + DSL.and( + DSL.gte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + 
dateFormat.format(new Date(startTime)), + ExprCoreType.TIMESTAMP))), + DSL.lte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(endTime)), + ExprCoreType.TIMESTAMP)))))), + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s")))), + finalProjectList, + null)); assertTrue(plan instanceof ProjectOperator); assertTrue(((ProjectOperator) plan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); assertEquals("40s", request.getStep()); - assertEquals("avg (avg_over_time" + assertEquals( + "avg (avg_over_time" + "(prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}[40s]))", request.getPromQl()); List projectList = ((ProjectOperator) plan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } @@ -571,44 +676,57 @@ void testStatsWithNoGroupByList() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - List finalProjectList = new ArrayList<>(); finalProjectList.add(DSL.named(VALUE, DSL.ref(VALUE, STRING))); finalProjectList.add(DSL.named(TIMESTAMP, DSL.ref(TIMESTAMP, ExprCoreType.TIMESTAMP))); Long endTime = new Date(System.currentTimeMillis()).getTime(); Long startTime = new Date(System.currentTimeMillis() - 4800 * 1000).getTime(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - PhysicalPlan plan = 
prometheusMetricTable.implement( - project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + PhysicalPlan plan = + prometheusMetricTable.implement( + project( + indexScanAgg( + "prometheus_http_total_requests", DSL.and( - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), - DSL.and(DSL.gte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(startTime)), - ExprCoreType.TIMESTAMP))), - DSL.lte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), - ExprCoreType.TIMESTAMP)))))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("span", - DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s")))), - finalProjectList, null)); + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.and( + DSL.equal( + DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))), + DSL.and( + DSL.gte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(startTime)), + ExprCoreType.TIMESTAMP))), + DSL.lte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(endTime)), + ExprCoreType.TIMESTAMP)))))), + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s")))), + finalProjectList, + null)); assertTrue(plan instanceof ProjectOperator); assertTrue(((ProjectOperator) plan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) plan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) 
plan).getInput()).getRequest(); assertEquals("40s", request.getStep()); - assertEquals("avg (avg_over_time" + assertEquals( + "avg (avg_over_time" + "(prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}[40s]))", request.getPromQl()); List projectList = ((ProjectOperator) plan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } @@ -617,9 +735,11 @@ void testImplementWithUnexpectedLogicalNode() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); LogicalPlan plan = project(testLogicalPlanNode()); - RuntimeException runtimeException = Assertions.assertThrows(RuntimeException.class, - () -> prometheusMetricTable.implement(plan)); - assertEquals("unexpected plan node type class" + RuntimeException runtimeException = + Assertions.assertThrows( + RuntimeException.class, () -> prometheusMetricTable.implement(plan)); + assertEquals( + "unexpected plan node type class" + " org.opensearch.sql.prometheus.utils.LogicalPlanUtils$TestLogicalPlan", runtimeException.getMessage()); } @@ -629,37 +749,44 @@ void testMultipleAggregationsThrowsRuntimeException() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - LogicalPlan plan = project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER))), - named("SUM(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("job", DSL.ref("job", STRING))))); - - RuntimeException runtimeException = 
Assertions.assertThrows(RuntimeException.class, - () -> prometheusMetricTable.implement(plan)); - assertEquals("Prometheus Catalog doesn't multiple aggregations in stats command", + LogicalPlan plan = + project( + indexScanAgg( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), + ImmutableList.of( + named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER))), + named("SUM(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of(named("job", DSL.ref("job", STRING))))); + + RuntimeException runtimeException = + Assertions.assertThrows( + RuntimeException.class, () -> prometheusMetricTable.implement(plan)); + assertEquals( + "Prometheus Catalog doesn't multiple aggregations in stats command", runtimeException.getMessage()); } - @Test void testUnSupportedAggregation() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - LogicalPlan plan = project(indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), - ImmutableList - .of(named("VAR_SAMP(@value)", - DSL.varSamp(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("job", DSL.ref("job", STRING))))); - - RuntimeException runtimeException = Assertions.assertThrows(RuntimeException.class, - () -> prometheusMetricTable.implement(plan)); + LogicalPlan plan = + project( + indexScanAgg( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), + ImmutableList.of( + named("VAR_SAMP(@value)", DSL.varSamp(DSL.ref("@value", INTEGER)))), + ImmutableList.of(named("job", DSL.ref("job", STRING))))); + + RuntimeException 
runtimeException = + Assertions.assertThrows( + RuntimeException.class, () -> prometheusMetricTable.implement(plan)); assertTrue(runtimeException.getMessage().contains("Prometheus Catalog only supports")); } @@ -667,13 +794,16 @@ void testUnSupportedAggregation() { void testImplementWithORConditionInWhereClause() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - LogicalPlan plan = indexScan("prometheus_http_total_requests", - DSL.or(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))))); - RuntimeException exception - = assertThrows(RuntimeException.class, () -> prometheusMetricTable.implement(plan)); - assertEquals("Prometheus Datasource doesn't support or in where command.", - exception.getMessage()); + LogicalPlan plan = + indexScan( + "prometheus_http_total_requests", + DSL.or( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))))); + RuntimeException exception = + assertThrows(RuntimeException.class, () -> prometheusMetricTable.implement(plan)); + assertEquals( + "Prometheus Datasource doesn't support or in where command.", exception.getMessage()); } @Test @@ -683,21 +813,26 @@ void testImplementWithRelationAndFilter() { finalProjectList.add(DSL.named(TIMESTAMP, DSL.ref(TIMESTAMP, ExprCoreType.TIMESTAMP))); PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - LogicalPlan logicalPlan = project(indexScan("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))))), - finalProjectList, null); + LogicalPlan logicalPlan = + project( + indexScan( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", 
STRING), DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))))), + finalProjectList, + null); PhysicalPlan physicalPlan = prometheusMetricTable.implement(logicalPlan); assertTrue(physicalPlan instanceof ProjectOperator); assertTrue(((ProjectOperator) physicalPlan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) physicalPlan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) physicalPlan).getInput()).getRequest(); assertEquals((3600 / 250) + "s", request.getStep()); - assertEquals("prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}", - request.getPromQl()); + assertEquals( + "prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}", request.getPromQl()); List projectList = ((ProjectOperator) physicalPlan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } @@ -710,27 +845,30 @@ void testImplementWithRelationAndTimestampFilter() { Long endTime = new Date(System.currentTimeMillis()).getTime(); PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - LogicalPlan logicalPlan = project(indexScan("prometheus_http_total_requests", - DSL.lte(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), - ExprCoreType.TIMESTAMP))) - ), finalProjectList, null); + LogicalPlan logicalPlan = + project( + indexScan( + "prometheus_http_total_requests", + DSL.lte( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(endTime)), ExprCoreType.TIMESTAMP)))), + 
finalProjectList, + null); PhysicalPlan physicalPlan = prometheusMetricTable.implement(logicalPlan); assertTrue(physicalPlan instanceof ProjectOperator); assertTrue(((ProjectOperator) physicalPlan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) physicalPlan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) physicalPlan).getInput()).getRequest(); assertEquals((3600 / 250) + "s", request.getStep()); - assertEquals("prometheus_http_total_requests", - request.getPromQl()); + assertEquals("prometheus_http_total_requests", request.getPromQl()); List projectList = ((ProjectOperator) physicalPlan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } - @Test void testImplementWithRelationAndTimestampLTFilter() { List finalProjectList = new ArrayList<>(); @@ -740,27 +878,30 @@ void testImplementWithRelationAndTimestampLTFilter() { Long endTime = new Date(System.currentTimeMillis()).getTime(); PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - LogicalPlan logicalPlan = project(indexScan("prometheus_http_total_requests", - DSL.less(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), - ExprCoreType.TIMESTAMP))) - ), finalProjectList, null); + LogicalPlan logicalPlan = + project( + indexScan( + "prometheus_http_total_requests", + DSL.less( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(endTime)), ExprCoreType.TIMESTAMP)))), + finalProjectList, + null); PhysicalPlan physicalPlan = prometheusMetricTable.implement(logicalPlan); 
assertTrue(physicalPlan instanceof ProjectOperator); assertTrue(((ProjectOperator) physicalPlan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) physicalPlan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) physicalPlan).getInput()).getRequest(); assertEquals((3600 / 250) + "s", request.getStep()); - assertEquals("prometheus_http_total_requests", - request.getPromQl()); + assertEquals("prometheus_http_total_requests", request.getPromQl()); List projectList = ((ProjectOperator) physicalPlan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } - @Test void testImplementWithRelationAndTimestampGTFilter() { List finalProjectList = new ArrayList<>(); @@ -770,23 +911,27 @@ void testImplementWithRelationAndTimestampGTFilter() { Long endTime = new Date(System.currentTimeMillis()).getTime(); PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - LogicalPlan logicalPlan = project(indexScan("prometheus_http_total_requests", - DSL.greater(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal( - fromObjectValue(dateFormat.format(new Date(endTime)), - ExprCoreType.TIMESTAMP))) - ), finalProjectList, null); + LogicalPlan logicalPlan = + project( + indexScan( + "prometheus_http_total_requests", + DSL.greater( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal( + fromObjectValue( + dateFormat.format(new Date(endTime)), ExprCoreType.TIMESTAMP)))), + finalProjectList, + null); PhysicalPlan physicalPlan = prometheusMetricTable.implement(logicalPlan); assertTrue(physicalPlan instanceof ProjectOperator); assertTrue(((ProjectOperator) 
physicalPlan).getInput() instanceof PrometheusMetricScan); - PrometheusQueryRequest request - = ((PrometheusMetricScan) ((ProjectOperator) physicalPlan).getInput()).getRequest(); + PrometheusQueryRequest request = + ((PrometheusMetricScan) ((ProjectOperator) physicalPlan).getInput()).getRequest(); assertEquals((3600 / 250) + "s", request.getStep()); - assertEquals("prometheus_http_total_requests", - request.getPromQl()); + assertEquals("prometheus_http_total_requests", request.getPromQl()); List projectList = ((ProjectOperator) physicalPlan).getProjectList(); - List outputFields - = projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); + List outputFields = + projectList.stream().map(NamedExpression::getName).collect(Collectors.toList()); assertEquals(List.of(VALUE, TIMESTAMP), outputFields); } @@ -796,10 +941,9 @@ void testOptimize() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, prometheusQueryRequest); List finalProjectList = new ArrayList<>(); - LogicalPlan inputPlan = project(relation("query_range", prometheusMetricTable), - finalProjectList, null); - LogicalPlan optimizedPlan = prometheusMetricTable.optimize( - inputPlan); + LogicalPlan inputPlan = + project(relation("query_range", prometheusMetricTable), finalProjectList, null); + LogicalPlan optimizedPlan = prometheusMetricTable.optimize(inputPlan); assertEquals(inputPlan, optimizedPlan); } @@ -810,7 +954,8 @@ void testUnsupportedOperation() { new PrometheusMetricTable(client, prometheusQueryRequest); assertThrows(UnsupportedOperationException.class, prometheusMetricTable::exists); - assertThrows(UnsupportedOperationException.class, + assertThrows( + UnsupportedOperationException.class, () -> prometheusMetricTable.create(Collections.emptyMap())); } @@ -820,25 +965,29 @@ void testImplementPrometheusQueryWithBackQuotedFieldNamesInStatsQuery() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, 
"prometheus_http_total_requests"); - // IndexScanAgg with Filter - PhysicalPlan plan = prometheusMetricTable.implement( - indexScanAgg("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), - ImmutableList - .of(named("AVG(@value)", - DSL.avg(DSL.ref("@value", INTEGER)))), - ImmutableList.of(named("`job`", DSL.ref("job", STRING)), - named("span", DSL.span(DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), - DSL.literal(40), "s"))))); + PhysicalPlan plan = + prometheusMetricTable.implement( + indexScanAgg( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))), + ImmutableList.of(named("AVG(@value)", DSL.avg(DSL.ref("@value", INTEGER)))), + ImmutableList.of( + named("`job`", DSL.ref("job", STRING)), + named( + "span", + DSL.span( + DSL.ref("@timestamp", ExprCoreType.TIMESTAMP), + DSL.literal(40), + "s"))))); assertTrue(plan instanceof PrometheusMetricScan); PrometheusQueryRequest prometheusQueryRequest = ((PrometheusMetricScan) plan).getRequest(); assertEquals( "avg by(job) (avg_over_time" + "(prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}[40s]))", prometheusQueryRequest.getPromQl()); - } @Test @@ -848,14 +997,16 @@ void testImplementPrometheusQueryWithFilterQuery() { new PrometheusMetricTable(client, "prometheus_http_total_requests"); // IndexScanAgg without Filter - PhysicalPlan plan = prometheusMetricTable.implement( - indexScan("prometheus_http_total_requests", - DSL.and(DSL.equal(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))))); + PhysicalPlan plan = + prometheusMetricTable.implement( + indexScan( + "prometheus_http_total_requests", + DSL.and( + DSL.equal(DSL.ref("code", STRING), 
DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/")))))); assertTrue(plan instanceof PrometheusMetricScan); - PrometheusQueryRequest prometheusQueryRequest = - ((PrometheusMetricScan) plan).getRequest(); + PrometheusQueryRequest prometheusQueryRequest = ((PrometheusMetricScan) plan).getRequest(); assertEquals( "prometheus_http_total_requests{code=\"200\" , handler=\"/ready/\"}", prometheusQueryRequest.getPromQl()); @@ -867,15 +1018,22 @@ void testImplementPrometheusQueryWithUnsupportedFilterQuery() { PrometheusMetricTable prometheusMetricTable = new PrometheusMetricTable(client, "prometheus_http_total_requests"); - RuntimeException exception = assertThrows(RuntimeException.class, - () -> prometheusMetricTable.implement(indexScan("prometheus_http_total_requests", - DSL.and(DSL.lte(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))))))); - assertEquals("Prometheus Datasource doesn't support <= in where command.", - exception.getMessage()); + RuntimeException exception = + assertThrows( + RuntimeException.class, + () -> + prometheusMetricTable.implement( + indexScan( + "prometheus_http_total_requests", + DSL.and( + DSL.lte(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal( + DSL.ref("handler", STRING), + DSL.literal(stringValue("/ready/"))))))); + assertEquals( + "Prometheus Datasource doesn't support <= in where command.", exception.getMessage()); } - @Test void testCreateScanBuilderWithQueryRangeTableFunction() { PrometheusQueryRequest prometheusQueryRequest = new PrometheusQueryRequest(); @@ -895,5 +1053,4 @@ void testCreateScanBuilderWithPPLQuery() { TableScanBuilder tableScanBuilder = prometheusMetricTable.createScanBuilder(); Assertions.assertNull(tableScanBuilder); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageEngineTest.java 
b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageEngineTest.java index 4e8d470373..b925fe6538 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageEngineTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageEngineTest.java @@ -29,8 +29,7 @@ @ExtendWith(MockitoExtension.class) class PrometheusStorageEngineTest { - @Mock - private PrometheusClient client; + @Mock private PrometheusClient client; @Test public void getTable() { @@ -43,15 +42,12 @@ public void getTable() { @Test public void getFunctions() { PrometheusStorageEngine engine = new PrometheusStorageEngine(client); - Collection functionResolverCollection - = engine.getFunctions(); + Collection functionResolverCollection = engine.getFunctions(); assertNotNull(functionResolverCollection); assertEquals(2, functionResolverCollection.size()); Iterator iterator = functionResolverCollection.iterator(); - assertTrue( - iterator.next() instanceof QueryRangeTableFunctionResolver); - assertTrue( - iterator.next() instanceof QueryExemplarsTableFunctionResolver); + assertTrue(iterator.next() instanceof QueryRangeTableFunctionResolver); + assertTrue(iterator.next() instanceof QueryExemplarsTableFunctionResolver); } @Test @@ -65,8 +61,8 @@ public void getSystemTable() { @Test public void getSystemTableForAllTablesInfo() { PrometheusStorageEngine engine = new PrometheusStorageEngine(client); - Table table - = engine.getTable(new DataSourceSchemaName("prometheus", "information_schema"), "tables"); + Table table = + engine.getTable(new DataSourceSchemaName("prometheus", "information_schema"), "tables"); assertNotNull(table); assertTrue(table instanceof PrometheusSystemTable); } @@ -74,10 +70,12 @@ public void getSystemTableForAllTablesInfo() { @Test public void getSystemTableWithWrongInformationSchemaTable() { PrometheusStorageEngine engine = new PrometheusStorageEngine(client); - SemanticCheckException exception 
= assertThrows(SemanticCheckException.class, - () -> engine.getTable(new DataSourceSchemaName("prometheus", "information_schema"), - "test")); + SemanticCheckException exception = + assertThrows( + SemanticCheckException.class, + () -> + engine.getTable( + new DataSourceSchemaName("prometheus", "information_schema"), "test")); assertEquals("Information Schema doesn't contain test table", exception.getMessage()); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactoryTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactoryTest.java index 41ac7ff144..c2e8e5325a 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactoryTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactoryTest.java @@ -17,7 +17,6 @@ import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; -import org.opensearch.cluster.ClusterName; import org.opensearch.sql.common.setting.Settings; import org.opensearch.sql.datasource.model.DataSource; import org.opensearch.sql.datasource.model.DataSourceMetadata; @@ -27,8 +26,7 @@ @ExtendWith(MockitoExtension.class) public class PrometheusStorageFactoryTest { - @Mock - private Settings settings; + @Mock private Settings settings; @Test void testGetConnectorType() { @@ -47,8 +45,7 @@ void testGetStorageEngineWithBasicAuth() { properties.put("prometheus.auth.type", "basicauth"); properties.put("prometheus.auth.username", "admin"); properties.put("prometheus.auth.password", "admin"); - StorageEngine storageEngine - = prometheusStorageFactory.getStorageEngine(properties); + StorageEngine storageEngine = prometheusStorageFactory.getStorageEngine(properties); Assertions.assertTrue(storageEngine instanceof PrometheusStorageEngine); } @@ -63,12 +60,10 @@ void testGetStorageEngineWithAWSSigV4Auth() { 
properties.put("prometheus.auth.region", "us-east-1"); properties.put("prometheus.auth.secret_key", "accessKey"); properties.put("prometheus.auth.access_key", "secretKey"); - StorageEngine storageEngine - = prometheusStorageFactory.getStorageEngine(properties); + StorageEngine storageEngine = prometheusStorageFactory.getStorageEngine(properties); Assertions.assertTrue(storageEngine instanceof PrometheusStorageEngine); } - @Test @SneakyThrows void testGetStorageEngineWithMissingURI() { @@ -78,10 +73,12 @@ void testGetStorageEngineWithMissingURI() { properties.put("prometheus.auth.region", "us-east-1"); properties.put("prometheus.auth.secret_key", "accessKey"); properties.put("prometheus.auth.access_key", "secretKey"); - IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, - () -> prometheusStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("Missing [prometheus.uri] fields " - + "in the Prometheus connector properties.", + IllegalArgumentException exception = + Assertions.assertThrows( + IllegalArgumentException.class, + () -> prometheusStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals( + "Missing [prometheus.uri] fields " + "in the Prometheus connector properties.", exception.getMessage()); } @@ -94,14 +91,15 @@ void testGetStorageEngineWithMissingRegionInAWS() { properties.put("prometheus.auth.type", "awssigv4"); properties.put("prometheus.auth.secret_key", "accessKey"); properties.put("prometheus.auth.access_key", "secretKey"); - IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, - () -> prometheusStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("Missing [prometheus.auth.region] fields in the " - + "Prometheus connector properties.", + IllegalArgumentException exception = + Assertions.assertThrows( + IllegalArgumentException.class, + () -> prometheusStorageFactory.getStorageEngine(properties)); + 
Assertions.assertEquals( + "Missing [prometheus.auth.region] fields in the " + "Prometheus connector properties.", exception.getMessage()); } - @Test @SneakyThrows void testGetStorageEngineWithLongConfigProperties() { @@ -111,9 +109,12 @@ void testGetStorageEngineWithLongConfigProperties() { properties.put("prometheus.auth.type", "awssigv4"); properties.put("prometheus.auth.secret_key", "accessKey"); properties.put("prometheus.auth.access_key", "secretKey"); - IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, - () -> prometheusStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("Missing [prometheus.auth.region] fields in the " + IllegalArgumentException exception = + Assertions.assertThrows( + IllegalArgumentException.class, + () -> prometheusStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals( + "Missing [prometheus.auth.region] fields in the " + "Prometheus connector properties." + "Fields [prometheus.uri] exceeds more than 1000 characters.", exception.getMessage()); @@ -130,13 +131,14 @@ void testGetStorageEngineWithWrongAuthType() { properties.put("prometheus.auth.region", "us-east-1"); properties.put("prometheus.auth.secret_key", "accessKey"); properties.put("prometheus.auth.access_key", "secretKey"); - IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, - () -> prometheusStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("AUTH Type : random is not supported with Prometheus Connector", - exception.getMessage()); + IllegalArgumentException exception = + Assertions.assertThrows( + IllegalArgumentException.class, + () -> prometheusStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals( + "AUTH Type : random is not supported with Prometheus Connector", exception.getMessage()); } - @Test @SneakyThrows void testGetStorageEngineWithNONEAuthType() { @@ -144,8 +146,7 @@ void 
testGetStorageEngineWithNONEAuthType() { PrometheusStorageFactory prometheusStorageFactory = new PrometheusStorageFactory(settings); HashMap properties = new HashMap<>(); properties.put("prometheus.uri", "https://test.com"); - StorageEngine storageEngine - = prometheusStorageFactory.getStorageEngine(properties); + StorageEngine storageEngine = prometheusStorageFactory.getStorageEngine(properties); Assertions.assertTrue(storageEngine instanceof PrometheusStorageEngine); } @@ -158,8 +159,9 @@ void testGetStorageEngineWithInvalidURISyntax() { properties.put("prometheus.auth.type", "basicauth"); properties.put("prometheus.auth.username", "admin"); properties.put("prometheus.auth.password", "admin"); - RuntimeException exception = Assertions.assertThrows(RuntimeException.class, - () -> prometheusStorageFactory.getStorageEngine(properties)); + RuntimeException exception = + Assertions.assertThrows( + RuntimeException.class, () -> prometheusStorageFactory.getStorageEngine(properties)); Assertions.assertTrue( exception.getMessage().contains("Invalid URI in prometheus properties: ")); } @@ -214,10 +216,13 @@ void createDataSourceWithInvalidHostname() { metadata.setProperties(properties); PrometheusStorageFactory prometheusStorageFactory = new PrometheusStorageFactory(settings); - RuntimeException exception = Assertions.assertThrows(RuntimeException.class, - () -> prometheusStorageFactory.createDataSource(metadata)); + RuntimeException exception = + Assertions.assertThrows( + RuntimeException.class, () -> prometheusStorageFactory.createDataSource(metadata)); Assertions.assertTrue( - exception.getMessage().contains("Invalid hostname in the uri: http://dummyprometheus:9090")); + exception + .getMessage() + .contains("Invalid hostname in the uri: http://dummyprometheus:9090")); } @Test @@ -234,8 +239,9 @@ void createDataSourceWithInvalidIp() { metadata.setProperties(properties); PrometheusStorageFactory prometheusStorageFactory = new PrometheusStorageFactory(settings); - 
RuntimeException exception = Assertions.assertThrows(RuntimeException.class, - () -> prometheusStorageFactory.createDataSource(metadata)); + RuntimeException exception = + Assertions.assertThrows( + RuntimeException.class, () -> prometheusStorageFactory.createDataSource(metadata)); Assertions.assertTrue( exception.getMessage().contains("Invalid hostname in the uri: http://231.54.11.987:9090")); } @@ -256,11 +262,15 @@ void createDataSourceWithHostnameNotMatchingWithAllowHostsConfig() { metadata.setProperties(properties); PrometheusStorageFactory prometheusStorageFactory = new PrometheusStorageFactory(settings); - RuntimeException exception = Assertions.assertThrows(RuntimeException.class, - () -> prometheusStorageFactory.createDataSource(metadata)); + RuntimeException exception = + Assertions.assertThrows( + RuntimeException.class, () -> prometheusStorageFactory.createDataSource(metadata)); Assertions.assertTrue( - exception.getMessage().contains("Disallowed hostname in the uri: http://localhost.com:9090. " - + "Validate with plugins.query.datasources.uri.allowhosts config")); + exception + .getMessage() + .contains( + "Disallowed hostname in the uri: http://localhost.com:9090. 
" + + "Validate with plugins.query.datasources.uri.allowhosts config")); } @Test @@ -280,5 +290,4 @@ void createDataSourceSuccessWithHostnameRestrictions() { DataSource dataSource = new PrometheusStorageFactory(settings).createDataSource(metadata); Assertions.assertTrue(dataSource.getStorageEngine() instanceof PrometheusStorageEngine); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/QueryExemplarsTableTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/QueryExemplarsTableTest.java index 19876d398d..7f49de981a 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/QueryExemplarsTableTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/QueryExemplarsTableTest.java @@ -30,14 +30,12 @@ @ExtendWith(MockitoExtension.class) class QueryExemplarsTableTest { - @Mock - private PrometheusClient client; + @Mock private PrometheusClient client; @Test @SneakyThrows void testGetFieldTypes() { - PrometheusQueryExemplarsRequest exemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest exemplarsRequest = new PrometheusQueryExemplarsRequest(); exemplarsRequest.setQuery(QUERY); exemplarsRequest.setStartTime(STARTTIME); exemplarsRequest.setEndTime(ENDTIME); @@ -50,8 +48,7 @@ void testGetFieldTypes() { @Test void testImplementWithBasicMetricQuery() { - PrometheusQueryExemplarsRequest exemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest exemplarsRequest = new PrometheusQueryExemplarsRequest(); exemplarsRequest.setQuery(QUERY); exemplarsRequest.setStartTime(STARTTIME); exemplarsRequest.setEndTime(ENDTIME); @@ -67,8 +64,7 @@ void testImplementWithBasicMetricQuery() { @Test void testCreateScanBuilderWithQueryRangeTableFunction() { - PrometheusQueryExemplarsRequest exemplarsRequest - = new PrometheusQueryExemplarsRequest(); + PrometheusQueryExemplarsRequest exemplarsRequest = new PrometheusQueryExemplarsRequest(); 
exemplarsRequest.setQuery(QUERY); exemplarsRequest.setStartTime(STARTTIME); exemplarsRequest.setEndTime(ENDTIME); @@ -77,5 +73,4 @@ void testCreateScanBuilderWithQueryRangeTableFunction() { Assertions.assertNotNull(tableScanBuilder); Assertions.assertTrue(tableScanBuilder instanceof QueryExemplarsFunctionTableScanBuilder); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/querybuilders/StepParameterResolverTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/querybuilders/StepParameterResolverTest.java index 37e24a56b5..397b7146f7 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/querybuilders/StepParameterResolverTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/querybuilders/StepParameterResolverTest.java @@ -18,9 +18,11 @@ public class StepParameterResolverTest { @Test void testNullChecks() { StepParameterResolver stepParameterResolver = new StepParameterResolver(); - Assertions.assertThrows(NullPointerException.class, + Assertions.assertThrows( + NullPointerException.class, () -> stepParameterResolver.resolve(null, new Date().getTime(), Collections.emptyList())); - Assertions.assertThrows(NullPointerException.class, + Assertions.assertThrows( + NullPointerException.class, () -> stepParameterResolver.resolve(new Date().getTime(), null, Collections.emptyList())); } } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/querybuilders/TimeRangeParametersResolverTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/querybuilders/TimeRangeParametersResolverTest.java index 73839e2152..6a280b7d98 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/querybuilders/TimeRangeParametersResolverTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/querybuilders/TimeRangeParametersResolverTest.java @@ -21,9 +21,11 @@ public class TimeRangeParametersResolverTest { @Test void 
testTimeRangeParametersWithoutTimestampFilter() { TimeRangeParametersResolver timeRangeParametersResolver = new TimeRangeParametersResolver(); - Pair result = timeRangeParametersResolver.resolve( - DSL.and(DSL.less(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), - DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))))); + Pair result = + timeRangeParametersResolver.resolve( + DSL.and( + DSL.less(DSL.ref("code", STRING), DSL.literal(stringValue("200"))), + DSL.equal(DSL.ref("handler", STRING), DSL.literal(stringValue("/ready/"))))); Assertions.assertNotNull(result); Assertions.assertEquals(3600, result.getSecond() - result.getFirst()); } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableScanTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableScanTest.java index 0d7ec4e2cc..ea299b87de 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableScanTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableScanTest.java @@ -22,8 +22,7 @@ @ExtendWith(MockitoExtension.class) public class PrometheusSystemTableScanTest { - @Mock - private PrometheusSystemRequest request; + @Mock private PrometheusSystemRequest request; @Test public void queryData() { diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableTest.java index 0721f82c07..7022ca9657 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/storage/system/PrometheusSystemTableTest.java @@ -35,51 +35,41 @@ @ExtendWith(MockitoExtension.class) public class PrometheusSystemTableTest { - @Mock - private PrometheusClient client; + 
@Mock private PrometheusClient client; - @Mock - private Table table; + @Mock private Table table; @Test void testGetFieldTypesOfMetaTable() { - PrometheusSystemTable systemIndex = new PrometheusSystemTable(client, - new DataSourceSchemaName("prometheus", "information_schema"), TABLE_INFO); + PrometheusSystemTable systemIndex = + new PrometheusSystemTable( + client, new DataSourceSchemaName("prometheus", "information_schema"), TABLE_INFO); final Map fieldTypes = systemIndex.getFieldTypes(); - assertThat(fieldTypes, anyOf( - hasEntry("TABLE_CATALOG", STRING) - )); - assertThat(fieldTypes, anyOf( - hasEntry("UNIT", STRING) - )); + assertThat(fieldTypes, anyOf(hasEntry("TABLE_CATALOG", STRING))); + assertThat(fieldTypes, anyOf(hasEntry("UNIT", STRING))); } @Test void testGetFieldTypesOfMappingTable() { - PrometheusSystemTable systemIndex = new PrometheusSystemTable(client, - new DataSourceSchemaName("prometheus", "information_schema"), mappingTable( - "test_metric")); + PrometheusSystemTable systemIndex = + new PrometheusSystemTable( + client, + new DataSourceSchemaName("prometheus", "information_schema"), + mappingTable("test_metric")); final Map fieldTypes = systemIndex.getFieldTypes(); - assertThat(fieldTypes, anyOf( - hasEntry("COLUMN_NAME", STRING) - )); + assertThat(fieldTypes, anyOf(hasEntry("COLUMN_NAME", STRING))); } - - @Test void implement() { - PrometheusSystemTable systemIndex = new PrometheusSystemTable(client, - new DataSourceSchemaName("prometheus", "information_schema"), TABLE_INFO); + PrometheusSystemTable systemIndex = + new PrometheusSystemTable( + client, new DataSourceSchemaName("prometheus", "information_schema"), TABLE_INFO); NamedExpression projectExpr = named("TABLE_NAME", ref("TABLE_NAME", STRING)); - final PhysicalPlan plan = systemIndex.implement( - project( - relation(TABLE_INFO, table), - projectExpr - )); + final PhysicalPlan plan = + systemIndex.implement(project(relation(TABLE_INFO, table), projectExpr)); assertTrue(plan instanceof 
ProjectOperator); assertTrue(plan.getChild().get(0) instanceof PrometheusSystemTableScan); } - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/utils/LogicalPlanUtils.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/utils/LogicalPlanUtils.java index 5fcebf52e6..570a987889 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/utils/LogicalPlanUtils.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/utils/LogicalPlanUtils.java @@ -19,43 +19,36 @@ public class LogicalPlanUtils { - /** - * Build PrometheusLogicalMetricScan. - */ + /** Build PrometheusLogicalMetricScan. */ public static LogicalPlan indexScan(String metricName, Expression filter) { - return PrometheusLogicalMetricScan.builder().metricName(metricName) - .filter(filter) - .build(); + return PrometheusLogicalMetricScan.builder().metricName(metricName).filter(filter).build(); } - /** - * Build PrometheusLogicalMetricAgg. - */ - public static LogicalPlan indexScanAgg(String metricName, Expression filter, - List aggregators, - List groupByList) { - return PrometheusLogicalMetricAgg.builder().metricName(metricName) + /** Build PrometheusLogicalMetricAgg. */ + public static LogicalPlan indexScanAgg( + String metricName, + Expression filter, + List aggregators, + List groupByList) { + return PrometheusLogicalMetricAgg.builder() + .metricName(metricName) .filter(filter) .aggregatorList(aggregators) .groupByList(groupByList) .build(); } - /** - * Build PrometheusLogicalMetricAgg. - */ - public static LogicalPlan indexScanAgg(String metricName, - List aggregators, - List groupByList) { - return PrometheusLogicalMetricAgg.builder().metricName(metricName) + /** Build PrometheusLogicalMetricAgg. 
*/ + public static LogicalPlan indexScanAgg( + String metricName, List aggregators, List groupByList) { + return PrometheusLogicalMetricAgg.builder() + .metricName(metricName) .aggregatorList(aggregators) .groupByList(groupByList) .build(); } - /** - * Build PrometheusLogicalMetricAgg. - */ + /** Build PrometheusLogicalMetricAgg. */ public static LogicalPlan testLogicalPlanNode() { return new TestLogicalPlan(); } @@ -71,7 +64,4 @@ public R accept(LogicalPlanNodeVisitor visitor, C context) { return visitor.visitNode(this, null); } } - - - } diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/utils/TestUtils.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/utils/TestUtils.java index 1683858c49..a9fcc26101 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/utils/TestUtils.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/utils/TestUtils.java @@ -12,6 +12,7 @@ public class TestUtils { /** * Get Json document from the files in resources folder. + * * @param filename filename. * @return String. * @throws IOException IOException. 
@@ -21,5 +22,4 @@ public static String getJson(String filename) throws IOException { return new String( Objects.requireNonNull(classLoader.getResourceAsStream(filename)).readAllBytes()); } - } diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/QueryResult.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/QueryResult.java index 3ce1dd8875..03be0875cf 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/QueryResult.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/QueryResult.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response; import java.util.Collection; @@ -20,22 +19,18 @@ import org.opensearch.sql.executor.pagination.Cursor; /** - * Query response that encapsulates query results and isolate {@link ExprValue} - * related from formatter implementation. + * Query response that encapsulates query results and isolate {@link ExprValue} related from + * formatter implementation. */ @RequiredArgsConstructor public class QueryResult implements Iterable { - @Getter - private final ExecutionEngine.Schema schema; + @Getter private final ExecutionEngine.Schema schema; - /** - * Results which are collection of expression. - */ + /** Results which are collection of expression. */ private final Collection exprValues; - @Getter - private final Cursor cursor; + @Getter private final Cursor cursor; public QueryResult(ExecutionEngine.Schema schema, Collection exprValues) { this(schema, exprValues, Cursor.None); @@ -43,6 +38,7 @@ public QueryResult(ExecutionEngine.Schema schema, Collection exprValu /** * size of results. + * * @return size of results */ public int size() { @@ -52,14 +48,18 @@ public int size() { /** * Parse column name from results. * - * @return mapping from column names to its expression type. - * note that column name could be original name or its alias if any. + * @return mapping from column names to its expression type. 
note that column name could be + * original name or its alias if any. */ public Map columnNameTypes() { Map colNameTypes = new LinkedHashMap<>(); - schema.getColumns().forEach(column -> colNameTypes.put( - getColumnName(column), - column.getExprType().typeName().toLowerCase(Locale.ROOT))); + schema + .getColumns() + .forEach( + column -> + colNameTypes.put( + getColumnName(column), + column.getExprType().typeName().toLowerCase(Locale.ROOT))); return colNameTypes; } @@ -78,9 +78,6 @@ private String getColumnName(Column column) { } private Object[] convertExprValuesToValues(Collection exprValues) { - return exprValues - .stream() - .map(ExprValue::value) - .toArray(Object[]::new); + return exprValues.stream().map(ExprValue::value).toArray(Object[]::new); } } diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/CommandResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/CommandResponseFormatter.java index 68d9be558b..b781e1dbba 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/CommandResponseFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/CommandResponseFormatter.java @@ -7,13 +7,11 @@ import lombok.Getter; import org.opensearch.sql.executor.execution.CommandPlan; -import org.opensearch.sql.opensearch.response.error.ErrorMessage; -import org.opensearch.sql.opensearch.response.error.ErrorMessageFactory; import org.opensearch.sql.protocol.response.QueryResult; /** - * A simple response formatter which contains no data. - * Supposed to use with {@link CommandPlan} only. + * A simple response formatter which contains no data. Supposed to use with {@link CommandPlan} + * only. 
*/ public class CommandResponseFormatter extends JsonResponseFormatter { diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/CsvResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/CsvResponseFormatter.java index 5c5b4be048..a61b54b258 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/CsvResponseFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/CsvResponseFormatter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; public class CsvResponseFormatter extends FlatResponseFormatter { @@ -14,5 +13,4 @@ public CsvResponseFormatter() { public CsvResponseFormatter(boolean sanitize) { super(",", sanitize); } - } diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/ErrorFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/ErrorFormatter.java index 40848e959b..5c85e5d65b 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/ErrorFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/ErrorFormatter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import com.google.gson.Gson; @@ -17,35 +16,28 @@ @UtilityClass public class ErrorFormatter { - private static final Gson PRETTY_PRINT_GSON = AccessController.doPrivileged( - (PrivilegedAction) () -> new GsonBuilder() - .setPrettyPrinting() - .disableHtmlEscaping() - .create()); - private static final Gson GSON = AccessController.doPrivileged( - (PrivilegedAction) () -> new GsonBuilder().disableHtmlEscaping().create()); - - /** - * Util method to format {@link Throwable} response to JSON string in compact printing. 
- */ + private static final Gson PRETTY_PRINT_GSON = + AccessController.doPrivileged( + (PrivilegedAction) + () -> new GsonBuilder().setPrettyPrinting().disableHtmlEscaping().create()); + private static final Gson GSON = + AccessController.doPrivileged( + (PrivilegedAction) () -> new GsonBuilder().disableHtmlEscaping().create()); + + /** Util method to format {@link Throwable} response to JSON string in compact printing. */ public static String compactFormat(Throwable t) { - JsonError error = new ErrorFormatter.JsonError(t.getClass().getSimpleName(), - t.getMessage()); + JsonError error = new ErrorFormatter.JsonError(t.getClass().getSimpleName(), t.getMessage()); return compactJsonify(error); } - /** - * Util method to format {@link Throwable} response to JSON string in pretty printing. - */ - public static String prettyFormat(Throwable t) { - JsonError error = new ErrorFormatter.JsonError(t.getClass().getSimpleName(), - t.getMessage()); + /** Util method to format {@link Throwable} response to JSON string in pretty printing. 
*/ + public static String prettyFormat(Throwable t) { + JsonError error = new ErrorFormatter.JsonError(t.getClass().getSimpleName(), t.getMessage()); return prettyJsonify(error); } public static String compactJsonify(Object jsonObject) { - return AccessController.doPrivileged( - (PrivilegedAction) () -> GSON.toJson(jsonObject)); + return AccessController.doPrivileged((PrivilegedAction) () -> GSON.toJson(jsonObject)); } public static String prettyJsonify(Object jsonObject) { diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/FlatResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/FlatResponseFormatter.java index 0575647dad..8c67d524b8 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/FlatResponseFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/FlatResponseFormatter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import com.google.common.collect.ImmutableList; @@ -48,9 +47,8 @@ public String format(Throwable t) { } /** - * Sanitize methods are migrated from legacy CSV result. - * Sanitize both headers and data lines by: - * 1) Second double quote entire cell if any comma is found. + * Sanitize methods are migrated from legacy CSV result. Sanitize both headers and data lines by: + * 1) Second double quote entire cell if any comma is found. */ @Getter @RequiredArgsConstructor @@ -84,29 +82,30 @@ private List getHeaders(QueryResult response, boolean sanitize) { private List> getData(QueryResult response, boolean sanitize) { ImmutableList.Builder> dataLines = new ImmutableList.Builder<>(); - response.iterator().forEachRemaining(row -> { - ImmutableList.Builder line = new ImmutableList.Builder<>(); - // replace null values with empty string - Arrays.asList(row).forEach(val -> line.add(val == null ? 
"" : val.toString())); - dataLines.add(line.build()); - }); + response + .iterator() + .forEachRemaining( + row -> { + ImmutableList.Builder line = new ImmutableList.Builder<>(); + // replace null values with empty string + Arrays.asList(row).forEach(val -> line.add(val == null ? "" : val.toString())); + dataLines.add(line.build()); + }); List> result = dataLines.build(); return sanitizeData(result); } - /** - * Sanitize headers because OpenSearch allows special character present in field names. - */ + /** Sanitize headers because OpenSearch allows special character present in field names. */ private List sanitizeHeaders(List headers) { if (sanitize) { return headers.stream() - .map(this::sanitizeCell) - .map(cell -> quoteIfRequired(INLINE_SEPARATOR, cell)) - .collect(Collectors.toList()); + .map(this::sanitizeCell) + .map(cell -> quoteIfRequired(INLINE_SEPARATOR, cell)) + .collect(Collectors.toList()); } else { return headers.stream() - .map(cell -> quoteIfRequired(INLINE_SEPARATOR, cell)) - .collect(Collectors.toList()); + .map(cell -> quoteIfRequired(INLINE_SEPARATOR, cell)) + .collect(Collectors.toList()); } } @@ -114,14 +113,16 @@ private List> sanitizeData(List> lines) { List> result = new ArrayList<>(); if (sanitize) { for (List line : lines) { - result.add(line.stream() + result.add( + line.stream() .map(this::sanitizeCell) .map(cell -> quoteIfRequired(INLINE_SEPARATOR, cell)) .collect(Collectors.toList())); } } else { for (List line : lines) { - result.add(line.stream() + result.add( + line.stream() .map(cell -> quoteIfRequired(INLINE_SEPARATOR, cell)) .collect(Collectors.toList())); } @@ -138,13 +139,11 @@ private String sanitizeCell(String cell) { private String quoteIfRequired(String separator, String cell) { final String quote = "\""; - return cell.contains(separator) - ? quote + cell.replaceAll("\"", "\"\"") + quote : cell; + return cell.contains(separator) ? 
quote + cell.replaceAll("\"", "\"\"") + quote : cell; } private boolean isStartWithSensitiveChar(String cell) { return SENSITIVE_CHAR.stream().anyMatch(cell::startsWith); } } - } diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/Format.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/Format.java index 4291c09df0..8f22a5380e 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/Format.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/Format.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import com.google.common.base.Strings; @@ -20,8 +19,7 @@ public enum Format { RAW("raw"), VIZ("viz"); - @Getter - private final String formatName; + @Getter private final String formatName; private static final Map ALL_FORMATS; diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatter.java index 1ad3ffde34..8be22af532 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import java.util.List; @@ -40,9 +39,7 @@ protected Object buildJsonObject(QueryResult response) { json.datarows(fetchDataRows(response)); // Populate other fields - json.total(response.size()) - .size(response.size()) - .status(200); + json.total(response.size()).size(response.size()).status(200); if (!response.getCursor().equals(Cursor.None)) { json.cursor(response.getCursor().toString()); } @@ -54,10 +51,7 @@ protected Object buildJsonObject(QueryResult response) { public String format(Throwable t) { int status = getStatus(t); ErrorMessage 
message = ErrorMessageFactory.createErrorMessage(t, status); - Error error = new Error( - message.getType(), - message.getReason(), - message.getDetails()); + Error error = new Error(message.getType(), message.getReason(), message.getDetails()); return jsonify(new JdbcErrorResponse(error, status)); } @@ -66,8 +60,8 @@ private Column fetchColumn(Schema.Column col) { } /** - * Convert type that exists in both legacy and new engine but has different name. - * Return old type name to avoid breaking impact on client-side. + * Convert type that exists in both legacy and new engine but has different name. Return old type + * name to avoid breaking impact on client-side. */ private String convertToLegacyType(ExprType type) { return type.legacyTypeName().toLowerCase(); @@ -83,18 +77,16 @@ private Object[][] fetchDataRows(QueryResult response) { } private int getStatus(Throwable t) { - return (t instanceof SyntaxCheckException - || t instanceof QueryEngineException) ? 400 : 503; + return (t instanceof SyntaxCheckException || t instanceof QueryEngineException) ? 
400 : 503; } - /** - * org.json requires these inner data classes be public (and static) - */ + /** org.json requires these inner data classes be public (and static) */ @Builder @Getter public static class JdbcResponse { @Singular("column") private final List schema; + private final Object[][] datarows; private final long total; private final long size; @@ -125,5 +117,4 @@ public static class Error { private final String reason; private final String details; } - } diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/JsonResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/JsonResponseFormatter.java index 810a7d0c2d..115ee77b2b 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/JsonResponseFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/JsonResponseFormatter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import static org.opensearch.sql.protocol.response.format.ErrorFormatter.compactFormat; @@ -24,16 +23,13 @@ @RequiredArgsConstructor public abstract class JsonResponseFormatter implements ResponseFormatter { - /** - * JSON format styles: pretty format or compact format without indent and space. - */ + /** JSON format styles: pretty format or compact format without indent and space. */ public enum Style { - PRETTY, COMPACT + PRETTY, + COMPACT } - /** - * JSON format style. - */ + /** JSON format style. */ private final Style style; public static final String CONTENT_TYPE = "application/json; charset=UTF-8"; @@ -45,8 +41,8 @@ public String format(R response) { @Override public String format(Throwable t) { - return AccessController.doPrivileged((PrivilegedAction) () -> - (style == PRETTY) ? prettyFormat(t) : compactFormat(t)); + return AccessController.doPrivileged( + (PrivilegedAction) () -> (style == PRETTY) ? 
prettyFormat(t) : compactFormat(t)); } public String contentType() { @@ -62,7 +58,8 @@ public String contentType() { protected abstract Object buildJsonObject(R response); protected String jsonify(Object jsonObject) { - return AccessController.doPrivileged((PrivilegedAction) () -> - (style == PRETTY) ? prettyJsonify(jsonObject) : compactJsonify(jsonObject)); + return AccessController.doPrivileged( + (PrivilegedAction) + () -> (style == PRETTY) ? prettyJsonify(jsonObject) : compactJsonify(jsonObject)); } } diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/RawResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/RawResponseFormatter.java index 8fe88b2f95..75883073f5 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/RawResponseFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/RawResponseFormatter.java @@ -3,16 +3,12 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; -/** - * Response formatter to format response to csv or raw format. - */ -//@RequiredArgsConstructor +/** Response formatter to format response to csv or raw format. */ +// @RequiredArgsConstructor public class RawResponseFormatter extends FlatResponseFormatter { public RawResponseFormatter() { super("|", false); } - } diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/ResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/ResponseFormatter.java index 6d9cc093c5..6738cfbc9c 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/ResponseFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/ResponseFormatter.java @@ -3,12 +3,9 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; -/** - * Response formatter to format response to different formats. 
- */ +/** Response formatter to format response to different formats. */ public interface ResponseFormatter { /** @@ -33,5 +30,4 @@ public interface ResponseFormatter { * @return string */ String contentType(); - } diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/SimpleJsonResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/SimpleJsonResponseFormatter.java index ad705ccafa..c00174dc9f 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/SimpleJsonResponseFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/SimpleJsonResponseFormatter.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import java.util.List; @@ -43,8 +42,7 @@ public SimpleJsonResponseFormatter(Style style) { public Object buildJsonObject(QueryResult response) { JsonResponse.JsonResponseBuilder json = JsonResponse.builder(); - json.total(response.size()) - .size(response.size()); + json.total(response.size()).size(response.size()); response.columnNameTypes().forEach((name, type) -> json.column(new Column(name, type))); @@ -61,9 +59,7 @@ private Object[][] fetchDataRows(QueryResult response) { return rows; } - /** - * org.json requires these inner data classes be public (and static) - */ + /** org.json requires these inner data classes be public (and static) */ @Builder @Getter public static class JsonResponse { @@ -82,5 +78,4 @@ public static class Column { private final String name; private final String type; } - } diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/VisualizationResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/VisualizationResponseFormatter.java index 7e971c9099..d5d220dd8d 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/VisualizationResponseFormatter.java +++ 
b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/VisualizationResponseFormatter.java @@ -72,21 +72,20 @@ protected Object buildJsonObject(QueryResult response) { public String format(Throwable t) { int status = getStatus(t); ErrorMessage message = ErrorMessageFactory.createErrorMessage(t, status); - VisualizationResponseFormatter.Error error = new Error( - message.getType(), - message.getReason(), - message.getDetails()); + VisualizationResponseFormatter.Error error = + new Error(message.getType(), message.getReason(), message.getDetails()); return jsonify(new VisualizationErrorResponse(error, status)); } private int getStatus(Throwable t) { - return (t instanceof SyntaxCheckException - || t instanceof QueryEngineException) ? 400 : 503; + return (t instanceof SyntaxCheckException || t instanceof QueryEngineException) ? 400 : 503; } private Map> fetchData(QueryResult response) { Map> columnMap = new LinkedHashMap<>(); - response.getSchema().getColumns() + response + .getSchema() + .getColumns() .forEach(column -> columnMap.put(column.getName(), new LinkedList<>())); for (Object[] dataRow : response) { @@ -107,16 +106,17 @@ private Metadata constructMetadata(QueryResult response) { private List fetchFields(QueryResult response) { List columns = response.getSchema().getColumns(); ImmutableList.Builder fields = ImmutableList.builder(); - columns.forEach(column -> { - Field field = new Field(column.getName(), convertToLegacyType(column.getExprType())); - fields.add(field); - }); + columns.forEach( + column -> { + Field field = new Field(column.getName(), convertToLegacyType(column.getExprType())); + fields.add(field); + }); return fields.build(); } /** - * Convert type that exists in both legacy and new engine but has different name. - * Return old type name to avoid breaking impact on client-side. + * Convert type that exists in both legacy and new engine but has different name. Return old type + * name to avoid breaking impact on client-side. 
*/ private String convertToLegacyType(ExprType type) { return type.legacyTypeName().toLowerCase(); diff --git a/protocol/src/test/java/org/opensearch/sql/protocol/response/QueryResultTest.java b/protocol/src/test/java/org/opensearch/sql/protocol/response/QueryResultTest.java index 4c58e189b8..e03169e9f8 100644 --- a/protocol/src/test/java/org/opensearch/sql/protocol/response/QueryResultTest.java +++ b/protocol/src/test/java/org/opensearch/sql/protocol/response/QueryResultTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response; import static org.junit.jupiter.api.Assertions.assertArrayEquals; @@ -23,86 +22,77 @@ class QueryResultTest { - private ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", null, STRING), - new ExecutionEngine.Schema.Column("age", null, INTEGER))); - + private ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("name", null, STRING), + new ExecutionEngine.Schema.Column("age", null, INTEGER))); @Test void size() { - QueryResult response = new QueryResult( - schema, - Arrays.asList( - tupleValue(ImmutableMap.of("name", "John", "age", 20)), - tupleValue(ImmutableMap.of("name", "Allen", "age", 30)), - tupleValue(ImmutableMap.of("name", "Smith", "age", 40)) - ), Cursor.None); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("name", "John", "age", 20)), + tupleValue(ImmutableMap.of("name", "Allen", "age", 30)), + tupleValue(ImmutableMap.of("name", "Smith", "age", 40))), + Cursor.None); assertEquals(3, response.size()); } @Test void columnNameTypes() { - QueryResult response = new QueryResult( - schema, - Collections.singletonList( - tupleValue(ImmutableMap.of("name", "John", "age", 20)) - ), Cursor.None); + QueryResult response = + new QueryResult( + schema, + 
Collections.singletonList(tupleValue(ImmutableMap.of("name", "John", "age", 20))), + Cursor.None); - assertEquals( - ImmutableMap.of("name", "string", "age", "integer"), - response.columnNameTypes() - ); + assertEquals(ImmutableMap.of("name", "string", "age", "integer"), response.columnNameTypes()); } @Test void columnNameTypesWithAlias() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", "n", STRING))); - QueryResult response = new QueryResult( - schema, - Collections.singletonList(tupleValue(ImmutableMap.of("n", "John"))), - Cursor.None); - - assertEquals( - ImmutableMap.of("n", "string"), - response.columnNameTypes() - ); + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of(new ExecutionEngine.Schema.Column("name", "n", STRING))); + QueryResult response = + new QueryResult( + schema, + Collections.singletonList(tupleValue(ImmutableMap.of("n", "John"))), + Cursor.None); + + assertEquals(ImmutableMap.of("n", "string"), response.columnNameTypes()); } @Test void columnNameTypesFromEmptyExprValues() { - QueryResult response = new QueryResult( - schema, - Collections.emptyList(), Cursor.None); - assertEquals( - ImmutableMap.of("name", "string", "age", "integer"), - response.columnNameTypes() - ); + QueryResult response = new QueryResult(schema, Collections.emptyList(), Cursor.None); + assertEquals(ImmutableMap.of("name", "string", "age", "integer"), response.columnNameTypes()); } @Test void columnNameTypesFromExprValuesWithMissing() { - QueryResult response = new QueryResult( - schema, - Arrays.asList( - tupleValue(ImmutableMap.of("name", "John")), - tupleValue(ImmutableMap.of("name", "John", "age", 20)) - )); - - assertEquals( - ImmutableMap.of("name", "string", "age", "integer"), - response.columnNameTypes() - ); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("name", "John")), + 
tupleValue(ImmutableMap.of("name", "John", "age", 20)))); + + assertEquals(ImmutableMap.of("name", "string", "age", "integer"), response.columnNameTypes()); } @Test void iterate() { - QueryResult response = new QueryResult( - schema, - Arrays.asList( - tupleValue(ImmutableMap.of("name", "John", "age", 20)), - tupleValue(ImmutableMap.of("name", "Allen", "age", 30)) - ), Cursor.None); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("name", "John", "age", 20)), + tupleValue(ImmutableMap.of("name", "Allen", "age", 30))), + Cursor.None); int i = 0; for (Object[] objects : response) { @@ -116,5 +106,4 @@ void iterate() { i++; } } - } diff --git a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/CommandResponseFormatterTest.java b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/CommandResponseFormatterTest.java index 85efbab369..4b20f6b1dc 100644 --- a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/CommandResponseFormatterTest.java +++ b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/CommandResponseFormatterTest.java @@ -29,32 +29,34 @@ public class CommandResponseFormatterTest { @Test public void produces_always_same_output_for_any_query_response() { var formatter = new CommandResponseFormatter(); - assertEquals(formatter.format(mock(QueryResult.class)), - formatter.format(mock(QueryResult.class))); + assertEquals( + formatter.format(mock(QueryResult.class)), formatter.format(mock(QueryResult.class))); - QueryResult response = new QueryResult( - new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", "name", STRING), - new ExecutionEngine.Schema.Column("address", "address", OpenSearchTextType.of()), - new ExecutionEngine.Schema.Column("age", "age", INTEGER))), - ImmutableList.of( - tupleValue(ImmutableMap.builder() - .put("name", "John") - .put("address", "Seattle") - .put("age", 20) - .build())), - new 
Cursor("test_cursor")); + QueryResult response = + new QueryResult( + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("name", "name", STRING), + new ExecutionEngine.Schema.Column( + "address", "address", OpenSearchTextType.of()), + new ExecutionEngine.Schema.Column("age", "age", INTEGER))), + ImmutableList.of( + tupleValue( + ImmutableMap.builder() + .put("name", "John") + .put("address", "Seattle") + .put("age", 20) + .build())), + new Cursor("test_cursor")); - assertEquals("{\n" - + " \"succeeded\": true\n" - + "}", - formatter.format(response)); + assertEquals("{\n" + " \"succeeded\": true\n" + "}", formatter.format(response)); } @Test public void formats_error_as_default_formatter() { var exception = new Exception("pewpew", new RuntimeException("meow meow")); - assertEquals(new JdbcResponseFormatter(PRETTY).format(exception), + assertEquals( + new JdbcResponseFormatter(PRETTY).format(exception), new CommandResponseFormatter().format(exception)); } diff --git a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/CsvResponseFormatterTest.java b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/CsvResponseFormatterTest.java index 82b4f372b3..13670dc7d6 100644 --- a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/CsvResponseFormatterTest.java +++ b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/CsvResponseFormatterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -24,20 +23,23 @@ import org.opensearch.sql.executor.ExecutionEngine; import org.opensearch.sql.protocol.response.QueryResult; -/** - * Unit test for {@link CsvResponseFormatter}. - */ +/** Unit test for {@link CsvResponseFormatter}. 
*/ public class CsvResponseFormatterTest { private static final CsvResponseFormatter formatter = new CsvResponseFormatter(); @Test void formatResponse() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", "name", STRING), - new ExecutionEngine.Schema.Column("age", "age", INTEGER))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("name", "John", "age", 20)), - tupleValue(ImmutableMap.of("name", "Smith", "age", 30)))); + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("name", "name", STRING), + new ExecutionEngine.Schema.Column("age", "age", INTEGER))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("name", "John", "age", 20)), + tupleValue(ImmutableMap.of("name", "Smith", "age", 30)))); CsvResponseFormatter formatter = new CsvResponseFormatter(); String expected = "name,age%nJohn,20%nSmith,30"; assertEquals(format(expected), formatter.format(response)); @@ -45,49 +47,69 @@ void formatResponse() { @Test void sanitizeHeaders() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("=firstname", null, STRING), - new ExecutionEngine.Schema.Column("+lastname", null, STRING), - new ExecutionEngine.Schema.Column("-city", null, STRING), - new ExecutionEngine.Schema.Column("@age", null, INTEGER))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of( - "=firstname", "John", "+lastname", "Smith", "-city", "Seattle", "@age", 20)))); - String expected = "'=firstname,'+lastname,'-city,'@age%n" - + "John,Smith,Seattle,20"; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("=firstname", null, STRING), + new ExecutionEngine.Schema.Column("+lastname", null, STRING), + new 
ExecutionEngine.Schema.Column("-city", null, STRING), + new ExecutionEngine.Schema.Column("@age", null, INTEGER))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue( + ImmutableMap.of( + "=firstname", + "John", + "+lastname", + "Smith", + "-city", + "Seattle", + "@age", + 20)))); + String expected = "'=firstname,'+lastname,'-city,'@age%n" + "John,Smith,Seattle,20"; assertEquals(format(expected), formatter.format(response)); } @Test void sanitizeData() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("city", "city", STRING))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("city", "Seattle")), - tupleValue(ImmutableMap.of("city", "=Seattle")), - tupleValue(ImmutableMap.of("city", "+Seattle")), - tupleValue(ImmutableMap.of("city", "-Seattle")), - tupleValue(ImmutableMap.of("city", "@Seattle")), - tupleValue(ImmutableMap.of("city", "Seattle=")))); - String expected = "city%n" - + "Seattle%n" - + "'=Seattle%n" - + "'+Seattle%n" - + "'-Seattle%n" - + "'@Seattle%n" - + "Seattle="; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of(new ExecutionEngine.Schema.Column("city", "city", STRING))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("city", "Seattle")), + tupleValue(ImmutableMap.of("city", "=Seattle")), + tupleValue(ImmutableMap.of("city", "+Seattle")), + tupleValue(ImmutableMap.of("city", "-Seattle")), + tupleValue(ImmutableMap.of("city", "@Seattle")), + tupleValue(ImmutableMap.of("city", "Seattle=")))); + String expected = + "city%n" + + "Seattle%n" + + "'=Seattle%n" + + "'+Seattle%n" + + "'-Seattle%n" + + "'@Seattle%n" + + "Seattle="; assertEquals(format(expected), formatter.format(response)); } @Test void quoteIfRequired() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new 
ExecutionEngine.Schema.Column("na,me", "na,me", STRING), - new ExecutionEngine.Schema.Column(",,age", ",,age", INTEGER))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("na,me", "John,Smith", ",,age", "30,,,")))); - String expected = "\"na,me\",\",,age\"%n" - + "\"John,Smith\",\"30,,,\""; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("na,me", "na,me", STRING), + new ExecutionEngine.Schema.Column(",,age", ",,age", INTEGER))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList(tupleValue(ImmutableMap.of("na,me", "John,Smith", ",,age", "30,,,")))); + String expected = "\"na,me\",\",,age\"%n" + "\"John,Smith\",\"30,,,\""; assertEquals(format(expected), formatter.format(response)); } @@ -102,32 +124,36 @@ void formatError() { @Test void escapeSanitize() { CsvResponseFormatter escapeFormatter = new CsvResponseFormatter(false); - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("city", "city", STRING))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("city", "=Seattle")), - tupleValue(ImmutableMap.of("city", ",,Seattle")))); - String expected = "city%n" - + "=Seattle%n" - + "\",,Seattle\""; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of(new ExecutionEngine.Schema.Column("city", "city", STRING))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("city", "=Seattle")), + tupleValue(ImmutableMap.of("city", ",,Seattle")))); + String expected = "city%n" + "=Seattle%n" + "\",,Seattle\""; assertEquals(format(expected), escapeFormatter.format(response)); } @Test void replaceNullValues() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", "name", STRING), - new 
ExecutionEngine.Schema.Column("city", "city", STRING))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("name", "John","city", "Seattle")), - ExprTupleValue.fromExprValueMap( - ImmutableMap.of("firstname", LITERAL_NULL, "city", stringValue("Seattle"))), - ExprTupleValue.fromExprValueMap( - ImmutableMap.of("firstname", stringValue("John"), "city", LITERAL_MISSING)))); - String expected = "name,city%n" - + "John,Seattle%n" - + ",Seattle%n" - + "John,"; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("name", "name", STRING), + new ExecutionEngine.Schema.Column("city", "city", STRING))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("name", "John", "city", "Seattle")), + ExprTupleValue.fromExprValueMap( + ImmutableMap.of("firstname", LITERAL_NULL, "city", stringValue("Seattle"))), + ExprTupleValue.fromExprValueMap( + ImmutableMap.of("firstname", stringValue("John"), "city", LITERAL_MISSING)))); + String expected = "name,city%n" + "John,Seattle%n" + ",Seattle%n" + "John,"; assertEquals(format(expected), formatter.format(response)); } @@ -135,5 +161,4 @@ void replaceNullValues() { void testContentType() { assertEquals(formatter.contentType(), CONTENT_TYPE); } - } diff --git a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/FormatTest.java b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/FormatTest.java index e0e4355a24..7293048916 100644 --- a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/FormatTest.java +++ b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/FormatTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -13,9 +12,7 @@ import java.util.Optional; import org.junit.jupiter.api.Test; 
-/** - * Unit test for {@link Format}. - */ +/** Unit test for {@link Format}. */ public class FormatTest { @Test @@ -58,5 +55,4 @@ void unsupportedFormat() { Optional format = Format.of("notsupport"); assertFalse(format.isPresent()); } - } diff --git a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatterTest.java b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatterTest.java index 9c79b1bf89..16dd1590ee 100644 --- a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatterTest.java +++ b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -43,26 +42,35 @@ class JdbcResponseFormatterTest { @Test void format_response() { - QueryResult response = new QueryResult( - new Schema(ImmutableList.of( - new Column("name", "name", STRING), - new Column("address1", "address1", OpenSearchTextType.of()), - new Column("address2", "address2", OpenSearchTextType.of(Map.of("words", - OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))), - new Column("location", "location", STRUCT), - new Column("employer", "employer", ARRAY), - new Column("age", "age", INTEGER))), - ImmutableList.of( - tupleValue(ImmutableMap.builder() - .put("name", "John") - .put("address1", "Seattle") - .put("address2", "WA") - .put("location", ImmutableMap.of("x", "1", "y", "2")) - .put("employments", ImmutableList.of( - ImmutableMap.of("name", "Amazon"), - ImmutableMap.of("name", "AWS"))) - .put("age", 20) - .build()))); + QueryResult response = + new QueryResult( + new Schema( + ImmutableList.of( + new Column("name", "name", STRING), + new Column("address1", "address1", OpenSearchTextType.of()), + new Column( + "address2", + "address2", + OpenSearchTextType.of( + Map.of( + 
"words", + OpenSearchDataType.of(OpenSearchDataType.MappingType.Keyword)))), + new Column("location", "location", STRUCT), + new Column("employer", "employer", ARRAY), + new Column("age", "age", INTEGER))), + ImmutableList.of( + tupleValue( + ImmutableMap.builder() + .put("name", "John") + .put("address1", "Seattle") + .put("address2", "WA") + .put("location", ImmutableMap.of("x", "1", "y", "2")) + .put( + "employments", + ImmutableList.of( + ImmutableMap.of("name", "Amazon"), ImmutableMap.of("name", "AWS"))) + .put("age", 20) + .build()))); assertJsonEquals( "{" @@ -76,7 +84,8 @@ void format_response() { + "]," + "\"datarows\":[" + "[\"John\",\"Seattle\",\"WA\",{\"x\":\"1\",\"y\":\"2\"}," - + "[{\"name\":\"Amazon\"}," + "{\"name\":\"AWS\"}]," + + "[{\"name\":\"Amazon\"}," + + "{\"name\":\"AWS\"}]," + "20]]," + "\"total\":1," + "\"size\":1," @@ -86,18 +95,21 @@ void format_response() { @Test void format_response_with_cursor() { - QueryResult response = new QueryResult( - new Schema(ImmutableList.of( - new Column("name", "name", STRING), - new Column("address", "address", OpenSearchTextType.of()), - new Column("age", "age", INTEGER))), - ImmutableList.of( - tupleValue(ImmutableMap.builder() - .put("name", "John") - .put("address", "Seattle") - .put("age", 20) - .build())), - new Cursor("test_cursor")); + QueryResult response = + new QueryResult( + new Schema( + ImmutableList.of( + new Column("name", "name", STRING), + new Column("address", "address", OpenSearchTextType.of()), + new Column("age", "age", INTEGER))), + ImmutableList.of( + tupleValue( + ImmutableMap.builder() + .put("name", "John") + .put("address", "Seattle") + .put("age", 20) + .build())), + new Cursor("test_cursor")); assertJsonEquals( "{" @@ -119,9 +131,9 @@ void format_response_with_cursor() { void format_response_with_missing_and_null_value() { QueryResult response = new QueryResult( - new Schema(ImmutableList.of( - new Column("name", null, STRING), - new Column("age", null, INTEGER))), + new 
Schema( + ImmutableList.of( + new Column("name", null, STRING), new Column("age", null, INTEGER))), Arrays.asList( ExprTupleValue.fromExprValueMap( ImmutableMap.of("name", stringValue("John"), "age", LITERAL_MISSING)), @@ -147,8 +159,7 @@ void format_client_error_response_due_to_syntax_exception() { + "\"details\":\"Invalid query syntax\"" + "}," + "\"status\":400}", - formatter.format(new SyntaxCheckException("Invalid query syntax")) - ); + formatter.format(new SyntaxCheckException("Invalid query syntax"))); } @Test @@ -161,8 +172,7 @@ void format_client_error_response_due_to_semantic_exception() { + "\"details\":\"Invalid query semantics\"" + "}," + "\"status\":400}", - formatter.format(new SemanticCheckException("Invalid query semantics")) - ); + formatter.format(new SemanticCheckException("Invalid query semantics"))); } @Test @@ -175,8 +185,7 @@ void format_server_error_response() { + "\"details\":\"Execution error\"" + "}," + "\"status\":503}", - formatter.format(new IllegalStateException("Execution error")) - ); + formatter.format(new IllegalStateException("Execution error"))); } @Test @@ -193,15 +202,12 @@ void format_server_error_response_due_to_opensearch() { + "from OpenSearch engine.\"" + "}," + "\"status\":503}", - formatter.format(new OpenSearchException("all shards failed", - new IllegalStateException("Execution error"))) - ); + formatter.format( + new OpenSearchException( + "all shards failed", new IllegalStateException("Execution error")))); } private static void assertJsonEquals(String expected, String actual) { - assertEquals( - JsonParser.parseString(expected), - JsonParser.parseString(actual)); + assertEquals(JsonParser.parseString(expected), JsonParser.parseString(actual)); } - } diff --git a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/RawResponseFormatterTest.java b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/RawResponseFormatterTest.java index b33a4f216a..af0ab1947b 100644 --- 
a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/RawResponseFormatterTest.java +++ b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/RawResponseFormatterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -24,69 +23,92 @@ import org.opensearch.sql.executor.ExecutionEngine; import org.opensearch.sql.protocol.response.QueryResult; -/** - * Unit test for {@link FlatResponseFormatter}. - */ +/** Unit test for {@link FlatResponseFormatter}. */ public class RawResponseFormatterTest { private FlatResponseFormatter rawFormatter = new RawResponseFormatter(); @Test void formatResponse() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", "name", STRING), - new ExecutionEngine.Schema.Column("age", "age", INTEGER))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("name", "John", "age", 20)), - tupleValue(ImmutableMap.of("name", "Smith", "age", 30)))); + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("name", "name", STRING), + new ExecutionEngine.Schema.Column("age", "age", INTEGER))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("name", "John", "age", 20)), + tupleValue(ImmutableMap.of("name", "Smith", "age", 30)))); String expected = "name|age%nJohn|20%nSmith|30"; assertEquals(format(expected), rawFormatter.format(response)); } @Test void sanitizeHeaders() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("=firstname", null, STRING), - new ExecutionEngine.Schema.Column("+lastname", null, STRING), - new ExecutionEngine.Schema.Column("-city", null, STRING), - new ExecutionEngine.Schema.Column("@age", 
null, INTEGER))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of( - "=firstname", "John", "+lastname", "Smith", "-city", "Seattle", "@age", 20)))); - String expected = "=firstname|+lastname|-city|@age%n" - + "John|Smith|Seattle|20"; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("=firstname", null, STRING), + new ExecutionEngine.Schema.Column("+lastname", null, STRING), + new ExecutionEngine.Schema.Column("-city", null, STRING), + new ExecutionEngine.Schema.Column("@age", null, INTEGER))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue( + ImmutableMap.of( + "=firstname", + "John", + "+lastname", + "Smith", + "-city", + "Seattle", + "@age", + 20)))); + String expected = "=firstname|+lastname|-city|@age%n" + "John|Smith|Seattle|20"; assertEquals(format(expected), rawFormatter.format(response)); } @Test void sanitizeData() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("city", "city", STRING))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("city", "Seattle")), - tupleValue(ImmutableMap.of("city", "=Seattle")), - tupleValue(ImmutableMap.of("city", "+Seattle")), - tupleValue(ImmutableMap.of("city", "-Seattle")), - tupleValue(ImmutableMap.of("city", "@Seattle")), - tupleValue(ImmutableMap.of("city", "Seattle=")))); - String expected = "city%n" - + "Seattle%n" - + "=Seattle%n" - + "+Seattle%n" - + "-Seattle%n" - + "@Seattle%n" - + "Seattle="; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of(new ExecutionEngine.Schema.Column("city", "city", STRING))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("city", "Seattle")), + tupleValue(ImmutableMap.of("city", "=Seattle")), + tupleValue(ImmutableMap.of("city", "+Seattle")), 
+ tupleValue(ImmutableMap.of("city", "-Seattle")), + tupleValue(ImmutableMap.of("city", "@Seattle")), + tupleValue(ImmutableMap.of("city", "Seattle=")))); + String expected = + "city%n" + + "Seattle%n" + + "=Seattle%n" + + "+Seattle%n" + + "-Seattle%n" + + "@Seattle%n" + + "Seattle="; assertEquals(format(expected), rawFormatter.format(response)); } @Test void quoteIfRequired() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("na|me", "na|me", STRING), - new ExecutionEngine.Schema.Column("||age", "||age", INTEGER))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("na|me", "John|Smith", "||age", "30|||")))); - String expected = "\"na|me\"|\"||age\"%n" - + "\"John|Smith\"|\"30|||\""; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("na|me", "na|me", STRING), + new ExecutionEngine.Schema.Column("||age", "||age", INTEGER))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList(tupleValue(ImmutableMap.of("na|me", "John|Smith", "||age", "30|||")))); + String expected = "\"na|me\"|\"||age\"%n" + "\"John|Smith\"|\"30|||\""; assertEquals(format(expected), rawFormatter.format(response)); } @@ -101,59 +123,67 @@ void formatError() { @Test void escapeSanitize() { FlatResponseFormatter escapeFormatter = new RawResponseFormatter(); - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("city", "city", STRING))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("city", "=Seattle")), - tupleValue(ImmutableMap.of("city", "||Seattle")))); - String expected = "city%n" - + "=Seattle%n" - + "\"||Seattle\""; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of(new ExecutionEngine.Schema.Column("city", "city", STRING))); + QueryResult response = + new 
QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("city", "=Seattle")), + tupleValue(ImmutableMap.of("city", "||Seattle")))); + String expected = "city%n" + "=Seattle%n" + "\"||Seattle\""; assertEquals(format(expected), escapeFormatter.format(response)); } @Test void senstiveCharater() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("city", "city", STRING))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("city", "@Seattle")), - tupleValue(ImmutableMap.of("city", "++Seattle")))); - String expected = "city%n" - + "@Seattle%n" - + "++Seattle"; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of(new ExecutionEngine.Schema.Column("city", "city", STRING))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("city", "@Seattle")), + tupleValue(ImmutableMap.of("city", "++Seattle")))); + String expected = "city%n" + "@Seattle%n" + "++Seattle"; assertEquals(format(expected), rawFormatter.format(response)); } @Test void senstiveCharaterWithSanitize() { FlatResponseFormatter testFormater = new RawResponseFormatter(); - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("city", "city", STRING))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("city", "@Seattle")), - tupleValue(ImmutableMap.of("city", "++Seattle|||")))); - String expected = "city%n" - + "@Seattle%n" - + "\"++Seattle|||\""; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of(new ExecutionEngine.Schema.Column("city", "city", STRING))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("city", "@Seattle")), + tupleValue(ImmutableMap.of("city", "++Seattle|||")))); + String expected = "city%n" + "@Seattle%n" + 
"\"++Seattle|||\""; assertEquals(format(expected), testFormater.format(response)); } @Test void replaceNullValues() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", "name", STRING), - new ExecutionEngine.Schema.Column("city", "city", STRING))); - QueryResult response = new QueryResult(schema, Arrays.asList( - tupleValue(ImmutableMap.of("name", "John","city", "Seattle")), - ExprTupleValue.fromExprValueMap( - ImmutableMap.of("firstname", LITERAL_NULL, "city", stringValue("Seattle"))), - ExprTupleValue.fromExprValueMap( - ImmutableMap.of("firstname", stringValue("John"), "city", LITERAL_MISSING)))); - String expected = "name|city%n" - + "John|Seattle%n" - + "|Seattle%n" - + "John|"; + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("name", "name", STRING), + new ExecutionEngine.Schema.Column("city", "city", STRING))); + QueryResult response = + new QueryResult( + schema, + Arrays.asList( + tupleValue(ImmutableMap.of("name", "John", "city", "Seattle")), + ExprTupleValue.fromExprValueMap( + ImmutableMap.of("firstname", LITERAL_NULL, "city", stringValue("Seattle"))), + ExprTupleValue.fromExprValueMap( + ImmutableMap.of("firstname", stringValue("John"), "city", LITERAL_MISSING)))); + String expected = "name|city%n" + "John|Seattle%n" + "|Seattle%n" + "John|"; assertEquals(format(expected), rawFormatter.format(response)); } @@ -161,5 +191,4 @@ void replaceNullValues() { void testContentType() { assertEquals(rawFormatter.contentType(), CONTENT_TYPE); } - } diff --git a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/SimpleJsonResponseFormatterTest.java b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/SimpleJsonResponseFormatterTest.java index 8b4438cf91..e5eb0f1ac7 100644 --- a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/SimpleJsonResponseFormatterTest.java +++ 
b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/SimpleJsonResponseFormatterTest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.protocol.response.format; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -25,9 +24,11 @@ class SimpleJsonResponseFormatterTest { - private final ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("firstname", null, STRING), - new ExecutionEngine.Schema.Column("age", null, INTEGER))); + private final ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("firstname", null, STRING), + new ExecutionEngine.Schema.Column("age", null, INTEGER))); @Test void formatResponse() { @@ -84,12 +85,12 @@ void formatResponsePretty() { @Test void formatResponseSchemaWithAlias() { - ExecutionEngine.Schema schema = new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("firstname", "name", STRING))); + ExecutionEngine.Schema schema = + new ExecutionEngine.Schema( + ImmutableList.of(new ExecutionEngine.Schema.Column("firstname", "name", STRING))); QueryResult response = new QueryResult( - schema, - ImmutableList.of(tupleValue(ImmutableMap.of("name", "John", "age", 20)))); + schema, ImmutableList.of(tupleValue(ImmutableMap.of("name", "John", "age", 20)))); SimpleJsonResponseFormatter formatter = new SimpleJsonResponseFormatter(COMPACT); assertEquals( "{\"schema\":[{\"name\":\"name\",\"type\":\"string\"}]," @@ -120,10 +121,13 @@ void formatResponseWithTupleValue() { new QueryResult( schema, Arrays.asList( - tupleValue(ImmutableMap - .of("name", "Smith", - "address", ImmutableMap.of("state", "WA", "street", - ImmutableMap.of("city", "seattle")))))); + tupleValue( + ImmutableMap.of( + "name", + "Smith", + "address", + ImmutableMap.of( + "state", "WA", "street", ImmutableMap.of("city", "seattle")))))); SimpleJsonResponseFormatter 
formatter = new SimpleJsonResponseFormatter(COMPACT); assertEquals( @@ -140,11 +144,13 @@ void formatResponseWithArrayValue() { new QueryResult( schema, Arrays.asList( - tupleValue(ImmutableMap - .of("name", "Smith", - "address", Arrays.asList( - ImmutableMap.of("state", "WA"), ImmutableMap.of("state", "NYC") - ))))); + tupleValue( + ImmutableMap.of( + "name", + "Smith", + "address", + Arrays.asList( + ImmutableMap.of("state", "WA"), ImmutableMap.of("state", "NYC")))))); SimpleJsonResponseFormatter formatter = new SimpleJsonResponseFormatter(COMPACT); assertEquals( "{\"schema\":[{\"name\":\"firstname\",\"type\":\"string\"}," diff --git a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/VisualizationResponseFormatterTest.java b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/VisualizationResponseFormatterTest.java index f501a53d64..a6fdd1e03e 100644 --- a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/VisualizationResponseFormatterTest.java +++ b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/VisualizationResponseFormatterTest.java @@ -24,18 +24,21 @@ import org.opensearch.sql.protocol.response.QueryResult; public class VisualizationResponseFormatterTest { - private final VisualizationResponseFormatter formatter = new VisualizationResponseFormatter( - JsonResponseFormatter.Style.COMPACT); + private final VisualizationResponseFormatter formatter = + new VisualizationResponseFormatter(JsonResponseFormatter.Style.COMPACT); @Test void formatResponse() { - QueryResult response = new QueryResult( - new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", "name", STRING), - new ExecutionEngine.Schema.Column("age", "age", INTEGER))), - ImmutableList.of(tupleValue(ImmutableMap.of("name", "John", "age", 20)), - tupleValue(ImmutableMap.of("name", "Amy", "age", 31)), - tupleValue(ImmutableMap.of("name", "Bob", "age", 28)))); + QueryResult response = + new QueryResult( + 
new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("name", "name", STRING), + new ExecutionEngine.Schema.Column("age", "age", INTEGER))), + ImmutableList.of( + tupleValue(ImmutableMap.of("name", "John", "age", 20)), + tupleValue(ImmutableMap.of("name", "Amy", "age", 31)), + tupleValue(ImmutableMap.of("name", "Bob", "age", 28)))); assertJsonEquals( "{\"data\":{" @@ -55,10 +58,12 @@ void formatResponse() { void formatResponseWithNull() { QueryResult response = new QueryResult( - new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", null, STRING), - new ExecutionEngine.Schema.Column("age", null, INTEGER))), - ImmutableList.of(tupleValue(ImmutableMap.of("name", "John", "age", LITERAL_MISSING)), + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("name", null, STRING), + new ExecutionEngine.Schema.Column("age", null, INTEGER))), + ImmutableList.of( + tupleValue(ImmutableMap.of("name", "John", "age", LITERAL_MISSING)), tupleValue(ImmutableMap.of("name", "Allen", "age", LITERAL_NULL)), tupleValue(ImmutableMap.of("name", "Smith", "age", 30)))); @@ -73,8 +78,7 @@ void formatResponseWithNull() { + "\"size\":3," + "\"status\":200" + "}", - formatter.format(response) - ); + formatter.format(response)); } @Test @@ -87,8 +91,7 @@ void clientErrorSyntaxException() { + "\"details\":\"Invalid query syntax\"" + "}," + "\"status\":400}", - formatter.format(new SyntaxCheckException("Invalid query syntax")) - ); + formatter.format(new SyntaxCheckException("Invalid query syntax"))); } @Test @@ -101,8 +104,7 @@ void clientErrorSemanticException() { + "\"details\":\"Invalid query semantics\"" + "}," + "\"status\":400}", - formatter.format(new SemanticCheckException("Invalid query semantics")) - ); + formatter.format(new SemanticCheckException("Invalid query semantics"))); } @Test @@ -115,8 +117,7 @@ void serverError() { + "\"details\":\"Execution error\"" + "}," + "\"status\":503}", - 
formatter.format(new IllegalStateException("Execution error")) - ); + formatter.format(new IllegalStateException("Execution error"))); } @Test @@ -133,22 +134,25 @@ void opensearchServerError() { + "from OpenSearch engine.\"" + "}," + "\"status\":503}", - formatter.format(new OpenSearchException("all shards failed", - new IllegalStateException("Execution error"))) - ); + formatter.format( + new OpenSearchException( + "all shards failed", new IllegalStateException("Execution error")))); } @Test void prettyStyle() { - VisualizationResponseFormatter prettyFormatter = new VisualizationResponseFormatter( - JsonResponseFormatter.Style.PRETTY); - QueryResult response = new QueryResult( - new ExecutionEngine.Schema(ImmutableList.of( - new ExecutionEngine.Schema.Column("name", "name", STRING), - new ExecutionEngine.Schema.Column("age", "age", INTEGER))), - ImmutableList.of(tupleValue(ImmutableMap.of("name", "John", "age", 20)), - tupleValue(ImmutableMap.of("name", "Amy", "age", 31)), - tupleValue(ImmutableMap.of("name", "Bob", "age", 28)))); + VisualizationResponseFormatter prettyFormatter = + new VisualizationResponseFormatter(JsonResponseFormatter.Style.PRETTY); + QueryResult response = + new QueryResult( + new ExecutionEngine.Schema( + ImmutableList.of( + new ExecutionEngine.Schema.Column("name", "name", STRING), + new ExecutionEngine.Schema.Column("age", "age", INTEGER))), + ImmutableList.of( + tupleValue(ImmutableMap.of("name", "John", "age", 20)), + tupleValue(ImmutableMap.of("name", "Amy", "age", 31)), + tupleValue(ImmutableMap.of("name", "Bob", "age", 28)))); assertJsonEquals( "{\n" @@ -179,14 +183,11 @@ void prettyStyle() { + " \"size\": 3,\n" + " \"status\": 200\n" + "}", - prettyFormatter.format(response) - ); + prettyFormatter.format(response)); } private static void assertJsonEquals(String expected, String actual) { - assertEquals( - JsonParser.parseString(expected), - JsonParser.parseString(actual)); + assertEquals(JsonParser.parseString(expected), 
JsonParser.parseString(actual)); } @Test diff --git a/spark/src/main/java/org/opensearch/sql/spark/client/EmrClientImpl.java b/spark/src/main/java/org/opensearch/sql/spark/client/EmrClientImpl.java index 1e2475c196..1a3304994b 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/client/EmrClientImpl.java +++ b/spark/src/main/java/org/opensearch/sql/spark/client/EmrClientImpl.java @@ -36,12 +36,16 @@ public class EmrClientImpl implements SparkClient { /** * Constructor for EMR Client Implementation. * - * @param emr EMR helper - * @param flint Opensearch args for flint integration jar + * @param emr EMR helper + * @param flint Opensearch args for flint integration jar * @param sparkResponse Response object to help with retrieving results from Opensearch index */ - public EmrClientImpl(AmazonElasticMapReduce emr, String emrCluster, FlintHelper flint, - SparkResponse sparkResponse, String sparkApplicationJar) { + public EmrClientImpl( + AmazonElasticMapReduce emr, + String emrCluster, + FlintHelper flint, + SparkResponse sparkResponse, + String sparkApplicationJar) { this.emr = emr; this.emrCluster = emrCluster; this.flint = flint; @@ -59,38 +63,39 @@ public JSONObject sql(String query) throws IOException { @VisibleForTesting void runEmrApplication(String query) { - HadoopJarStepConfig stepConfig = new HadoopJarStepConfig() - .withJar("command-runner.jar") - .withArgs("spark-submit", - "--class","org.opensearch.sql.SQLJob", - "--jars", - flint.getFlintIntegrationJar(), - sparkApplicationJar, - query, - SPARK_INDEX_NAME, - flint.getFlintHost(), - flint.getFlintPort(), - flint.getFlintScheme(), - flint.getFlintAuth(), - flint.getFlintRegion() - ); + HadoopJarStepConfig stepConfig = + new HadoopJarStepConfig() + .withJar("command-runner.jar") + .withArgs( + "spark-submit", + "--class", + "org.opensearch.sql.SQLJob", + "--jars", + flint.getFlintIntegrationJar(), + sparkApplicationJar, + query, + SPARK_INDEX_NAME, + flint.getFlintHost(), + flint.getFlintPort(), + 
flint.getFlintScheme(), + flint.getFlintAuth(), + flint.getFlintRegion()); - StepConfig emrstep = new StepConfig() - .withName("Spark Application") - .withActionOnFailure(ActionOnFailure.CONTINUE) - .withHadoopJarStep(stepConfig); + StepConfig emrstep = + new StepConfig() + .withName("Spark Application") + .withActionOnFailure(ActionOnFailure.CONTINUE) + .withHadoopJarStep(stepConfig); - AddJobFlowStepsRequest request = new AddJobFlowStepsRequest() - .withJobFlowId(emrCluster) - .withSteps(emrstep); + AddJobFlowStepsRequest request = + new AddJobFlowStepsRequest().withJobFlowId(emrCluster).withSteps(emrstep); AddJobFlowStepsResult result = emr.addJobFlowSteps(request); logger.info("EMR step ID: " + result.getStepIds()); String stepId = result.getStepIds().get(0); - DescribeStepRequest stepRequest = new DescribeStepRequest() - .withClusterId(emrCluster) - .withStepId(stepId); + DescribeStepRequest stepRequest = + new DescribeStepRequest().withClusterId(emrCluster).withStepId(stepId); waitForStepExecution(stepRequest); sparkResponse.setValue(stepId); @@ -117,5 +122,4 @@ private void waitForStepExecution(DescribeStepRequest stepRequest) { } } } - } diff --git a/spark/src/main/java/org/opensearch/sql/spark/client/SparkClient.java b/spark/src/main/java/org/opensearch/sql/spark/client/SparkClient.java index 99d8600dd0..b38f04680b 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/client/SparkClient.java +++ b/spark/src/main/java/org/opensearch/sql/spark/client/SparkClient.java @@ -8,15 +8,13 @@ import java.io.IOException; import org.json.JSONObject; -/** - * Interface class for Spark Client. - */ +/** Interface class for Spark Client. */ public interface SparkClient { /** * This method executes spark sql query. 
* * @param query spark sql query - * @return spark query response + * @return spark query response */ JSONObject sql(String query) throws IOException; } diff --git a/spark/src/main/java/org/opensearch/sql/spark/functions/implementation/SparkSqlFunctionImplementation.java b/spark/src/main/java/org/opensearch/sql/spark/functions/implementation/SparkSqlFunctionImplementation.java index 1936c266de..914aa80085 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/functions/implementation/SparkSqlFunctionImplementation.java +++ b/spark/src/main/java/org/opensearch/sql/spark/functions/implementation/SparkSqlFunctionImplementation.java @@ -24,9 +24,7 @@ import org.opensearch.sql.spark.storage.SparkTable; import org.opensearch.sql.storage.Table; -/** - * Spark SQL function implementation. - */ +/** Spark SQL function implementation. */ public class SparkSqlFunctionImplementation extends FunctionExpression implements TableFunctionImplementation { @@ -38,8 +36,8 @@ public class SparkSqlFunctionImplementation extends FunctionExpression * Constructor for spark sql function. 
* * @param functionName name of the function - * @param arguments a list of expressions - * @param sparkClient spark client + * @param arguments a list of expressions + * @param sparkClient spark client */ public SparkSqlFunctionImplementation( FunctionName functionName, List arguments, SparkClient sparkClient) { @@ -51,9 +49,11 @@ public SparkSqlFunctionImplementation( @Override public ExprValue valueOf(Environment valueEnv) { - throw new UnsupportedOperationException(String.format( - "Spark defined function [%s] is only " - + "supported in SOURCE clause with spark connector catalog", functionName)); + throw new UnsupportedOperationException( + String.format( + "Spark defined function [%s] is only " + + "supported in SOURCE clause with spark connector catalog", + functionName)); } @Override @@ -63,11 +63,15 @@ public ExprType type() { @Override public String toString() { - List args = arguments.stream() - .map(arg -> String.format("%s=%s", - ((NamedArgumentExpression) arg).getArgName(), - ((NamedArgumentExpression) arg).getValue().toString())) - .collect(Collectors.toList()); + List args = + arguments.stream() + .map( + arg -> + String.format( + "%s=%s", + ((NamedArgumentExpression) arg).getArgName(), + ((NamedArgumentExpression) arg).getValue().toString())) + .collect(Collectors.toList()); return String.format("%s(%s)", functionName, String.join(", ", args)); } @@ -80,23 +84,23 @@ public Table applyArguments() { * This method builds a spark query request. 
* * @param arguments spark sql function arguments - * @return spark query request + * @return spark query request */ private SparkQueryRequest buildQueryFromSqlFunction(List arguments) { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); - arguments.forEach(arg -> { - String argName = ((NamedArgumentExpression) arg).getArgName(); - Expression argValue = ((NamedArgumentExpression) arg).getValue(); - ExprValue literalValue = argValue.valueOf(); - if (argName.equals(QUERY)) { - sparkQueryRequest.setSql((String) literalValue.value()); - } else { - throw new ExpressionEvaluationException( - String.format("Invalid Function Argument:%s", argName)); - } - }); + arguments.forEach( + arg -> { + String argName = ((NamedArgumentExpression) arg).getArgName(); + Expression argValue = ((NamedArgumentExpression) arg).getValue(); + ExprValue literalValue = argValue.valueOf(); + if (argName.equals(QUERY)) { + sparkQueryRequest.setSql((String) literalValue.value()); + } else { + throw new ExpressionEvaluationException( + String.format("Invalid Function Argument:%s", argName)); + } + }); return sparkQueryRequest; } - } diff --git a/spark/src/main/java/org/opensearch/sql/spark/functions/resolver/SparkSqlTableFunctionResolver.java b/spark/src/main/java/org/opensearch/sql/spark/functions/resolver/SparkSqlTableFunctionResolver.java index 624600e1a8..a4f2a6c0fe 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/functions/resolver/SparkSqlTableFunctionResolver.java +++ b/spark/src/main/java/org/opensearch/sql/spark/functions/resolver/SparkSqlTableFunctionResolver.java @@ -22,9 +22,7 @@ import org.opensearch.sql.spark.client.SparkClient; import org.opensearch.sql.spark.functions.implementation.SparkSqlFunctionImplementation; -/** - * Function resolver for sql function of spark connector. - */ +/** Function resolver for sql function of spark connector. 
*/ @RequiredArgsConstructor public class SparkSqlTableFunctionResolver implements FunctionResolver { private final SparkClient sparkClient; @@ -35,35 +33,44 @@ public class SparkSqlTableFunctionResolver implements FunctionResolver { @Override public Pair resolve(FunctionSignature unresolvedSignature) { FunctionName functionName = FunctionName.of(SQL); - FunctionSignature functionSignature = - new FunctionSignature(functionName, List.of(STRING)); + FunctionSignature functionSignature = new FunctionSignature(functionName, List.of(STRING)); final List argumentNames = List.of(QUERY); - FunctionBuilder functionBuilder = (functionProperties, arguments) -> { - Boolean argumentsPassedByName = arguments.stream() - .noneMatch(arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); - Boolean argumentsPassedByPosition = arguments.stream() - .allMatch(arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); - if (!(argumentsPassedByName || argumentsPassedByPosition)) { - throw new SemanticCheckException("Arguments should be either passed by name or position"); - } + FunctionBuilder functionBuilder = + (functionProperties, arguments) -> { + Boolean argumentsPassedByName = + arguments.stream() + .noneMatch( + arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); + Boolean argumentsPassedByPosition = + arguments.stream() + .allMatch( + arg -> StringUtils.isEmpty(((NamedArgumentExpression) arg).getArgName())); + if (!(argumentsPassedByName || argumentsPassedByPosition)) { + throw new SemanticCheckException( + "Arguments should be either passed by name or position"); + } - if (arguments.size() != argumentNames.size()) { - throw new SemanticCheckException( - String.format("Missing arguments:[%s]", - String.join(",", argumentNames.subList(arguments.size(), argumentNames.size())))); - } + if (arguments.size() != argumentNames.size()) { + throw new SemanticCheckException( + String.format( + "Missing arguments:[%s]", + 
String.join( + ",", argumentNames.subList(arguments.size(), argumentNames.size())))); + } - if (argumentsPassedByPosition) { - List namedArguments = new ArrayList<>(); - for (int i = 0; i < arguments.size(); i++) { - namedArguments.add(new NamedArgumentExpression(argumentNames.get(i), - ((NamedArgumentExpression) arguments.get(i)).getValue())); - } - return new SparkSqlFunctionImplementation(functionName, namedArguments, sparkClient); - } - return new SparkSqlFunctionImplementation(functionName, arguments, sparkClient); - }; + if (argumentsPassedByPosition) { + List namedArguments = new ArrayList<>(); + for (int i = 0; i < arguments.size(); i++) { + namedArguments.add( + new NamedArgumentExpression( + argumentNames.get(i), + ((NamedArgumentExpression) arguments.get(i)).getValue())); + } + return new SparkSqlFunctionImplementation(functionName, namedArguments, sparkClient); + } + return new SparkSqlFunctionImplementation(functionName, arguments, sparkClient); + }; return Pair.of(functionSignature, functionBuilder); } diff --git a/spark/src/main/java/org/opensearch/sql/spark/functions/response/DefaultSparkSqlFunctionResponseHandle.java b/spark/src/main/java/org/opensearch/sql/spark/functions/response/DefaultSparkSqlFunctionResponseHandle.java index cb2b31ddc1..823ad2da29 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/functions/response/DefaultSparkSqlFunctionResponseHandle.java +++ b/spark/src/main/java/org/opensearch/sql/spark/functions/response/DefaultSparkSqlFunctionResponseHandle.java @@ -29,9 +29,7 @@ import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.executor.ExecutionEngine; -/** - * Default implementation of SparkSqlFunctionResponseHandle. - */ +/** Default implementation of SparkSqlFunctionResponseHandle. 
*/ public class DefaultSparkSqlFunctionResponseHandle implements SparkSqlFunctionResponseHandle { private Iterator responseIterator; private ExecutionEngine.Schema schema; @@ -54,8 +52,8 @@ private void constructIteratorAndSchema(JSONObject responseObject) { logger.info("Spark Application ID: " + items.getString("applicationId")); columnList = getColumnList(items.getJSONArray("schema")); for (int i = 0; i < items.getJSONArray("result").length(); i++) { - JSONObject row = new JSONObject( - items.getJSONArray("result").get(i).toString().replace("'", "\"")); + JSONObject row = + new JSONObject(items.getJSONArray("result").get(i).toString().replace("'", "\"")); LinkedHashMap linkedHashMap = extractRow(row, columnList); result.add(new ExprTupleValue(linkedHashMap)); } @@ -85,8 +83,8 @@ private static LinkedHashMap extractRow( } else if (type == ExprCoreType.DATE) { linkedHashMap.put(column.getName(), new ExprDateValue(row.getString(column.getName()))); } else if (type == ExprCoreType.TIMESTAMP) { - linkedHashMap.put(column.getName(), - new ExprTimestampValue(row.getString(column.getName()))); + linkedHashMap.put( + column.getName(), new ExprTimestampValue(row.getString(column.getName()))); } else if (type == ExprCoreType.STRING) { linkedHashMap.put(column.getName(), new ExprStringValue(row.getString(column.getName()))); } else { @@ -101,10 +99,11 @@ private List getColumnList(JSONArray schema) { List columnList = new ArrayList<>(); for (int i = 0; i < schema.length(); i++) { JSONObject column = new JSONObject(schema.get(i).toString().replace("'", "\"")); - columnList.add(new ExecutionEngine.Schema.Column( - column.get("column_name").toString(), - column.get("column_name").toString(), - getDataType(column.get("data_type").toString()))); + columnList.add( + new ExecutionEngine.Schema.Column( + column.get("column_name").toString(), + column.get("column_name").toString(), + getDataType(column.get("data_type").toString()))); } return columnList; } diff --git 
a/spark/src/main/java/org/opensearch/sql/spark/functions/response/SparkSqlFunctionResponseHandle.java b/spark/src/main/java/org/opensearch/sql/spark/functions/response/SparkSqlFunctionResponseHandle.java index da68b591eb..a9be484712 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/functions/response/SparkSqlFunctionResponseHandle.java +++ b/spark/src/main/java/org/opensearch/sql/spark/functions/response/SparkSqlFunctionResponseHandle.java @@ -8,24 +8,18 @@ import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.executor.ExecutionEngine; -/** - * Handle Spark response. - */ +/** Handle Spark response. */ public interface SparkSqlFunctionResponseHandle { - /** - * Return true if Spark response has more result. - */ + /** Return true if Spark response has more result. */ boolean hasNext(); /** - * Return Spark response as {@link ExprValue}. Attention, the method must been called when - * hasNext return true. + * Return Spark response as {@link ExprValue}. Attention, the method must been called when hasNext + * return true. */ ExprValue next(); - /** - * Return ExecutionEngine.Schema of the Spark response. - */ + /** Return ExecutionEngine.Schema of the Spark response. */ ExecutionEngine.Schema schema(); } diff --git a/spark/src/main/java/org/opensearch/sql/spark/functions/scan/SparkSqlFunctionTableScanBuilder.java b/spark/src/main/java/org/opensearch/sql/spark/functions/scan/SparkSqlFunctionTableScanBuilder.java index 28ce7dd19a..aea8f72f36 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/functions/scan/SparkSqlFunctionTableScanBuilder.java +++ b/spark/src/main/java/org/opensearch/sql/spark/functions/scan/SparkSqlFunctionTableScanBuilder.java @@ -12,9 +12,7 @@ import org.opensearch.sql.storage.TableScanOperator; import org.opensearch.sql.storage.read.TableScanBuilder; -/** - * TableScanBuilder for sql function of spark connector. - */ +/** TableScanBuilder for sql function of spark connector. 
*/ @AllArgsConstructor public class SparkSqlFunctionTableScanBuilder extends TableScanBuilder { diff --git a/spark/src/main/java/org/opensearch/sql/spark/functions/scan/SparkSqlFunctionTableScanOperator.java b/spark/src/main/java/org/opensearch/sql/spark/functions/scan/SparkSqlFunctionTableScanOperator.java index 85e854e422..a2e44affd5 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/functions/scan/SparkSqlFunctionTableScanOperator.java +++ b/spark/src/main/java/org/opensearch/sql/spark/functions/scan/SparkSqlFunctionTableScanOperator.java @@ -21,9 +21,7 @@ import org.opensearch.sql.spark.request.SparkQueryRequest; import org.opensearch.sql.storage.TableScanOperator; -/** - * This a table scan operator to handle sql table function. - */ +/** This a table scan operator to handle sql table function. */ @RequiredArgsConstructor public class SparkSqlFunctionTableScanOperator extends TableScanOperator { private final SparkClient sparkClient; @@ -34,17 +32,19 @@ public class SparkSqlFunctionTableScanOperator extends TableScanOperator { @Override public void open() { super.open(); - this.sparkResponseHandle = AccessController.doPrivileged( - (PrivilegedAction) () -> { - try { - JSONObject responseObject = sparkClient.sql(request.getSql()); - return new DefaultSparkSqlFunctionResponseHandle(responseObject); - } catch (IOException e) { - LOG.error(e.getMessage()); - throw new RuntimeException( - String.format("Error fetching data from spark server: %s", e.getMessage())); - } - }); + this.sparkResponseHandle = + AccessController.doPrivileged( + (PrivilegedAction) + () -> { + try { + JSONObject responseObject = sparkClient.sql(request.getSql()); + return new DefaultSparkSqlFunctionResponseHandle(responseObject); + } catch (IOException e) { + LOG.error(e.getMessage()); + throw new RuntimeException( + String.format("Error fetching data from spark server: %s", e.getMessage())); + } + }); } @Override diff --git 
a/spark/src/main/java/org/opensearch/sql/spark/helper/FlintHelper.java b/spark/src/main/java/org/opensearch/sql/spark/helper/FlintHelper.java index b3c3c0871a..10d880187f 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/helper/FlintHelper.java +++ b/spark/src/main/java/org/opensearch/sql/spark/helper/FlintHelper.java @@ -15,25 +15,20 @@ import lombok.Getter; public class FlintHelper { - @Getter - private final String flintIntegrationJar; - @Getter - private final String flintHost; - @Getter - private final String flintPort; - @Getter - private final String flintScheme; - @Getter - private final String flintAuth; - @Getter - private final String flintRegion; + @Getter private final String flintIntegrationJar; + @Getter private final String flintHost; + @Getter private final String flintPort; + @Getter private final String flintScheme; + @Getter private final String flintAuth; + @Getter private final String flintRegion; - /** Arguments required to write data to opensearch index using flint integration. + /** + * Arguments required to write data to opensearch index using flint integration. 
* - * @param flintHost Opensearch host for flint - * @param flintPort Opensearch port for flint integration + * @param flintHost Opensearch host for flint + * @param flintPort Opensearch port for flint integration * @param flintScheme Opensearch scheme for flint integration - * @param flintAuth Opensearch auth for flint integration + * @param flintAuth Opensearch auth for flint integration * @param flintRegion Opensearch region for flint integration */ public FlintHelper( diff --git a/spark/src/main/java/org/opensearch/sql/spark/request/SparkQueryRequest.java b/spark/src/main/java/org/opensearch/sql/spark/request/SparkQueryRequest.java index bc0944a784..94c9795161 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/request/SparkQueryRequest.java +++ b/spark/src/main/java/org/opensearch/sql/spark/request/SparkQueryRequest.java @@ -7,15 +7,10 @@ import lombok.Data; -/** - * Spark query request. - */ +/** Spark query request. */ @Data public class SparkQueryRequest { - /** - * SQL. - */ + /** SQL. 
*/ private String sql; - } diff --git a/spark/src/main/java/org/opensearch/sql/spark/response/SparkResponse.java b/spark/src/main/java/org/opensearch/sql/spark/response/SparkResponse.java index e010989019..3edb541384 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/response/SparkResponse.java +++ b/spark/src/main/java/org/opensearch/sql/spark/response/SparkResponse.java @@ -9,8 +9,6 @@ import com.google.common.annotations.VisibleForTesting; import lombok.Data; -import lombok.NoArgsConstructor; -import lombok.Setter; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.json.JSONObject; @@ -26,7 +24,6 @@ import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.SearchHit; import org.opensearch.search.builder.SearchSourceBuilder; -import org.opensearch.sql.datasources.exceptions.DataSourceNotFoundException; @Data public class SparkResponse { @@ -39,8 +36,8 @@ public class SparkResponse { * Response for spark sql query. * * @param client Opensearch client - * @param value Identifier field value - * @param field Identifier field name + * @param value Identifier field value + * @param field Identifier field name */ public SparkResponse(Client client, String value, String field) { this.client = client; @@ -67,8 +64,10 @@ private JSONObject searchInSparkIndex(QueryBuilder query) { SearchResponse searchResponse = searchResponseActionFuture.actionGet(); if (searchResponse.status().getStatus() != 200) { throw new RuntimeException( - "Fetching result from " + SPARK_INDEX_NAME + " index failed with status : " - + searchResponse.status()); + "Fetching result from " + + SPARK_INDEX_NAME + + " index failed with status : " + + searchResponse.status()); } else { JSONObject data = new JSONObject(); for (SearchHit searchHit : searchResponse.getHits().getHits()) { @@ -93,11 +92,11 @@ void deleteInSparkIndex(String id) { if (deleteResponse.getResult().equals(DocWriteResponse.Result.DELETED)) { LOG.debug("Spark result 
successfully deleted ", id); } else if (deleteResponse.getResult().equals(DocWriteResponse.Result.NOT_FOUND)) { - throw new ResourceNotFoundException("Spark result with id " - + id + " doesn't exist"); + throw new ResourceNotFoundException("Spark result with id " + id + " doesn't exist"); } else { - throw new RuntimeException("Deleting spark result information failed with : " - + deleteResponse.getResult().getLowercase()); + throw new RuntimeException( + "Deleting spark result information failed with : " + + deleteResponse.getResult().getLowercase()); } } } diff --git a/spark/src/main/java/org/opensearch/sql/spark/storage/SparkScan.java b/spark/src/main/java/org/opensearch/sql/spark/storage/SparkScan.java index 3897e8690e..395e1685a6 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/storage/SparkScan.java +++ b/spark/src/main/java/org/opensearch/sql/spark/storage/SparkScan.java @@ -14,21 +14,14 @@ import org.opensearch.sql.spark.request.SparkQueryRequest; import org.opensearch.sql.storage.TableScanOperator; -/** - * Spark scan operator. - */ +/** Spark scan operator. */ @EqualsAndHashCode(onlyExplicitlyIncluded = true, callSuper = false) @ToString(onlyExplicitlyIncluded = true) public class SparkScan extends TableScanOperator { private final SparkClient sparkClient; - @EqualsAndHashCode.Include - @Getter - @Setter - @ToString.Include - private SparkQueryRequest request; - + @EqualsAndHashCode.Include @Getter @Setter @ToString.Include private SparkQueryRequest request; /** * Constructor. 
@@ -54,5 +47,4 @@ public ExprValue next() { public String explain() { return getRequest().toString(); } - } diff --git a/spark/src/main/java/org/opensearch/sql/spark/storage/SparkStorageEngine.java b/spark/src/main/java/org/opensearch/sql/spark/storage/SparkStorageEngine.java index a5e35ecc4c..84c9c05e79 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/storage/SparkStorageEngine.java +++ b/spark/src/main/java/org/opensearch/sql/spark/storage/SparkStorageEngine.java @@ -15,17 +15,14 @@ import org.opensearch.sql.storage.StorageEngine; import org.opensearch.sql.storage.Table; -/** - * Spark storage engine implementation. - */ +/** Spark storage engine implementation. */ @RequiredArgsConstructor public class SparkStorageEngine implements StorageEngine { private final SparkClient sparkClient; @Override public Collection getFunctions() { - return Collections.singletonList( - new SparkSqlTableFunctionResolver(sparkClient)); + return Collections.singletonList(new SparkSqlTableFunctionResolver(sparkClient)); } @Override diff --git a/spark/src/main/java/org/opensearch/sql/spark/storage/SparkStorageFactory.java b/spark/src/main/java/org/opensearch/sql/spark/storage/SparkStorageFactory.java index 937679b50e..467bacbaea 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/storage/SparkStorageFactory.java +++ b/spark/src/main/java/org/opensearch/sql/spark/storage/SparkStorageFactory.java @@ -30,9 +30,7 @@ import org.opensearch.sql.storage.DataSourceFactory; import org.opensearch.sql.storage.StorageEngine; -/** - * Storage factory implementation for spark connector. - */ +/** Storage factory implementation for spark connector. 
*/ @RequiredArgsConstructor public class SparkStorageFactory implements DataSourceFactory { private final Client client; @@ -66,9 +64,7 @@ public DataSourceType getDataSourceType() { @Override public DataSource createDataSource(DataSourceMetadata metadata) { return new DataSource( - metadata.getName(), - DataSourceType.SPARK, - getStorageEngine(metadata.getProperties())); + metadata.getName(), DataSourceType.SPARK, getStorageEngine(metadata.getProperties())); } /** @@ -81,24 +77,26 @@ StorageEngine getStorageEngine(Map requiredConfig) { SparkClient sparkClient; if (requiredConfig.get(CONNECTOR_TYPE).equals(EMR)) { sparkClient = - AccessController.doPrivileged((PrivilegedAction) () -> { - validateEMRConfigProperties(requiredConfig); - return new EmrClientImpl( - getEMRClient( - requiredConfig.get(EMR_ACCESS_KEY), - requiredConfig.get(EMR_SECRET_KEY), - requiredConfig.get(EMR_REGION)), - requiredConfig.get(EMR_CLUSTER), - new FlintHelper( - requiredConfig.get(FLINT_INTEGRATION), - requiredConfig.get(FLINT_HOST), - requiredConfig.get(FLINT_PORT), - requiredConfig.get(FLINT_SCHEME), - requiredConfig.get(FLINT_AUTH), - requiredConfig.get(FLINT_REGION)), - new SparkResponse(client, null, STEP_ID_FIELD), - requiredConfig.get(SPARK_SQL_APPLICATION)); - }); + AccessController.doPrivileged( + (PrivilegedAction) + () -> { + validateEMRConfigProperties(requiredConfig); + return new EmrClientImpl( + getEMRClient( + requiredConfig.get(EMR_ACCESS_KEY), + requiredConfig.get(EMR_SECRET_KEY), + requiredConfig.get(EMR_REGION)), + requiredConfig.get(EMR_CLUSTER), + new FlintHelper( + requiredConfig.get(FLINT_INTEGRATION), + requiredConfig.get(FLINT_HOST), + requiredConfig.get(FLINT_PORT), + requiredConfig.get(FLINT_SCHEME), + requiredConfig.get(FLINT_AUTH), + requiredConfig.get(FLINT_REGION)), + new SparkResponse(client, null, STEP_ID_FIELD), + requiredConfig.get(SPARK_SQL_APPLICATION)); + }); } else { throw new InvalidParameterException("Spark connector type is invalid."); } @@ 
-110,12 +108,14 @@ private void validateEMRConfigProperties(Map dataSourceMetadataC if (dataSourceMetadataConfig.get(EMR_CLUSTER) == null || dataSourceMetadataConfig.get(EMR_AUTH_TYPE) == null) { throw new IllegalArgumentException("EMR config properties are missing."); - } else if (dataSourceMetadataConfig.get(EMR_AUTH_TYPE) - .equals(AuthenticationType.AWSSIGV4AUTH.getName()) + } else if (dataSourceMetadataConfig + .get(EMR_AUTH_TYPE) + .equals(AuthenticationType.AWSSIGV4AUTH.getName()) && (dataSourceMetadataConfig.get(EMR_ACCESS_KEY) == null - || dataSourceMetadataConfig.get(EMR_SECRET_KEY) == null)) { + || dataSourceMetadataConfig.get(EMR_SECRET_KEY) == null)) { throw new IllegalArgumentException("EMR auth keys are missing."); - } else if (!dataSourceMetadataConfig.get(EMR_AUTH_TYPE) + } else if (!dataSourceMetadataConfig + .get(EMR_AUTH_TYPE) .equals(AuthenticationType.AWSSIGV4AUTH.getName())) { throw new IllegalArgumentException("Invalid auth type."); } @@ -124,8 +124,8 @@ private void validateEMRConfigProperties(Map dataSourceMetadataC private AmazonElasticMapReduce getEMRClient( String emrAccessKey, String emrSecretKey, String emrRegion) { return AmazonElasticMapReduceClientBuilder.standard() - .withCredentials(new AWSStaticCredentialsProvider( - new BasicAWSCredentials(emrAccessKey, emrSecretKey))) + .withCredentials( + new AWSStaticCredentialsProvider(new BasicAWSCredentials(emrAccessKey, emrSecretKey))) .withRegion(emrRegion) .build(); } diff --git a/spark/src/main/java/org/opensearch/sql/spark/storage/SparkTable.java b/spark/src/main/java/org/opensearch/sql/spark/storage/SparkTable.java index 5151405db9..731c3df672 100644 --- a/spark/src/main/java/org/opensearch/sql/spark/storage/SparkTable.java +++ b/spark/src/main/java/org/opensearch/sql/spark/storage/SparkTable.java @@ -18,20 +18,14 @@ import org.opensearch.sql.storage.Table; import org.opensearch.sql.storage.read.TableScanBuilder; -/** - * Spark table implementation. 
- * This can be constructed from SparkQueryRequest. - */ +/** Spark table implementation. This can be constructed from SparkQueryRequest. */ public class SparkTable implements Table { private final SparkClient sparkClient; - @Getter - private final SparkQueryRequest sparkQueryRequest; + @Getter private final SparkQueryRequest sparkQueryRequest; - /** - * Constructor for entire Sql Request. - */ + /** Constructor for entire Sql Request. */ public SparkTable(SparkClient sparkService, SparkQueryRequest sparkQueryRequest) { this.sparkClient = sparkService; this.sparkQueryRequest = sparkQueryRequest; @@ -56,8 +50,7 @@ public Map getFieldTypes() { @Override public PhysicalPlan implement(LogicalPlan plan) { - SparkScan metricScan = - new SparkScan(sparkClient); + SparkScan metricScan = new SparkScan(sparkClient); metricScan.setRequest(sparkQueryRequest); return plan.accept(new DefaultImplementor(), metricScan); } diff --git a/spark/src/test/java/org/opensearch/sql/spark/client/EmrClientImplTest.java b/spark/src/test/java/org/opensearch/sql/spark/client/EmrClientImplTest.java index a94ac01f2f..93dc0d6bc8 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/client/EmrClientImplTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/client/EmrClientImplTest.java @@ -29,12 +29,9 @@ @ExtendWith(MockitoExtension.class) public class EmrClientImplTest { - @Mock - private AmazonElasticMapReduce emr; - @Mock - private FlintHelper flint; - @Mock - private SparkResponse sparkResponse; + @Mock private AmazonElasticMapReduce emr; + @Mock private FlintHelper flint; + @Mock private SparkResponse sparkResponse; @Test @SneakyThrows @@ -50,8 +47,8 @@ void testRunEmrApplication() { describeStepResult.setStep(step); when(emr.describeStep(any())).thenReturn(describeStepResult); - EmrClientImpl emrClientImpl = new EmrClientImpl( - emr, EMR_CLUSTER_ID, flint, sparkResponse, null); + EmrClientImpl emrClientImpl = + new EmrClientImpl(emr, EMR_CLUSTER_ID, flint, sparkResponse, null); 
emrClientImpl.runEmrApplication(QUERY); } @@ -69,12 +66,12 @@ void testRunEmrApplicationFailed() { describeStepResult.setStep(step); when(emr.describeStep(any())).thenReturn(describeStepResult); - EmrClientImpl emrClientImpl = new EmrClientImpl( - emr, EMR_CLUSTER_ID, flint, sparkResponse, null); - RuntimeException exception = Assertions.assertThrows(RuntimeException.class, - () -> emrClientImpl.runEmrApplication(QUERY)); - Assertions.assertEquals("Spark SQL application failed.", - exception.getMessage()); + EmrClientImpl emrClientImpl = + new EmrClientImpl(emr, EMR_CLUSTER_ID, flint, sparkResponse, null); + RuntimeException exception = + Assertions.assertThrows( + RuntimeException.class, () -> emrClientImpl.runEmrApplication(QUERY)); + Assertions.assertEquals("Spark SQL application failed.", exception.getMessage()); } @Test @@ -91,12 +88,12 @@ void testRunEmrApplicationCancelled() { describeStepResult.setStep(step); when(emr.describeStep(any())).thenReturn(describeStepResult); - EmrClientImpl emrClientImpl = new EmrClientImpl( - emr, EMR_CLUSTER_ID, flint, sparkResponse, null); - RuntimeException exception = Assertions.assertThrows(RuntimeException.class, - () -> emrClientImpl.runEmrApplication(QUERY)); - Assertions.assertEquals("Spark SQL application failed.", - exception.getMessage()); + EmrClientImpl emrClientImpl = + new EmrClientImpl(emr, EMR_CLUSTER_ID, flint, sparkResponse, null); + RuntimeException exception = + Assertions.assertThrows( + RuntimeException.class, () -> emrClientImpl.runEmrApplication(QUERY)); + Assertions.assertEquals("Spark SQL application failed.", exception.getMessage()); } @Test @@ -119,11 +116,12 @@ void testRunEmrApplicationRunnning() { DescribeStepResult completedDescribeStepResult = new DescribeStepResult(); completedDescribeStepResult.setStep(completedStep); - when(emr.describeStep(any())).thenReturn(runningDescribeStepResult) + when(emr.describeStep(any())) + .thenReturn(runningDescribeStepResult) 
.thenReturn(completedDescribeStepResult); - EmrClientImpl emrClientImpl = new EmrClientImpl( - emr, EMR_CLUSTER_ID, flint, sparkResponse, null); + EmrClientImpl emrClientImpl = + new EmrClientImpl(emr, EMR_CLUSTER_ID, flint, sparkResponse, null); emrClientImpl.runEmrApplication(QUERY); } @@ -147,14 +145,14 @@ void testSql() { DescribeStepResult completedDescribeStepResult = new DescribeStepResult(); completedDescribeStepResult.setStep(completedStep); - when(emr.describeStep(any())).thenReturn(runningDescribeStepResult) + when(emr.describeStep(any())) + .thenReturn(runningDescribeStepResult) .thenReturn(completedDescribeStepResult); when(sparkResponse.getResultFromOpensearchIndex()) .thenReturn(new JSONObject(getJson("select_query_response.json"))); - EmrClientImpl emrClientImpl = new EmrClientImpl( - emr, EMR_CLUSTER_ID, flint, sparkResponse, null); + EmrClientImpl emrClientImpl = + new EmrClientImpl(emr, EMR_CLUSTER_ID, flint, sparkResponse, null); emrClientImpl.sql(QUERY); - } } diff --git a/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionImplementationTest.java b/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionImplementationTest.java index 18db5b9471..120747e0d3 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionImplementationTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionImplementationTest.java @@ -27,51 +27,52 @@ @ExtendWith(MockitoExtension.class) public class SparkSqlFunctionImplementationTest { - @Mock - private SparkClient client; + @Mock private SparkClient client; @Test void testValueOfAndTypeToString() { FunctionName functionName = new FunctionName("sql"); - List namedArgumentExpressionList - = List.of(DSL.namedArgument("query", DSL.literal(QUERY))); - SparkSqlFunctionImplementation sparkSqlFunctionImplementation - = new SparkSqlFunctionImplementation(functionName, namedArgumentExpressionList, client); - UnsupportedOperationException 
exception = assertThrows(UnsupportedOperationException.class, - () -> sparkSqlFunctionImplementation.valueOf()); - assertEquals("Spark defined function [sql] is only " - + "supported in SOURCE clause with spark connector catalog", exception.getMessage()); - assertEquals("sql(query=\"select 1\")", - sparkSqlFunctionImplementation.toString()); + List namedArgumentExpressionList = + List.of(DSL.namedArgument("query", DSL.literal(QUERY))); + SparkSqlFunctionImplementation sparkSqlFunctionImplementation = + new SparkSqlFunctionImplementation(functionName, namedArgumentExpressionList, client); + UnsupportedOperationException exception = + assertThrows( + UnsupportedOperationException.class, () -> sparkSqlFunctionImplementation.valueOf()); + assertEquals( + "Spark defined function [sql] is only " + + "supported in SOURCE clause with spark connector catalog", + exception.getMessage()); + assertEquals("sql(query=\"select 1\")", sparkSqlFunctionImplementation.toString()); assertEquals(ExprCoreType.STRUCT, sparkSqlFunctionImplementation.type()); } @Test void testApplyArguments() { FunctionName functionName = new FunctionName("sql"); - List namedArgumentExpressionList - = List.of(DSL.namedArgument("query", DSL.literal(QUERY))); - SparkSqlFunctionImplementation sparkSqlFunctionImplementation - = new SparkSqlFunctionImplementation(functionName, namedArgumentExpressionList, client); - SparkTable sparkTable - = (SparkTable) sparkSqlFunctionImplementation.applyArguments(); + List namedArgumentExpressionList = + List.of(DSL.namedArgument("query", DSL.literal(QUERY))); + SparkSqlFunctionImplementation sparkSqlFunctionImplementation = + new SparkSqlFunctionImplementation(functionName, namedArgumentExpressionList, client); + SparkTable sparkTable = (SparkTable) sparkSqlFunctionImplementation.applyArguments(); assertNotNull(sparkTable.getSparkQueryRequest()); - SparkQueryRequest sparkQueryRequest - = sparkTable.getSparkQueryRequest(); + SparkQueryRequest sparkQueryRequest = 
sparkTable.getSparkQueryRequest(); assertEquals(QUERY, sparkQueryRequest.getSql()); } @Test void testApplyArgumentsException() { FunctionName functionName = new FunctionName("sql"); - List namedArgumentExpressionList - = List.of(DSL.namedArgument("query", DSL.literal(QUERY)), - DSL.namedArgument("tmp", DSL.literal(12345))); - SparkSqlFunctionImplementation sparkSqlFunctionImplementation - = new SparkSqlFunctionImplementation(functionName, namedArgumentExpressionList, client); - ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class, - () -> sparkSqlFunctionImplementation.applyArguments()); + List namedArgumentExpressionList = + List.of( + DSL.namedArgument("query", DSL.literal(QUERY)), + DSL.namedArgument("tmp", DSL.literal(12345))); + SparkSqlFunctionImplementation sparkSqlFunctionImplementation = + new SparkSqlFunctionImplementation(functionName, namedArgumentExpressionList, client); + ExpressionEvaluationException exception = + assertThrows( + ExpressionEvaluationException.class, + () -> sparkSqlFunctionImplementation.applyArguments()); assertEquals("Invalid Function Argument:tmp", exception.getMessage()); } - } diff --git a/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionTableScanBuilderTest.java b/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionTableScanBuilderTest.java index 94c87602b7..212056eb15 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionTableScanBuilderTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionTableScanBuilderTest.java @@ -18,23 +18,20 @@ import org.opensearch.sql.storage.TableScanOperator; public class SparkSqlFunctionTableScanBuilderTest { - @Mock - private SparkClient sparkClient; + @Mock private SparkClient sparkClient; - @Mock - private LogicalProject logicalProject; + @Mock private LogicalProject logicalProject; @Test void testBuild() { SparkQueryRequest sparkQueryRequest = new 
SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkSqlFunctionTableScanBuilder sparkSqlFunctionTableScanBuilder - = new SparkSqlFunctionTableScanBuilder(sparkClient, sparkQueryRequest); - TableScanOperator sqlFunctionTableScanOperator - = sparkSqlFunctionTableScanBuilder.build(); - Assertions.assertTrue(sqlFunctionTableScanOperator - instanceof SparkSqlFunctionTableScanOperator); + SparkSqlFunctionTableScanBuilder sparkSqlFunctionTableScanBuilder = + new SparkSqlFunctionTableScanBuilder(sparkClient, sparkQueryRequest); + TableScanOperator sqlFunctionTableScanOperator = sparkSqlFunctionTableScanBuilder.build(); + Assertions.assertTrue( + sqlFunctionTableScanOperator instanceof SparkSqlFunctionTableScanOperator); } @Test @@ -42,8 +39,8 @@ void testPushProject() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkSqlFunctionTableScanBuilder sparkSqlFunctionTableScanBuilder - = new SparkSqlFunctionTableScanBuilder(sparkClient, sparkQueryRequest); + SparkSqlFunctionTableScanBuilder sparkSqlFunctionTableScanBuilder = + new SparkSqlFunctionTableScanBuilder(sparkClient, sparkQueryRequest); Assertions.assertTrue(sparkSqlFunctionTableScanBuilder.pushDownProject(logicalProject)); } } diff --git a/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionTableScanOperatorTest.java b/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionTableScanOperatorTest.java index f6807f9913..586f0ef2d8 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionTableScanOperatorTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlFunctionTableScanOperatorTest.java @@ -43,8 +43,7 @@ @ExtendWith(MockitoExtension.class) public class SparkSqlFunctionTableScanOperatorTest { - @Mock - private SparkClient sparkClient; + @Mock private SparkClient sparkClient; @Test @SneakyThrows @@ -52,15 +51,14 @@ void testEmptyQueryWithException() { SparkQueryRequest 
sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator - = new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); + SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator = + new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); - when(sparkClient.sql(any())) - .thenThrow(new IOException("Error Message")); - RuntimeException runtimeException - = assertThrows(RuntimeException.class, sparkSqlFunctionTableScanOperator::open); - assertEquals("Error fetching data from spark server: Error Message", - runtimeException.getMessage()); + when(sparkClient.sql(any())).thenThrow(new IOException("Error Message")); + RuntimeException runtimeException = + assertThrows(RuntimeException.class, sparkSqlFunctionTableScanOperator::open); + assertEquals( + "Error fetching data from spark server: Error Message", runtimeException.getMessage()); } @Test @@ -69,8 +67,8 @@ void testClose() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator - = new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); + SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator = + new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); sparkSqlFunctionTableScanOperator.close(); } @@ -80,11 +78,10 @@ void testExplain() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator - = new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); + SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator = + new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); - Assertions.assertEquals("sql(select 1)", - sparkSqlFunctionTableScanOperator.explain()); + 
Assertions.assertEquals("sql(select 1)", sparkSqlFunctionTableScanOperator.explain()); } @Test @@ -93,18 +90,19 @@ void testQueryResponseIterator() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator - = new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); + SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator = + new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); - when(sparkClient.sql(any())) - .thenReturn(new JSONObject(getJson("select_query_response.json"))); + when(sparkClient.sql(any())).thenReturn(new JSONObject(getJson("select_query_response.json"))); sparkSqlFunctionTableScanOperator.open(); assertTrue(sparkSqlFunctionTableScanOperator.hasNext()); - ExprTupleValue firstRow = new ExprTupleValue(new LinkedHashMap<>() { - { - put("1", new ExprIntegerValue(1)); - } - }); + ExprTupleValue firstRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put("1", new ExprIntegerValue(1)); + } + }); assertEquals(firstRow, sparkSqlFunctionTableScanOperator.next()); Assertions.assertFalse(sparkSqlFunctionTableScanOperator.hasNext()); } @@ -115,28 +113,29 @@ void testQueryResponseAllTypes() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator - = new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); + SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator = + new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); - when(sparkClient.sql(any())) - .thenReturn(new JSONObject(getJson("all_data_type.json"))); + when(sparkClient.sql(any())).thenReturn(new JSONObject(getJson("all_data_type.json"))); sparkSqlFunctionTableScanOperator.open(); assertTrue(sparkSqlFunctionTableScanOperator.hasNext()); - ExprTupleValue firstRow = new ExprTupleValue(new 
LinkedHashMap<>() { - { - put("boolean", ExprBooleanValue.of(true)); - put("long", new ExprLongValue(922337203)); - put("integer", new ExprIntegerValue(2147483647)); - put("short", new ExprShortValue(32767)); - put("byte", new ExprByteValue(127)); - put("double", new ExprDoubleValue(9223372036854.775807)); - put("float", new ExprFloatValue(21474.83647)); - put("timestamp", new ExprDateValue("2023-07-01 10:31:30")); - put("date", new ExprTimestampValue("2023-07-01 10:31:30")); - put("string", new ExprStringValue("ABC")); - put("char", new ExprStringValue("A")); - } - }); + ExprTupleValue firstRow = + new ExprTupleValue( + new LinkedHashMap<>() { + { + put("boolean", ExprBooleanValue.of(true)); + put("long", new ExprLongValue(922337203)); + put("integer", new ExprIntegerValue(2147483647)); + put("short", new ExprShortValue(32767)); + put("byte", new ExprByteValue(127)); + put("double", new ExprDoubleValue(9223372036854.775807)); + put("float", new ExprFloatValue(21474.83647)); + put("timestamp", new ExprDateValue("2023-07-01 10:31:30")); + put("date", new ExprTimestampValue("2023-07-01 10:31:30")); + put("string", new ExprStringValue("ABC")); + put("char", new ExprStringValue("A")); + } + }); assertEquals(firstRow, sparkSqlFunctionTableScanOperator.next()); Assertions.assertFalse(sparkSqlFunctionTableScanOperator.hasNext()); } @@ -147,16 +146,15 @@ void testQueryResponseInvalidDataType() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator - = new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); + SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator = + new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); - when(sparkClient.sql(any())) - .thenReturn(new JSONObject(getJson("invalid_data_type.json"))); + when(sparkClient.sql(any())).thenReturn(new JSONObject(getJson("invalid_data_type.json"))); - 
RuntimeException exception = Assertions.assertThrows(RuntimeException.class, - () -> sparkSqlFunctionTableScanOperator.open()); - Assertions.assertEquals("Result contains invalid data type", - exception.getMessage()); + RuntimeException exception = + Assertions.assertThrows( + RuntimeException.class, () -> sparkSqlFunctionTableScanOperator.open()); + Assertions.assertEquals("Result contains invalid data type", exception.getMessage()); } @Test @@ -165,17 +163,14 @@ void testQuerySchema() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator - = new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); + SparkSqlFunctionTableScanOperator sparkSqlFunctionTableScanOperator = + new SparkSqlFunctionTableScanOperator(sparkClient, sparkQueryRequest); - when(sparkClient.sql(any())) - .thenReturn( - new JSONObject(getJson("select_query_response.json"))); + when(sparkClient.sql(any())).thenReturn(new JSONObject(getJson("select_query_response.json"))); sparkSqlFunctionTableScanOperator.open(); ArrayList columns = new ArrayList<>(); columns.add(new ExecutionEngine.Schema.Column("1", "1", ExprCoreType.INTEGER)); ExecutionEngine.Schema expectedSchema = new ExecutionEngine.Schema(columns); assertEquals(expectedSchema, sparkSqlFunctionTableScanOperator.schema()); } - } diff --git a/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlTableFunctionResolverTest.java b/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlTableFunctionResolverTest.java index e18fac36de..a828ac76c4 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlTableFunctionResolverTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/functions/SparkSqlTableFunctionResolverTest.java @@ -35,107 +35,106 @@ @ExtendWith(MockitoExtension.class) public class SparkSqlTableFunctionResolverTest { - @Mock - private SparkClient client; + @Mock private 
SparkClient client; - @Mock - private FunctionProperties functionProperties; + @Mock private FunctionProperties functionProperties; @Test void testResolve() { - SparkSqlTableFunctionResolver sqlTableFunctionResolver - = new SparkSqlTableFunctionResolver(client); + SparkSqlTableFunctionResolver sqlTableFunctionResolver = + new SparkSqlTableFunctionResolver(client); FunctionName functionName = FunctionName.of("sql"); - List expressions - = List.of(DSL.namedArgument("query", DSL.literal(QUERY))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - Pair resolution - = sqlTableFunctionResolver.resolve(functionSignature); + List expressions = List.of(DSL.namedArgument("query", DSL.literal(QUERY))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); + Pair resolution = + sqlTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, sqlTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING), resolution.getKey().getParamTypeList()); FunctionBuilder functionBuilder = resolution.getValue(); - TableFunctionImplementation functionImplementation - = (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); + TableFunctionImplementation functionImplementation = + (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); assertTrue(functionImplementation instanceof SparkSqlFunctionImplementation); - SparkTable sparkTable - = (SparkTable) functionImplementation.applyArguments(); + SparkTable sparkTable = (SparkTable) functionImplementation.applyArguments(); assertNotNull(sparkTable.getSparkQueryRequest()); - SparkQueryRequest sparkQueryRequest = - sparkTable.getSparkQueryRequest(); + SparkQueryRequest sparkQueryRequest = 
sparkTable.getSparkQueryRequest(); assertEquals(QUERY, sparkQueryRequest.getSql()); } @Test void testArgumentsPassedByPosition() { - SparkSqlTableFunctionResolver sqlTableFunctionResolver - = new SparkSqlTableFunctionResolver(client); + SparkSqlTableFunctionResolver sqlTableFunctionResolver = + new SparkSqlTableFunctionResolver(client); FunctionName functionName = FunctionName.of("sql"); - List expressions - = List.of(DSL.namedArgument(null, DSL.literal(QUERY))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); + List expressions = List.of(DSL.namedArgument(null, DSL.literal(QUERY))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); - Pair resolution - = sqlTableFunctionResolver.resolve(functionSignature); + Pair resolution = + sqlTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, sqlTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING), resolution.getKey().getParamTypeList()); FunctionBuilder functionBuilder = resolution.getValue(); - TableFunctionImplementation functionImplementation - = (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); + TableFunctionImplementation functionImplementation = + (TableFunctionImplementation) functionBuilder.apply(functionProperties, expressions); assertTrue(functionImplementation instanceof SparkSqlFunctionImplementation); - SparkTable sparkTable - = (SparkTable) functionImplementation.applyArguments(); + SparkTable sparkTable = (SparkTable) functionImplementation.applyArguments(); assertNotNull(sparkTable.getSparkQueryRequest()); - SparkQueryRequest sparkQueryRequest = - sparkTable.getSparkQueryRequest(); + SparkQueryRequest sparkQueryRequest = sparkTable.getSparkQueryRequest(); 
assertEquals(QUERY, sparkQueryRequest.getSql()); } @Test void testMixedArgumentTypes() { - SparkSqlTableFunctionResolver sqlTableFunctionResolver - = new SparkSqlTableFunctionResolver(client); + SparkSqlTableFunctionResolver sqlTableFunctionResolver = + new SparkSqlTableFunctionResolver(client); FunctionName functionName = FunctionName.of("sql"); - List expressions - = List.of(DSL.namedArgument("query", DSL.literal(QUERY)), - DSL.namedArgument(null, DSL.literal(12345))); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - Pair resolution - = sqlTableFunctionResolver.resolve(functionSignature); + List expressions = + List.of( + DSL.namedArgument("query", DSL.literal(QUERY)), + DSL.namedArgument(null, DSL.literal(12345))); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); + Pair resolution = + sqlTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, sqlTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING), resolution.getKey().getParamTypeList()); - SemanticCheckException exception = assertThrows(SemanticCheckException.class, - () -> resolution.getValue().apply(functionProperties, expressions)); + SemanticCheckException exception = + assertThrows( + SemanticCheckException.class, + () -> resolution.getValue().apply(functionProperties, expressions)); assertEquals("Arguments should be either passed by name or position", exception.getMessage()); } @Test void testWrongArgumentsSizeWhenPassedByName() { - SparkSqlTableFunctionResolver sqlTableFunctionResolver - = new SparkSqlTableFunctionResolver(client); + SparkSqlTableFunctionResolver sqlTableFunctionResolver = + new SparkSqlTableFunctionResolver(client); FunctionName functionName = FunctionName.of("sql"); - List 
expressions - = List.of(); - FunctionSignature functionSignature = new FunctionSignature(functionName, expressions - .stream().map(Expression::type).collect(Collectors.toList())); - Pair resolution - = sqlTableFunctionResolver.resolve(functionSignature); + List expressions = List.of(); + FunctionSignature functionSignature = + new FunctionSignature( + functionName, expressions.stream().map(Expression::type).collect(Collectors.toList())); + Pair resolution = + sqlTableFunctionResolver.resolve(functionSignature); assertEquals(functionName, resolution.getKey().getFunctionName()); assertEquals(functionName, sqlTableFunctionResolver.getFunctionName()); assertEquals(List.of(STRING), resolution.getKey().getParamTypeList()); - SemanticCheckException exception = assertThrows(SemanticCheckException.class, - () -> resolution.getValue().apply(functionProperties, expressions)); + SemanticCheckException exception = + assertThrows( + SemanticCheckException.class, + () -> resolution.getValue().apply(functionProperties, expressions)); assertEquals("Missing arguments:[query]", exception.getMessage()); } - } diff --git a/spark/src/test/java/org/opensearch/sql/spark/response/SparkResponseTest.java b/spark/src/test/java/org/opensearch/sql/spark/response/SparkResponseTest.java index abc4c81626..211561ac72 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/response/SparkResponseTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/response/SparkResponseTest.java @@ -32,18 +32,12 @@ @ExtendWith(MockitoExtension.class) public class SparkResponseTest { - @Mock - private Client client; - @Mock - private SearchResponse searchResponse; - @Mock - private DeleteResponse deleteResponse; - @Mock - private SearchHit searchHit; - @Mock - private ActionFuture searchResponseActionFuture; - @Mock - private ActionFuture deleteResponseActionFuture; + @Mock private Client client; + @Mock private SearchResponse searchResponse; + @Mock private DeleteResponse deleteResponse; + @Mock private 
SearchHit searchHit; + @Mock private ActionFuture searchResponseActionFuture; + @Mock private ActionFuture deleteResponseActionFuture; @Test public void testGetResultFromOpensearchIndex() { @@ -53,12 +47,8 @@ public void testGetResultFromOpensearchIndex() { when(searchResponse.getHits()) .thenReturn( new SearchHits( - new SearchHit[] {searchHit}, - new TotalHits(1, TotalHits.Relation.EQUAL_TO), - 1.0F)); - Mockito.when(searchHit.getSourceAsMap()) - .thenReturn(Map.of("stepId", EMR_CLUSTER_ID)); - + new SearchHit[] {searchHit}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F)); + Mockito.when(searchHit.getSourceAsMap()).thenReturn(Map.of("stepId", EMR_CLUSTER_ID)); when(client.delete(any())).thenReturn(deleteResponseActionFuture); when(deleteResponseActionFuture.actionGet()).thenReturn(deleteResponse); @@ -75,11 +65,13 @@ public void testInvalidSearchResponse() { when(searchResponse.status()).thenReturn(RestStatus.NO_CONTENT); SparkResponse sparkResponse = new SparkResponse(client, EMR_CLUSTER_ID, "stepId"); - RuntimeException exception = assertThrows(RuntimeException.class, - () -> sparkResponse.getResultFromOpensearchIndex()); + RuntimeException exception = + assertThrows(RuntimeException.class, () -> sparkResponse.getResultFromOpensearchIndex()); Assertions.assertEquals( - "Fetching result from " + SPARK_INDEX_NAME - + " index failed with status : " + RestStatus.NO_CONTENT, + "Fetching result from " + + SPARK_INDEX_NAME + + " index failed with status : " + + RestStatus.NO_CONTENT, exception.getMessage()); } @@ -104,8 +96,9 @@ public void testNotFoundDeleteResponse() { when(deleteResponse.getResult()).thenReturn(DocWriteResponse.Result.NOT_FOUND); SparkResponse sparkResponse = new SparkResponse(client, EMR_CLUSTER_ID, "stepId"); - RuntimeException exception = assertThrows(ResourceNotFoundException.class, - () -> sparkResponse.deleteInSparkIndex("123")); + RuntimeException exception = + assertThrows( + ResourceNotFoundException.class, () -> 
sparkResponse.deleteInSparkIndex("123")); Assertions.assertEquals("Spark result with id 123 doesn't exist", exception.getMessage()); } @@ -116,8 +109,8 @@ public void testInvalidDeleteResponse() { when(deleteResponse.getResult()).thenReturn(DocWriteResponse.Result.NOOP); SparkResponse sparkResponse = new SparkResponse(client, EMR_CLUSTER_ID, "stepId"); - RuntimeException exception = assertThrows(RuntimeException.class, - () -> sparkResponse.deleteInSparkIndex("123")); + RuntimeException exception = + assertThrows(RuntimeException.class, () -> sparkResponse.deleteInSparkIndex("123")); Assertions.assertEquals( "Deleting spark result information failed with : noop", exception.getMessage()); } diff --git a/spark/src/test/java/org/opensearch/sql/spark/storage/SparkScanTest.java b/spark/src/test/java/org/opensearch/sql/spark/storage/SparkScanTest.java index c57142f580..971db3c33c 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/storage/SparkScanTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/storage/SparkScanTest.java @@ -19,8 +19,7 @@ @ExtendWith(MockitoExtension.class) public class SparkScanTest { - @Mock - private SparkClient sparkClient; + @Mock private SparkClient sparkClient; @Test @SneakyThrows @@ -36,8 +35,6 @@ void testQueryResponseIteratorForQueryRangeFunction() { void testExplain() { SparkScan sparkScan = new SparkScan(sparkClient); sparkScan.getRequest().setSql(QUERY); - assertEquals( - "SparkQueryRequest(sql=select 1)", - sparkScan.explain()); + assertEquals("SparkQueryRequest(sql=select 1)", sparkScan.explain()); } } diff --git a/spark/src/test/java/org/opensearch/sql/spark/storage/SparkStorageEngineTest.java b/spark/src/test/java/org/opensearch/sql/spark/storage/SparkStorageEngineTest.java index d42e123678..5e7ec76cdb 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/storage/SparkStorageEngineTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/storage/SparkStorageEngineTest.java @@ -22,14 +22,12 @@ 
@ExtendWith(MockitoExtension.class) public class SparkStorageEngineTest { - @Mock - private SparkClient client; + @Mock private SparkClient client; @Test public void getFunctions() { SparkStorageEngine engine = new SparkStorageEngine(client); - Collection functionResolverCollection - = engine.getFunctions(); + Collection functionResolverCollection = engine.getFunctions(); assertNotNull(functionResolverCollection); assertEquals(1, functionResolverCollection.size()); assertTrue( @@ -39,8 +37,10 @@ public void getFunctions() { @Test public void getTable() { SparkStorageEngine engine = new SparkStorageEngine(client); - RuntimeException exception = assertThrows(RuntimeException.class, - () -> engine.getTable(new DataSourceSchemaName("spark", "default"), "")); + RuntimeException exception = + assertThrows( + RuntimeException.class, + () -> engine.getTable(new DataSourceSchemaName("spark", "default"), "")); assertEquals("Unable to get table from storage engine.", exception.getMessage()); } } diff --git a/spark/src/test/java/org/opensearch/sql/spark/storage/SparkStorageFactoryTest.java b/spark/src/test/java/org/opensearch/sql/spark/storage/SparkStorageFactoryTest.java index c68adf2039..eb93cdabfe 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/storage/SparkStorageFactoryTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/storage/SparkStorageFactoryTest.java @@ -24,17 +24,14 @@ @ExtendWith(MockitoExtension.class) public class SparkStorageFactoryTest { - @Mock - private Settings settings; + @Mock private Settings settings; - @Mock - private Client client; + @Mock private Client client; @Test void testGetConnectorType() { SparkStorageFactory sparkStorageFactory = new SparkStorageFactory(client, settings); - Assertions.assertEquals( - DataSourceType.SPARK, sparkStorageFactory.getDataSourceType()); + Assertions.assertEquals(DataSourceType.SPARK, sparkStorageFactory.getDataSourceType()); } @Test @@ -48,8 +45,7 @@ void testGetStorageEngine() { 
properties.put("emr.auth.secret_key", "secret_key"); properties.put("emr.auth.region", "region"); SparkStorageFactory sparkStorageFactory = new SparkStorageFactory(client, settings); - StorageEngine storageEngine - = sparkStorageFactory.getStorageEngine(properties); + StorageEngine storageEngine = sparkStorageFactory.getStorageEngine(properties); Assertions.assertTrue(storageEngine instanceof SparkStorageEngine); } @@ -59,10 +55,11 @@ void testInvalidConnectorType() { HashMap properties = new HashMap<>(); properties.put("spark.connector", "random"); SparkStorageFactory sparkStorageFactory = new SparkStorageFactory(client, settings); - InvalidParameterException exception = Assertions.assertThrows(InvalidParameterException.class, - () -> sparkStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("Spark connector type is invalid.", - exception.getMessage()); + InvalidParameterException exception = + Assertions.assertThrows( + InvalidParameterException.class, + () -> sparkStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals("Spark connector type is invalid.", exception.getMessage()); } @Test @@ -72,10 +69,10 @@ void testMissingAuth() { properties.put("spark.connector", "emr"); properties.put("emr.cluster", EMR_CLUSTER_ID); SparkStorageFactory sparkStorageFactory = new SparkStorageFactory(client, settings); - IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, - () -> sparkStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("EMR config properties are missing.", - exception.getMessage()); + IllegalArgumentException exception = + Assertions.assertThrows( + IllegalArgumentException.class, () -> sparkStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals("EMR config properties are missing.", exception.getMessage()); } @Test @@ -86,10 +83,10 @@ void testUnsupportedEmrAuth() { properties.put("emr.cluster", EMR_CLUSTER_ID); properties.put("emr.auth.type", 
"basic"); SparkStorageFactory sparkStorageFactory = new SparkStorageFactory(client, settings); - IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, - () -> sparkStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("Invalid auth type.", - exception.getMessage()); + IllegalArgumentException exception = + Assertions.assertThrows( + IllegalArgumentException.class, () -> sparkStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals("Invalid auth type.", exception.getMessage()); } @Test @@ -99,10 +96,10 @@ void testMissingCluster() { properties.put("spark.connector", "emr"); properties.put("emr.auth.type", "awssigv4"); SparkStorageFactory sparkStorageFactory = new SparkStorageFactory(client, settings); - IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, - () -> sparkStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("EMR config properties are missing.", - exception.getMessage()); + IllegalArgumentException exception = + Assertions.assertThrows( + IllegalArgumentException.class, () -> sparkStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals("EMR config properties are missing.", exception.getMessage()); } @Test @@ -113,10 +110,10 @@ void testMissingAuthKeys() { properties.put("emr.cluster", EMR_CLUSTER_ID); properties.put("emr.auth.type", "awssigv4"); SparkStorageFactory sparkStorageFactory = new SparkStorageFactory(client, settings); - IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, - () -> sparkStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("EMR auth keys are missing.", - exception.getMessage()); + IllegalArgumentException exception = + Assertions.assertThrows( + IllegalArgumentException.class, () -> sparkStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals("EMR auth keys are missing.", exception.getMessage()); } @Test @@ 
-128,10 +125,10 @@ void testMissingAuthSecretKey() { properties.put("emr.auth.type", "awssigv4"); properties.put("emr.auth.access_key", "test"); SparkStorageFactory sparkStorageFactory = new SparkStorageFactory(client, settings); - IllegalArgumentException exception = Assertions.assertThrows(IllegalArgumentException.class, - () -> sparkStorageFactory.getStorageEngine(properties)); - Assertions.assertEquals("EMR auth keys are missing.", - exception.getMessage()); + IllegalArgumentException exception = + Assertions.assertThrows( + IllegalArgumentException.class, () -> sparkStorageFactory.getStorageEngine(properties)); + Assertions.assertEquals("EMR auth keys are missing.", exception.getMessage()); } @Test @@ -178,5 +175,4 @@ void testSetSparkJars() { DataSource dataSource = new SparkStorageFactory(client, settings).createDataSource(metadata); Assertions.assertTrue(dataSource.getStorageEngine() instanceof SparkStorageEngine); } - } diff --git a/spark/src/test/java/org/opensearch/sql/spark/storage/SparkTableTest.java b/spark/src/test/java/org/opensearch/sql/spark/storage/SparkTableTest.java index 39bd2eb199..a70d4ba69e 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/storage/SparkTableTest.java +++ b/spark/src/test/java/org/opensearch/sql/spark/storage/SparkTableTest.java @@ -31,26 +31,23 @@ @ExtendWith(MockitoExtension.class) public class SparkTableTest { - @Mock - private SparkClient client; + @Mock private SparkClient client; @Test void testUnsupportedOperation() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); - SparkTable sparkTable = - new SparkTable(client, sparkQueryRequest); + SparkTable sparkTable = new SparkTable(client, sparkQueryRequest); assertThrows(UnsupportedOperationException.class, sparkTable::exists); - assertThrows(UnsupportedOperationException.class, - () -> sparkTable.create(Collections.emptyMap())); + assertThrows( + UnsupportedOperationException.class, () -> sparkTable.create(Collections.emptyMap())); } @Test void 
testCreateScanBuilderWithSqlTableFunction() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkTable sparkTable = - new SparkTable(client, sparkQueryRequest); + SparkTable sparkTable = new SparkTable(client, sparkQueryRequest); TableScanBuilder tableScanBuilder = sparkTable.createScanBuilder(); Assertions.assertNotNull(tableScanBuilder); Assertions.assertTrue(tableScanBuilder instanceof SparkSqlFunctionTableScanBuilder); @@ -59,8 +56,7 @@ void testCreateScanBuilderWithSqlTableFunction() { @Test @SneakyThrows void testGetFieldTypesFromSparkQueryRequest() { - SparkTable sparkTable - = new SparkTable(client, new SparkQueryRequest()); + SparkTable sparkTable = new SparkTable(client, new SparkQueryRequest()); Map expectedFieldTypes = new HashMap<>(); Map fieldTypes = sparkTable.getFieldTypes(); @@ -73,10 +69,9 @@ void testGetFieldTypesFromSparkQueryRequest() { void testImplementWithSqlFunction() { SparkQueryRequest sparkQueryRequest = new SparkQueryRequest(); sparkQueryRequest.setSql(QUERY); - SparkTable sparkMetricTable = - new SparkTable(client, sparkQueryRequest); - PhysicalPlan plan = sparkMetricTable.implement( - new SparkSqlFunctionTableScanBuilder(client, sparkQueryRequest)); + SparkTable sparkMetricTable = new SparkTable(client, sparkQueryRequest); + PhysicalPlan plan = + sparkMetricTable.implement(new SparkSqlFunctionTableScanBuilder(client, sparkQueryRequest)); assertTrue(plan instanceof SparkSqlFunctionTableScanOperator); } } diff --git a/spark/src/test/java/org/opensearch/sql/spark/utils/TestUtils.java b/spark/src/test/java/org/opensearch/sql/spark/utils/TestUtils.java index b480e6d9d9..ca77006d9c 100644 --- a/spark/src/test/java/org/opensearch/sql/spark/utils/TestUtils.java +++ b/spark/src/test/java/org/opensearch/sql/spark/utils/TestUtils.java @@ -12,6 +12,7 @@ public class TestUtils { /** * Get Json document from the files in resources folder. + * * @param filename filename. * @return String. 
* @throws IOException IOException. @@ -21,5 +22,4 @@ public static String getJson(String filename) throws IOException { return new String( Objects.requireNonNull(classLoader.getResourceAsStream(filename)).readAllBytes()); } - } diff --git a/sql/src/main/java/org/opensearch/sql/sql/SQLService.java b/sql/src/main/java/org/opensearch/sql/sql/SQLService.java index 91ec00cdd5..e1ca778453 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/SQLService.java +++ b/sql/src/main/java/org/opensearch/sql/sql/SQLService.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql; import java.util.Optional; @@ -21,9 +20,7 @@ import org.opensearch.sql.sql.parser.AstBuilder; import org.opensearch.sql.sql.parser.AstStatementBuilder; -/** - * SQL service. - */ +/** SQL service. */ @RequiredArgsConstructor public class SQLService { @@ -69,15 +66,19 @@ private AbstractPlan plan( if (request.getCursor().isPresent()) { // Handle v2 cursor here -- legacy cursor was handled earlier. if (isExplainRequest) { - throw new UnsupportedOperationException("Explain of a paged query continuation " - + "is not supported. Use `explain` for the initial query request."); + throw new UnsupportedOperationException( + "Explain of a paged query continuation " + + "is not supported. 
Use `explain` for the initial query request."); } if (request.isCursorCloseRequest()) { - return queryExecutionFactory.createCloseCursor(request.getCursor().get(), - queryListener.orElse(null)); + return queryExecutionFactory.createCloseCursor( + request.getCursor().get(), queryListener.orElse(null)); } - return queryExecutionFactory.create(request.getCursor().get(), - isExplainRequest, queryListener.orElse(null), explainListener.orElse(null)); + return queryExecutionFactory.create( + request.getCursor().get(), + isExplainRequest, + queryListener.orElse(null), + explainListener.orElse(null)); } else { // 1.Parse query and convert parse tree (CST) to abstract syntax tree (AST) ParseTree cst = parser.parse(request.getQuery()); @@ -90,8 +91,7 @@ private AbstractPlan plan( .fetchSize(request.getFetchSize()) .build())); - return queryExecutionFactory.create( - statement, queryListener, explainListener); + return queryExecutionFactory.create(statement, queryListener, explainListener); } } } diff --git a/sql/src/main/java/org/opensearch/sql/sql/antlr/AnonymizerListener.java b/sql/src/main/java/org/opensearch/sql/sql/antlr/AnonymizerListener.java index bd7b5cbedf..0d1b89f7a9 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/antlr/AnonymizerListener.java +++ b/sql/src/main/java/org/opensearch/sql/sql/antlr/AnonymizerListener.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.antlr; import static org.opensearch.sql.sql.antlr.parser.OpenSearchSQLLexer.BACKTICK_QUOTE_ID; @@ -31,21 +30,17 @@ import org.antlr.v4.runtime.tree.ParseTreeListener; import org.antlr.v4.runtime.tree.TerminalNode; -/** - * Parse tree listener for anonymizing SQL requests. - */ +/** Parse tree listener for anonymizing SQL requests. 
*/ public class AnonymizerListener implements ParseTreeListener { private String anonymizedQueryString = ""; private static final int NO_TYPE = -1; private int previousType = NO_TYPE; @Override - public void enterEveryRule(ParserRuleContext ctx) { - } + public void enterEveryRule(ParserRuleContext ctx) {} @Override - public void exitEveryRule(ParserRuleContext ctx) { - } + public void exitEveryRule(ParserRuleContext ctx) {} @Override public void visitTerminal(TerminalNode node) { @@ -57,10 +52,11 @@ public void visitTerminal(TerminalNode node) { int token = node.getSymbol().getType(); boolean isDotIdentifiers = token == DOT || previousType == DOT; boolean isComma = token == COMMA; - boolean isEqualComparison = ((token == EQUAL_SYMBOL) + boolean isEqualComparison = + ((token == EQUAL_SYMBOL) && (previousType == LESS_SYMBOL - || previousType == GREATER_SYMBOL - || previousType == EXCLAMATION_SYMBOL)); + || previousType == GREATER_SYMBOL + || previousType == EXCLAMATION_SYMBOL)); boolean isNotEqualComparisonAlternative = previousType == LESS_SYMBOL && token == GREATER_SYMBOL; if (!isDotIdentifiers && !isComma && !isEqualComparison && !isNotEqualComparisonAlternative) { @@ -103,9 +99,7 @@ public void visitTerminal(TerminalNode node) { } @Override - public void visitErrorNode(ErrorNode node) { - - } + public void visitErrorNode(ErrorNode node) {} public String getAnonymizedQueryString() { return "(" + anonymizedQueryString + ")"; diff --git a/sql/src/main/java/org/opensearch/sql/sql/antlr/SQLSyntaxParser.java b/sql/src/main/java/org/opensearch/sql/sql/antlr/SQLSyntaxParser.java index 4f7b925718..d1a6adc236 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/antlr/SQLSyntaxParser.java +++ b/sql/src/main/java/org/opensearch/sql/sql/antlr/SQLSyntaxParser.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.antlr; import org.antlr.v4.runtime.CommonTokenStream; @@ -16,16 +15,15 @@ import 
org.opensearch.sql.sql.antlr.parser.OpenSearchSQLLexer; import org.opensearch.sql.sql.antlr.parser.OpenSearchSQLParser; -/** - * SQL syntax parser which encapsulates an ANTLR parser. - */ +/** SQL syntax parser which encapsulates an ANTLR parser. */ public class SQLSyntaxParser implements Parser { private static final Logger LOG = LogManager.getLogger(SQLSyntaxParser.class); /** * Parse a SQL query by ANTLR parser. - * @param query a SQL query - * @return parse tree root + * + * @param query a SQL query + * @return parse tree root */ @Override public ParseTree parse(String query) { diff --git a/sql/src/main/java/org/opensearch/sql/sql/domain/SQLQueryRequest.java b/sql/src/main/java/org/opensearch/sql/sql/domain/SQLQueryRequest.java index c9321f5775..f46c7c794d 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/domain/SQLQueryRequest.java +++ b/sql/src/main/java/org/opensearch/sql/sql/domain/SQLQueryRequest.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.domain; import java.util.Collections; @@ -20,43 +19,30 @@ import org.json.JSONObject; import org.opensearch.sql.protocol.response.format.Format; -/** - * SQL query request. - */ +/** SQL query request. */ @ToString @EqualsAndHashCode @RequiredArgsConstructor public class SQLQueryRequest { private static final String QUERY_FIELD_CURSOR = "cursor"; - private static final Set SUPPORTED_FIELDS = Set.of( - "query", "fetch_size", "parameters", QUERY_FIELD_CURSOR); + private static final Set SUPPORTED_FIELDS = + Set.of("query", "fetch_size", "parameters", QUERY_FIELD_CURSOR); private static final String QUERY_PARAMS_FORMAT = "format"; private static final String QUERY_PARAMS_SANITIZE = "sanitize"; - /** - * JSON payload in REST request. - */ + /** JSON payload in REST request. */ private final JSONObject jsonContent; - /** - * SQL query. - */ - @Getter - private final String query; + /** SQL query. */ + @Getter private final String query; - /** - * Request path. 
- */ + /** Request path. */ private final String path; - /** - * Request format. - */ + /** Request format. */ private final String format; - /** - * Request params. - */ + /** Request params. */ private Map params = Collections.emptyMap(); @Getter @@ -65,11 +51,13 @@ public class SQLQueryRequest { private String cursor; - /** - * Constructor of SQLQueryRequest that passes request params. - */ - public SQLQueryRequest(JSONObject jsonContent, String query, String path, - Map params, String cursor) { + /** Constructor of SQLQueryRequest that passes request params. */ + public SQLQueryRequest( + JSONObject jsonContent, + String query, + String path, + Map params, + String cursor) { this.jsonContent = jsonContent; this.query = query; this.path = path; @@ -80,24 +68,26 @@ public SQLQueryRequest(JSONObject jsonContent, String query, String path, } /** - * Pre-check if the request can be supported by meeting ALL the following criteria: - * 1.Only supported fields present in request body, ex. "filter" and "cursor" are not supported - * 2.Response format is default or can be supported. + * Pre-check if the request can be supported by meeting ALL the following criteria: 1.Only + * supported fields present in request body, ex. "filter" and "cursor" are not supported + * 2.Response format is default or can be supported. * * @return true if supported. 
*/ public boolean isSupported() { var noCursor = !isCursor(); var noQuery = query == null; - var noUnsupportedParams = params.isEmpty() - || (params.size() == 1 && params.containsKey(QUERY_PARAMS_FORMAT)); + var noUnsupportedParams = + params.isEmpty() || (params.size() == 1 && params.containsKey(QUERY_PARAMS_FORMAT)); var noContent = jsonContent == null || jsonContent.isEmpty(); - return ((!noCursor && noQuery - && noUnsupportedParams && noContent) // if cursor is given, but other things - || (noCursor && !noQuery)) // or if cursor is not given, but query - && isOnlySupportedFieldInPayload() // and request has supported fields only - && isSupportedFormat(); // and request is in supported format + return ((!noCursor + && noQuery + && noUnsupportedParams + && noContent) // if cursor is given, but other things + || (noCursor && !noQuery)) // or if cursor is not given, but query + && isOnlySupportedFieldInPayload() // and request has supported fields only + && isSupportedFormat(); // and request is in supported format } private boolean isCursor() { @@ -106,6 +96,7 @@ private boolean isCursor() { /** * Check if request is to explain rather than execute the query. + * * @return true if it is an explain request */ public boolean isExplainRequest() { @@ -116,16 +107,14 @@ public boolean isCursorCloseRequest() { return path.endsWith("/close"); } - /** - * Decide on the formatter by the requested format. - */ + /** Decide on the formatter by the requested format. 
*/ public Format format() { Optional optionalFormat = Format.of(format); if (optionalFormat.isPresent()) { return optionalFormat.get(); } else { throw new IllegalArgumentException( - String.format(Locale.ROOT,"response in %s format is not supported.", format)); + String.format(Locale.ROOT, "response in %s format is not supported.", format)); } } @@ -155,5 +144,4 @@ private boolean shouldSanitize(Map params) { } return true; } - } diff --git a/sql/src/main/java/org/opensearch/sql/sql/parser/AstAggregationBuilder.java b/sql/src/main/java/org/opensearch/sql/sql/parser/AstAggregationBuilder.java index bd4464d00e..e46147b7a3 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/parser/AstAggregationBuilder.java +++ b/sql/src/main/java/org/opensearch/sql/sql/parser/AstAggregationBuilder.java @@ -3,7 +3,6 @@ * SPDX-License-Identifier: Apache-2.0 */ - package org.opensearch.sql.sql.parser; import static java.util.Collections.emptyList; @@ -27,6 +26,8 @@ import org.opensearch.sql.sql.parser.context.QuerySpecification; /** + * + * *
SelectExpressionAnalyzerTest
  * AST aggregation builder that builds AST aggregation node for the following scenarios:
  *
@@ -59,9 +60,7 @@
 @RequiredArgsConstructor
 public class AstAggregationBuilder extends OpenSearchSQLParserBaseVisitor {
 
-  /**
-   * Query specification that contains info collected beforehand.
-   */
+  /** Query specification that contains info collected beforehand. */
   private final QuerySpecification querySpec;
 
   @Override
@@ -78,10 +77,7 @@ public UnresolvedPlan visit(ParseTree groupByClause) {
 
   private UnresolvedPlan buildExplicitAggregation() {
     List groupByItems = replaceGroupByItemIfAliasOrOrdinal();
-    return new Aggregation(
-        new ArrayList<>(querySpec.getAggregators()),
-        emptyList(),
-        groupByItems);
+    return new Aggregation(new ArrayList<>(querySpec.getAggregators()), emptyList(), groupByItems);
   }
 
   private UnresolvedPlan buildImplicitAggregation() {
@@ -89,33 +85,32 @@ private UnresolvedPlan buildImplicitAggregation() {
 
     if (invalidSelectItem.isPresent()) {
       // Report semantic error to avoid fall back to old engine again
-      throw new SemanticCheckException(StringUtils.format(
-          "Explicit GROUP BY clause is required because expression [%s] "
-              + "contains non-aggregated column", invalidSelectItem.get()));
+      throw new SemanticCheckException(
+          StringUtils.format(
+              "Explicit GROUP BY clause is required because expression [%s] "
+                  + "contains non-aggregated column",
+              invalidSelectItem.get()));
     }
 
     return new Aggregation(
-        new ArrayList<>(querySpec.getAggregators()),
-        emptyList(),
-        querySpec.getGroupByItems());
+        new ArrayList<>(querySpec.getAggregators()), emptyList(), querySpec.getGroupByItems());
   }
 
   private List replaceGroupByItemIfAliasOrOrdinal() {
-    return querySpec.getGroupByItems()
-                    .stream()
-                    .map(querySpec::replaceIfAliasOrOrdinal)
-                    .map(expr -> new Alias(expr.toString(), expr))
-                    .collect(Collectors.toList());
+    return querySpec.getGroupByItems().stream()
+        .map(querySpec::replaceIfAliasOrOrdinal)
+        .map(expr -> new Alias(expr.toString(), expr))
+        .collect(Collectors.toList());
   }
 
   /**
-   * Find non-aggregate item in SELECT clause. Note that literal is special which is not required
-   * to be applied by aggregate function.
+   * Find non-aggregate item in SELECT clause. Note that literal is special which is not required to
+   * be applied by aggregate function.
    */
   private Optional findNonAggregatedItemInSelect() {
     return querySpec.getSelectItems().stream()
-                                     .filter(this::isNonAggregateOrLiteralExpression)
-                                     .findFirst();
+        .filter(this::isNonAggregateOrLiteralExpression)
+        .findFirst();
   }
 
   private boolean isAggregatorNotFoundAnywhere() {
@@ -132,8 +127,7 @@ private boolean isNonAggregateOrLiteralExpression(UnresolvedExpression expr) {
     }
 
     List children = expr.getChild();
-    return children.stream().anyMatch(child ->
-        isNonAggregateOrLiteralExpression((UnresolvedExpression) child));
+    return children.stream()
+        .anyMatch(child -> isNonAggregateOrLiteralExpression((UnresolvedExpression) child));
   }
-
 }
diff --git a/sql/src/main/java/org/opensearch/sql/sql/parser/AstBuilder.java b/sql/src/main/java/org/opensearch/sql/sql/parser/AstBuilder.java
index 020889c082..ab96f16263 100644
--- a/sql/src/main/java/org/opensearch/sql/sql/parser/AstBuilder.java
+++ b/sql/src/main/java/org/opensearch/sql/sql/parser/AstBuilder.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.sql.parser;
 
 import static java.util.Collections.emptyList;
@@ -43,22 +42,18 @@
 import org.opensearch.sql.sql.antlr.parser.OpenSearchSQLParserBaseVisitor;
 import org.opensearch.sql.sql.parser.context.ParsingContext;
 
-/**
- * Abstract syntax tree (AST) builder.
- */
+/** Abstract syntax tree (AST) builder. */
 @RequiredArgsConstructor
 public class AstBuilder extends OpenSearchSQLParserBaseVisitor {
 
   private final AstExpressionBuilder expressionBuilder = new AstExpressionBuilder();
 
-  /**
-   * Parsing context stack that contains context for current query parsing.
-   */
+  /** Parsing context stack that contains context for current query parsing. */
   private final ParsingContext context = new ParsingContext();
 
   /**
-   * SQL query to get original token text. This is necessary because token.getText() returns
-   * text without whitespaces or other characters discarded by lexer.
+   * SQL query to get original token text. This is necessary because token.getText() returns text
+   * without whitespaces or other characters discarded by lexer.
    */
   private final String query;
 
@@ -91,8 +86,7 @@ public UnresolvedPlan visitQuerySpecification(QuerySpecificationContext queryCon
 
     if (queryContext.fromClause() == null) {
       Optional allFields =
-          project.getProjectList().stream().filter(node -> node instanceof AllFields)
-              .findFirst();
+          project.getProjectList().stream().filter(node -> node instanceof AllFields).findFirst();
       if (allFields.isPresent()) {
         throw new SyntaxCheckException("No FROM clause found for select all");
       }
@@ -119,9 +113,8 @@ public UnresolvedPlan visitQuerySpecification(QuerySpecificationContext queryCon
 
   @Override
   public UnresolvedPlan visitSelectClause(SelectClauseContext ctx) {
-    ImmutableList.Builder builder =
-        new ImmutableList.Builder<>();
-    if (ctx.selectElements().star != null) { //TODO: project operator should be required?
+    ImmutableList.Builder builder = new ImmutableList.Builder<>();
+    if (ctx.selectElements().star != null) { // TODO: project operator should be required?
       builder.add(AllFields.of());
     }
     ctx.selectElements().selectElement().forEach(field -> builder.add(visitSelectItem(field)));
@@ -132,8 +125,7 @@ public UnresolvedPlan visitSelectClause(SelectClauseContext ctx) {
   public UnresolvedPlan visitLimitClause(OpenSearchSQLParser.LimitClauseContext ctx) {
     return new Limit(
         Integer.parseInt(ctx.limit.getText()),
-        ctx.offset == null ? 0 : Integer.parseInt(ctx.offset.getText())
-    );
+        ctx.offset == null ? 0 : Integer.parseInt(ctx.offset.getText()));
   }
 
   @Override
@@ -165,29 +157,26 @@ public UnresolvedPlan visitFromClause(FromClauseContext ctx) {
   }
 
   /**
-   * Ensure NESTED function is not used in HAVING clause and fallback to legacy engine.
-   * Can remove when support is added for NESTED function in HAVING clause.
+   * Ensure NESTED function is not used in HAVING clause and fallback to legacy engine. Can remove
+   * when support is added for NESTED function in HAVING clause.
+   *
    * @param func : Function in HAVING clause
    */
   private void verifySupportsCondition(UnresolvedExpression func) {
     if (func instanceof Function) {
-      if (((Function) func).getFuncName().equalsIgnoreCase(
-          BuiltinFunctionName.NESTED.name()
-      )) {
+      if (((Function) func).getFuncName().equalsIgnoreCase(BuiltinFunctionName.NESTED.name())) {
         throw new SyntaxCheckException(
-            "Falling back to legacy engine. Nested function is not supported in the HAVING clause."
-        );
+            "Falling back to legacy engine. Nested function is not supported in the HAVING"
+                + " clause.");
       }
-      ((Function)func).getFuncArgs().stream()
-          .forEach(e -> verifySupportsCondition(e)
-      );
+      ((Function) func).getFuncArgs().stream().forEach(e -> verifySupportsCondition(e));
     }
   }
 
   @Override
   public UnresolvedPlan visitTableAsRelation(TableAsRelationContext ctx) {
-    String tableAlias = (ctx.alias() == null) ? null
-        : StringUtils.unquoteIdentifier(ctx.alias().getText());
+    String tableAlias =
+        (ctx.alias() == null) ? null : StringUtils.unquoteIdentifier(ctx.alias().getText());
     return new Relation(visitAstExpression(ctx.tableName()), tableAlias);
   }
 
@@ -228,5 +217,4 @@ private UnresolvedExpression visitSelectItem(SelectElementContext ctx) {
       return new Alias(name, expr, alias);
     }
   }
-
 }
diff --git a/sql/src/main/java/org/opensearch/sql/sql/parser/AstExpressionBuilder.java b/sql/src/main/java/org/opensearch/sql/sql/parser/AstExpressionBuilder.java
index 808edebad1..06d9e93a69 100644
--- a/sql/src/main/java/org/opensearch/sql/sql/parser/AstExpressionBuilder.java
+++ b/sql/src/main/java/org/opensearch/sql/sql/parser/AstExpressionBuilder.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.sql.parser;
 
 import static org.opensearch.sql.ast.dsl.AstDSL.between;
@@ -104,7 +103,6 @@
 import org.opensearch.sql.ast.tree.Sort.SortOption;
 import org.opensearch.sql.common.utils.StringUtils;
 import org.opensearch.sql.expression.function.BuiltinFunctionName;
-import org.opensearch.sql.sql.antlr.parser.OpenSearchSQLParser;
 import org.opensearch.sql.sql.antlr.parser.OpenSearchSQLParser.AlternateMultiMatchQueryContext;
 import org.opensearch.sql.sql.antlr.parser.OpenSearchSQLParser.AndExpressionContext;
 import org.opensearch.sql.sql.antlr.parser.OpenSearchSQLParser.ColumnNameContext;
@@ -115,9 +113,7 @@
 import org.opensearch.sql.sql.antlr.parser.OpenSearchSQLParser.TableNameContext;
 import org.opensearch.sql.sql.antlr.parser.OpenSearchSQLParserBaseVisitor;
 
-/**
- * Expression builder to parse text to expression in AST.
- */
+/** Expression builder to parse text to expression in AST. */
 public class AstExpressionBuilder extends OpenSearchSQLParserBaseVisitor {
 
   @Override
@@ -143,9 +139,7 @@ public UnresolvedExpression visitQualifiedName(QualifiedNameContext ctx) {
   @Override
   public UnresolvedExpression visitMathExpressionAtom(MathExpressionAtomContext ctx) {
     return new Function(
-        ctx.mathOperator.getText(),
-        Arrays.asList(visit(ctx.left), visit(ctx.right))
-    );
+        ctx.mathOperator.getText(), Arrays.asList(visit(ctx.left), visit(ctx.right)));
   }
 
   @Override
@@ -154,11 +148,8 @@ public UnresolvedExpression visitNestedExpressionAtom(NestedExpressionAtomContex
   }
 
   @Override
-  public UnresolvedExpression visitNestedAllFunctionCall(
-      NestedAllFunctionCallContext ctx) {
-    return new NestedAllTupleFields(
-        visitQualifiedName(ctx.allTupleFields().path).toString()
-    );
+  public UnresolvedExpression visitNestedAllFunctionCall(NestedAllFunctionCallContext ctx) {
+    return new NestedAllTupleFields(visitQualifiedName(ctx.allTupleFields().path).toString());
   }
 
   @Override
@@ -169,39 +160,36 @@ public UnresolvedExpression visitScalarFunctionCall(ScalarFunctionCallContext ct
   @Override
   public UnresolvedExpression visitGetFormatFunctionCall(GetFormatFunctionCallContext ctx) {
     return new Function(
-        ctx.getFormatFunction().GET_FORMAT().toString(),
-        getFormatFunctionArguments(ctx));
+        ctx.getFormatFunction().GET_FORMAT().toString(), getFormatFunctionArguments(ctx));
   }
 
   @Override
-  public UnresolvedExpression visitHighlightFunctionCall(
-      HighlightFunctionCallContext ctx) {
+  public UnresolvedExpression visitHighlightFunctionCall(HighlightFunctionCallContext ctx) {
     ImmutableMap.Builder builder = ImmutableMap.builder();
-    ctx.highlightFunction().highlightArg().forEach(v -> builder.put(
-        v.highlightArgName().getText().toLowerCase(),
-        new Literal(StringUtils.unquoteText(v.highlightArgValue().getText()),
-            DataType.STRING))
-    );
+    ctx.highlightFunction()
+        .highlightArg()
+        .forEach(
+            v ->
+                builder.put(
+                    v.highlightArgName().getText().toLowerCase(),
+                    new Literal(
+                        StringUtils.unquoteText(v.highlightArgValue().getText()),
+                        DataType.STRING)));
 
-    return new HighlightFunction(visit(ctx.highlightFunction().relevanceField()),
-        builder.build());
+    return new HighlightFunction(visit(ctx.highlightFunction().relevanceField()), builder.build());
   }
 
-
   @Override
   public UnresolvedExpression visitTimestampFunctionCall(TimestampFunctionCallContext ctx) {
     return new Function(
-        ctx.timestampFunction().timestampFunctionName().getText(),
-        timestampFunctionArguments(ctx));
+        ctx.timestampFunction().timestampFunctionName().getText(), timestampFunctionArguments(ctx));
   }
 
   @Override
-  public UnresolvedExpression visitPositionFunction(
-          PositionFunctionContext ctx) {
+  public UnresolvedExpression visitPositionFunction(PositionFunctionContext ctx) {
     return new Function(
-            POSITION.getName().getFunctionName(),
-            Arrays.asList(visitFunctionArg(ctx.functionArg(0)),
-                visitFunctionArg(ctx.functionArg(1))));
+        POSITION.getName().getFunctionName(),
+        Arrays.asList(visitFunctionArg(ctx.functionArg(0)), visitFunctionArg(ctx.functionArg(1))));
   }
 
   @Override
@@ -219,8 +207,7 @@ public UnresolvedExpression visitColumnFilter(ColumnFilterContext ctx) {
   }
 
   @Override
-  public UnresolvedExpression visitShowDescribePattern(
-      ShowDescribePatternContext ctx) {
+  public UnresolvedExpression visitShowDescribePattern(ShowDescribePatternContext ctx) {
     if (ctx.compatibleID() != null) {
       return stringLiteral(ctx.compatibleID().getText());
     } else {
@@ -241,21 +228,18 @@ public UnresolvedExpression visitWindowFunctionClause(WindowFunctionClauseContex
 
     List partitionByList = Collections.emptyList();
     if (overClause.partitionByClause() != null) {
-      partitionByList = overClause.partitionByClause()
-                                  .expression()
-                                  .stream()
-                                  .map(this::visit)
-                                  .collect(Collectors.toList());
+      partitionByList =
+          overClause.partitionByClause().expression().stream()
+              .map(this::visit)
+              .collect(Collectors.toList());
     }
 
     List> sortList = Collections.emptyList();
     if (overClause.orderByClause() != null) {
-      sortList = overClause.orderByClause()
-                           .orderByElement()
-                           .stream()
-                           .map(item -> ImmutablePair.of(
-                               createSortOption(item), visit(item.expression())))
-                           .collect(Collectors.toList());
+      sortList =
+          overClause.orderByClause().orderByElement().stream()
+              .map(item -> ImmutablePair.of(createSortOption(item), visit(item.expression())))
+              .collect(Collectors.toList());
     }
     return new WindowFunction(visit(ctx.function), partitionByList, sortList);
   }
@@ -268,17 +252,12 @@ public UnresolvedExpression visitScalarWindowFunction(ScalarWindowFunctionContex
   @Override
   public UnresolvedExpression visitRegularAggregateFunctionCall(
       RegularAggregateFunctionCallContext ctx) {
-    return new AggregateFunction(
-        ctx.functionName.getText(),
-        visitFunctionArg(ctx.functionArg()));
+    return new AggregateFunction(ctx.functionName.getText(), visitFunctionArg(ctx.functionArg()));
   }
 
   @Override
   public UnresolvedExpression visitDistinctCountFunctionCall(DistinctCountFunctionCallContext ctx) {
-    return new AggregateFunction(
-        ctx.COUNT().getText(),
-        visitFunctionArg(ctx.functionArg()),
-        true);
+    return new AggregateFunction(ctx.COUNT().getText(), visitFunctionArg(ctx.functionArg()), true);
   }
 
   @Override
@@ -294,18 +273,16 @@ public UnresolvedExpression visitFilterClause(FilterClauseContext ctx) {
   @Override
   public UnresolvedExpression visitIsNullPredicate(IsNullPredicateContext ctx) {
     return new Function(
-        ctx.nullNotnull().NOT() == null ? IS_NULL.getName().getFunctionName() :
-            IS_NOT_NULL.getName().getFunctionName(),
+        ctx.nullNotnull().NOT() == null
+            ? IS_NULL.getName().getFunctionName()
+            : IS_NOT_NULL.getName().getFunctionName(),
         Arrays.asList(visit(ctx.predicate())));
   }
 
   @Override
   public UnresolvedExpression visitBetweenPredicate(BetweenPredicateContext ctx) {
     UnresolvedExpression func =
-        between(
-            visit(ctx.predicate(0)),
-            visit(ctx.predicate(1)),
-            visit(ctx.predicate(2)));
+        between(visit(ctx.predicate(0)), visit(ctx.predicate(1)), visit(ctx.predicate(2)));
 
     if (ctx.NOT() != null) {
       func = not(func);
@@ -316,26 +293,21 @@ public UnresolvedExpression visitBetweenPredicate(BetweenPredicateContext ctx) {
   @Override
   public UnresolvedExpression visitLikePredicate(LikePredicateContext ctx) {
     return new Function(
-        ctx.NOT() == null ? LIKE.getName().getFunctionName() :
-            NOT_LIKE.getName().getFunctionName(),
+        ctx.NOT() == null ? LIKE.getName().getFunctionName() : NOT_LIKE.getName().getFunctionName(),
         Arrays.asList(visit(ctx.left), visit(ctx.right)));
   }
 
   @Override
   public UnresolvedExpression visitRegexpPredicate(RegexpPredicateContext ctx) {
-    return new Function(REGEXP.getName().getFunctionName(),
-            Arrays.asList(visit(ctx.left), visit(ctx.right)));
+    return new Function(
+        REGEXP.getName().getFunctionName(), Arrays.asList(visit(ctx.left), visit(ctx.right)));
   }
 
   @Override
   public UnresolvedExpression visitInPredicate(InPredicateContext ctx) {
     UnresolvedExpression field = visit(ctx.predicate());
-    List inLists = ctx
-        .expressions()
-        .expression()
-        .stream()
-        .map(this::visit)
-        .collect(Collectors.toList());
+    List inLists =
+        ctx.expressions().expression().stream().map(this::visit).collect(Collectors.toList());
     UnresolvedExpression in = AstDSL.in(field, inLists);
     return ctx.NOT() != null ? AstDSL.not(in) : in;
   }
@@ -400,34 +372,30 @@ public UnresolvedExpression visitTimeLiteral(TimeLiteralContext ctx) {
   }
 
   @Override
-  public UnresolvedExpression visitTimestampLiteral(
-      TimestampLiteralContext ctx) {
+  public UnresolvedExpression visitTimestampLiteral(TimestampLiteralContext ctx) {
     return AstDSL.timestampLiteral(StringUtils.unquoteText(ctx.timestamp.getText()));
   }
 
   @Override
   public UnresolvedExpression visitIntervalLiteral(IntervalLiteralContext ctx) {
-    return new Interval(
-        visit(ctx.expression()), IntervalUnit.of(ctx.intervalUnit().getText()));
+    return new Interval(visit(ctx.expression()), IntervalUnit.of(ctx.intervalUnit().getText()));
   }
 
   @Override
-  public UnresolvedExpression visitBinaryComparisonPredicate(
-      BinaryComparisonPredicateContext ctx) {
+  public UnresolvedExpression visitBinaryComparisonPredicate(BinaryComparisonPredicateContext ctx) {
     String functionName = ctx.comparisonOperator().getText();
     return new Function(
         functionName.equals("<>") ? "!=" : functionName,
-        Arrays.asList(visit(ctx.left), visit(ctx.right))
-    );
+        Arrays.asList(visit(ctx.left), visit(ctx.right)));
   }
 
   @Override
   public UnresolvedExpression visitCaseFunctionCall(CaseFunctionCallContext ctx) {
     UnresolvedExpression caseValue = (ctx.expression() == null) ? null : visit(ctx.expression());
-    List whenStatements = ctx.caseFuncAlternative()
-                                   .stream()
-                                   .map(when -> (When) visit(when))
-                                   .collect(Collectors.toList());
+    List whenStatements =
+        ctx.caseFuncAlternative().stream()
+            .map(when -> (When) visit(when))
+            .collect(Collectors.toList());
     UnresolvedExpression elseStatement = (ctx.elseArg == null) ? null : visit(ctx.elseArg);
 
     return new Case(caseValue, whenStatements, elseStatement);
@@ -439,23 +407,19 @@ public UnresolvedExpression visitCaseFuncAlternative(CaseFuncAlternativeContext
   }
 
   @Override
-  public UnresolvedExpression visitDataTypeFunctionCall(
-      DataTypeFunctionCallContext ctx) {
+  public UnresolvedExpression visitDataTypeFunctionCall(DataTypeFunctionCallContext ctx) {
     return new Cast(visit(ctx.expression()), visit(ctx.convertedDataType()));
   }
 
   @Override
-  public UnresolvedExpression visitConvertedDataType(
-      ConvertedDataTypeContext ctx) {
+  public UnresolvedExpression visitConvertedDataType(ConvertedDataTypeContext ctx) {
     return AstDSL.stringLiteral(ctx.getText());
   }
 
   @Override
-  public UnresolvedExpression visitNoFieldRelevanceFunction(
-          NoFieldRelevanceFunctionContext ctx) {
+  public UnresolvedExpression visitNoFieldRelevanceFunction(NoFieldRelevanceFunctionContext ctx) {
     return new Function(
-            ctx.noFieldRelevanceFunctionName().getText().toLowerCase(),
-            noFieldRelevanceArguments(ctx));
+        ctx.noFieldRelevanceFunctionName().getText().toLowerCase(), noFieldRelevanceArguments(ctx));
   }
 
   @Override
@@ -481,10 +445,9 @@ public UnresolvedExpression visitMultiFieldRelevanceFunction(
     // 'MULTI_MATCH('query'='query_val', 'fields'='*fields_val')'
     String funcName = StringUtils.unquoteText(ctx.multiFieldRelevanceFunctionName().getText());
     if ((funcName.equalsIgnoreCase(BuiltinFunctionName.MULTI_MATCH.toString())
-        || funcName.equalsIgnoreCase(BuiltinFunctionName.MULTIMATCH.toString())
-        || funcName.equalsIgnoreCase(BuiltinFunctionName.MULTIMATCHQUERY.toString()))
-        && !ctx.getRuleContexts(AlternateMultiMatchQueryContext.class)
-        .isEmpty()) {
+            || funcName.equalsIgnoreCase(BuiltinFunctionName.MULTIMATCH.toString())
+            || funcName.equalsIgnoreCase(BuiltinFunctionName.MULTIMATCHQUERY.toString()))
+        && !ctx.getRuleContexts(AlternateMultiMatchQueryContext.class).isEmpty()) {
       return new Function(
           ctx.multiFieldRelevanceFunctionName().getText().toLowerCase(),
           alternateMultiMatchArguments(ctx));
@@ -517,78 +480,81 @@ public UnresolvedExpression visitScoreRelevanceFunction(ScoreRelevanceFunctionCo
     return new ScoreFunction(visit(ctx.relevanceFunction()), weight);
   }
 
-  private Function buildFunction(String functionName,
-                                 List arg) {
+  private Function buildFunction(String functionName, List arg) {
     return new Function(
-        functionName,
-        arg
-            .stream()
-            .map(this::visitFunctionArg)
-            .collect(Collectors.toList())
-    );
+        functionName, arg.stream().map(this::visitFunctionArg).collect(Collectors.toList()));
   }
 
   @Override
   public UnresolvedExpression visitExtractFunctionCall(ExtractFunctionCallContext ctx) {
     return new Function(
-        ctx.extractFunction().EXTRACT().toString(),
-        getExtractFunctionArguments(ctx));
+        ctx.extractFunction().EXTRACT().toString(), getExtractFunctionArguments(ctx));
   }
 
-
   private QualifiedName visitIdentifiers(List identifiers) {
     return new QualifiedName(
         identifiers.stream()
-                   .map(RuleContext::getText)
-                   .map(StringUtils::unquoteIdentifier)
-                   .collect(Collectors.toList()));
+            .map(RuleContext::getText)
+            .map(StringUtils::unquoteIdentifier)
+            .collect(Collectors.toList()));
   }
 
-  private void fillRelevanceArgs(List args,
-                                 ImmutableList.Builder builder) {
+  private void fillRelevanceArgs(
+      List args, ImmutableList.Builder builder) {
     // To support old syntax we must support argument keys as quoted strings.
-    args.forEach(v -> builder.add(v.argName == null
-        ? new UnresolvedArgument(v.relevanceArgName().getText().toLowerCase(),
-            new Literal(StringUtils.unquoteText(v.relevanceArgValue().getText()),
-            DataType.STRING))
-        : new UnresolvedArgument(StringUtils.unquoteText(v.argName.getText()).toLowerCase(),
-            new Literal(StringUtils.unquoteText(v.argVal.getText()), DataType.STRING))));
+    args.forEach(
+        v ->
+            builder.add(
+                v.argName == null
+                    ? new UnresolvedArgument(
+                        v.relevanceArgName().getText().toLowerCase(),
+                        new Literal(
+                            StringUtils.unquoteText(v.relevanceArgValue().getText()),
+                            DataType.STRING))
+                    : new UnresolvedArgument(
+                        StringUtils.unquoteText(v.argName.getText()).toLowerCase(),
+                        new Literal(
+                            StringUtils.unquoteText(v.argVal.getText()), DataType.STRING))));
   }
 
   private List noFieldRelevanceArguments(
-          NoFieldRelevanceFunctionContext ctx) {
+      NoFieldRelevanceFunctionContext ctx) {
     // all the arguments are defaulted to string values
     // to skip environment resolving and function signature resolving
     ImmutableList.Builder builder = ImmutableList.builder();
-    builder.add(new UnresolvedArgument("query",
-            new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING)));
+    builder.add(
+        new UnresolvedArgument(
+            "query", new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING)));
     fillRelevanceArgs(ctx.relevanceArg(), builder);
     return builder.build();
   }
 
   private List singleFieldRelevanceArguments(
-        SingleFieldRelevanceFunctionContext ctx) {
+      SingleFieldRelevanceFunctionContext ctx) {
     // all the arguments are defaulted to string values
     // to skip environment resolving and function signature resolving
     ImmutableList.Builder builder = ImmutableList.builder();
-    builder.add(new UnresolvedArgument("field",
-        new QualifiedName(StringUtils.unquoteText(ctx.field.getText()))));
-    builder.add(new UnresolvedArgument("query",
-        new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING)));
+    builder.add(
+        new UnresolvedArgument(
+            "field", new QualifiedName(StringUtils.unquoteText(ctx.field.getText()))));
+    builder.add(
+        new UnresolvedArgument(
+            "query", new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING)));
     fillRelevanceArgs(ctx.relevanceArg(), builder);
     return builder.build();
   }
 
-
   private List altSingleFieldRelevanceFunctionArguments(
       AltSingleFieldRelevanceFunctionContext ctx) {
     // all the arguments are defaulted to string values
     // to skip environment resolving and function signature resolving
     ImmutableList.Builder builder = ImmutableList.builder();
-    builder.add(new UnresolvedArgument("field",
-        new QualifiedName(StringUtils.unquoteText(ctx.field.getText()))));
-    builder.add(new UnresolvedArgument("query",
-        new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING)));
+    builder.add(
+        new UnresolvedArgument(
+            "field", new QualifiedName(StringUtils.unquoteText(ctx.field.getText()))));
+    builder.add(
+        new UnresolvedArgument(
+            "query", new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING)));
     fillRelevanceArgs(ctx.relevanceArg(), builder);
     return builder.build();
   }
@@ -598,43 +564,41 @@ private List multiFieldRelevanceArguments(
     // all the arguments are defaulted to string values
     // to skip environment resolving and function signature resolving
     ImmutableList.Builder builder = ImmutableList.builder();
-    var fields = new RelevanceFieldList(ctx
-        .getRuleContexts(RelevanceFieldAndWeightContext.class)
-        .stream()
-        .collect(Collectors.toMap(
-            f -> StringUtils.unquoteText(f.field.getText()),
-            f -> (f.weight == null) ? 1F : Float.parseFloat(f.weight.getText()))));
+    var fields =
+        new RelevanceFieldList(
+            ctx.getRuleContexts(RelevanceFieldAndWeightContext.class).stream()
+                .collect(
+                    Collectors.toMap(
+                        f -> StringUtils.unquoteText(f.field.getText()),
+                        f -> (f.weight == null) ? 1F : Float.parseFloat(f.weight.getText()))));
     builder.add(new UnresolvedArgument("fields", fields));
-    builder.add(new UnresolvedArgument("query",
-        new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING)));
+    builder.add(
+        new UnresolvedArgument(
+            "query", new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING)));
     fillRelevanceArgs(ctx.relevanceArg(), builder);
     return builder.build();
   }
 
-  private List getFormatFunctionArguments(
-      GetFormatFunctionCallContext ctx) {
-    List args = Arrays.asList(
-        new Literal(ctx.getFormatFunction().getFormatType().getText(), DataType.STRING),
-        visitFunctionArg(ctx.getFormatFunction().functionArg())
-    );
+  private List getFormatFunctionArguments(GetFormatFunctionCallContext ctx) {
+    List args =
+        Arrays.asList(
+            new Literal(ctx.getFormatFunction().getFormatType().getText(), DataType.STRING),
+            visitFunctionArg(ctx.getFormatFunction().functionArg()));
     return args;
   }
 
-  private List timestampFunctionArguments(
-      TimestampFunctionCallContext ctx) {
-    List args = Arrays.asList(
-        new Literal(
-            ctx.timestampFunction().simpleDateTimePart().getText(),
-            DataType.STRING),
-        visitFunctionArg(ctx.timestampFunction().firstArg),
-        visitFunctionArg(ctx.timestampFunction().secondArg)
-    );
+  private List timestampFunctionArguments(TimestampFunctionCallContext ctx) {
+    List args =
+        Arrays.asList(
+            new Literal(ctx.timestampFunction().simpleDateTimePart().getText(), DataType.STRING),
+            visitFunctionArg(ctx.timestampFunction().firstArg),
+            visitFunctionArg(ctx.timestampFunction().secondArg));
     return args;
   }
 
   /**
-   * Adds support for multi_match alternate syntax like
-   * MULTI_MATCH('query'='Dale', 'fields'='*name').
+   * Adds support for multi_match alternate syntax like MULTI_MATCH('query'='Dale',
+   * 'fields'='*name').
    *
    * @param ctx : Context for multi field relevance function.
    * @return : Returns list of all arguments for relevance function.
@@ -646,25 +610,32 @@ private List alternateMultiMatchArguments(
     ImmutableList.Builder builder = ImmutableList.builder();
     Map fieldAndWeightMap = new HashMap<>();
 
-    String[] fieldAndWeights = StringUtils.unquoteText(
-        ctx.getRuleContexts(AlternateMultiMatchFieldContext.class)
-                .stream().findFirst().get().argVal.getText()).split(",");
+    String[] fieldAndWeights =
+        StringUtils.unquoteText(
+                ctx.getRuleContexts(AlternateMultiMatchFieldContext.class).stream()
+                    .findFirst()
+                    .get()
+                    .argVal
+                    .getText())
+            .split(",");
 
     for (var fieldAndWeight : fieldAndWeights) {
       String[] splitFieldAndWeights = fieldAndWeight.split("\\^");
-      fieldAndWeightMap.put(splitFieldAndWeights[0],
+      fieldAndWeightMap.put(
+          splitFieldAndWeights[0],
           splitFieldAndWeights.length > 1 ? Float.parseFloat(splitFieldAndWeights[1]) : 1F);
     }
-    builder.add(new UnresolvedArgument("fields",
-        new RelevanceFieldList(fieldAndWeightMap)));
-
-    ctx.getRuleContexts(AlternateMultiMatchQueryContext.class)
-        .stream().findFirst().ifPresent(
-              arg ->
-                    builder.add(new UnresolvedArgument("query",
+    builder.add(new UnresolvedArgument("fields", new RelevanceFieldList(fieldAndWeightMap)));
+
+    ctx.getRuleContexts(AlternateMultiMatchQueryContext.class).stream()
+        .findFirst()
+        .ifPresent(
+            arg ->
+                builder.add(
+                    new UnresolvedArgument(
+                        "query",
                         new Literal(
-                            StringUtils.unquoteText(arg.argVal.getText()), DataType.STRING)))
-        );
+                            StringUtils.unquoteText(arg.argVal.getText()), DataType.STRING))));
 
     fillRelevanceArgs(ctx.relevanceArg(), builder);
 
@@ -680,18 +651,18 @@ private List altMultiFieldRelevanceFunctionArguments(
     ImmutableList.Builder builder = ImmutableList.builder();
     var fields = new RelevanceFieldList(map);
     builder.add(new UnresolvedArgument("fields", fields));
-    builder.add(new UnresolvedArgument("query",
-        new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING)));
+    builder.add(
+        new UnresolvedArgument(
+            "query", new Literal(StringUtils.unquoteText(ctx.query.getText()), DataType.STRING)));
     fillRelevanceArgs(ctx.relevanceArg(), builder);
     return builder.build();
   }
 
-  private List getExtractFunctionArguments(
-      ExtractFunctionCallContext ctx) {
-    List args = Arrays.asList(
-        new Literal(ctx.extractFunction().datetimePart().getText(), DataType.STRING),
-        visitFunctionArg(ctx.extractFunction().functionArg())
-    );
+  private List getExtractFunctionArguments(ExtractFunctionCallContext ctx) {
+    List args =
+        Arrays.asList(
+            new Literal(ctx.extractFunction().datetimePart().getText(), DataType.STRING),
+            visitFunctionArg(ctx.extractFunction().functionArg()));
     return args;
   }
 }
diff --git a/sql/src/main/java/org/opensearch/sql/sql/parser/AstHavingFilterBuilder.java b/sql/src/main/java/org/opensearch/sql/sql/parser/AstHavingFilterBuilder.java
index f90ea2f991..94c11d05af 100644
--- a/sql/src/main/java/org/opensearch/sql/sql/parser/AstHavingFilterBuilder.java
+++ b/sql/src/main/java/org/opensearch/sql/sql/parser/AstHavingFilterBuilder.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.sql.parser;
 
 import static org.opensearch.sql.sql.antlr.parser.OpenSearchSQLParser.QualifiedNameContext;
@@ -13,10 +12,9 @@
 import org.opensearch.sql.sql.parser.context.QuerySpecification;
 
 /**
- * AST Having filter builder that builds HAVING clause condition expressions
- * and replace alias by original expression in SELECT clause.
- * The reason for this is it's hard to replace afterwards since UnresolvedExpression
- * is immutable.
+ * AST Having filter builder that builds HAVING clause condition expressions and replace alias by
+ * original expression in SELECT clause. The reason for this is it's hard to replace afterwards
+ * since UnresolvedExpression is immutable.
  */
 @RequiredArgsConstructor
 public class AstHavingFilterBuilder extends AstExpressionBuilder {
@@ -34,5 +32,4 @@ private UnresolvedExpression replaceAlias(UnresolvedExpression expr) {
     }
     return expr;
   }
-
 }
diff --git a/sql/src/main/java/org/opensearch/sql/sql/parser/AstSortBuilder.java b/sql/src/main/java/org/opensearch/sql/sql/parser/AstSortBuilder.java
index 1b872dce54..2594709f4f 100644
--- a/sql/src/main/java/org/opensearch/sql/sql/parser/AstSortBuilder.java
+++ b/sql/src/main/java/org/opensearch/sql/sql/parser/AstSortBuilder.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.sql.parser;
 
 import static org.opensearch.sql.ast.dsl.AstDSL.booleanLiteral;
@@ -27,8 +26,8 @@
 import org.opensearch.sql.sql.parser.context.QuerySpecification;
 
 /**
- * AST sort builder that builds Sort AST node from ORDER BY clause. During this process, the item
- * in order by may be replaced by item in project list if it's an alias or ordinal. This is same as
+ * AST sort builder that builds Sort AST node from ORDER BY clause. During this process, the item in
+ * order by may be replaced by item in project list if it's an alias or ordinal. This is same as
  * GROUP BY building process.
  */
 @RequiredArgsConstructor
@@ -38,9 +37,7 @@ public class AstSortBuilder extends OpenSearchSQLParserBaseVisitor createSortFields() {
@@ -57,8 +54,8 @@ private List createSortFields() {
   }
 
   /**
-   * Argument "asc" is required.
-   * Argument "nullFirst" is optional and determined by Analyzer later if absent.
+   * Argument "asc" is required. Argument "nullFirst" is optional and determined by Analyzer later
+   * if absent.
    */
   private List createSortArguments(SortOption option) {
     SortOrder sortOrder = option.getSortOrder();
@@ -71,5 +68,4 @@ private List createSortArguments(SortOption option) {
     }
     return args.build();
   }
-
 }
diff --git a/sql/src/main/java/org/opensearch/sql/sql/parser/ParserUtils.java b/sql/src/main/java/org/opensearch/sql/sql/parser/ParserUtils.java
index 947dca51b9..3c60d43733 100644
--- a/sql/src/main/java/org/opensearch/sql/sql/parser/ParserUtils.java
+++ b/sql/src/main/java/org/opensearch/sql/sql/parser/ParserUtils.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.sql.parser;
 
 import static org.opensearch.sql.ast.tree.Sort.NullOrder;
@@ -16,33 +15,24 @@
 import org.antlr.v4.runtime.Token;
 import org.antlr.v4.runtime.tree.TerminalNode;
 
-/**
- * Parser Utils Class.
- */
+/** Parser Utils Class. */
 @UtilityClass
 public class ParserUtils {
 
-  /**
-   * Get original text in query.
-   */
+  /** Get original text in query. */
   public static String getTextInQuery(ParserRuleContext ctx, String queryString) {
     Token start = ctx.getStart();
     Token stop = ctx.getStop();
     return queryString.substring(start.getStartIndex(), stop.getStopIndex() + 1);
   }
 
-  /**
-   * Create sort option from syntax tree node.
-   */
+  /** Create sort option from syntax tree node. */
   public static SortOption createSortOption(OrderByElementContext orderBy) {
     return new SortOption(
-        createSortOrder(orderBy.order),
-        createNullOrder(orderBy.FIRST(), orderBy.LAST()));
+        createSortOrder(orderBy.order), createNullOrder(orderBy.FIRST(), orderBy.LAST()));
   }
 
-  /**
-   * Create sort order for sort option use from ASC/DESC token.
-   */
+  /** Create sort order for sort option use from ASC/DESC token. */
   public static SortOrder createSortOrder(Token ctx) {
     if (ctx == null) {
       return null;
@@ -50,9 +40,7 @@ public static SortOrder createSortOrder(Token ctx) {
     return SortOrder.valueOf(ctx.getText().toUpperCase());
   }
 
-  /**
-   * Create null order for sort option use from FIRST/LAST token.
-   */
+  /** Create null order for sort option use from FIRST/LAST token. */
   public static NullOrder createNullOrder(TerminalNode first, TerminalNode last) {
     if (first != null) {
       return NullOrder.NULL_FIRST;
@@ -62,5 +50,4 @@ public static NullOrder createNullOrder(TerminalNode first, TerminalNode last) {
       return null;
     }
   }
-
 }
diff --git a/sql/src/main/java/org/opensearch/sql/sql/parser/context/ParsingContext.java b/sql/src/main/java/org/opensearch/sql/sql/parser/context/ParsingContext.java
index 33b313367d..297fdfd749 100644
--- a/sql/src/main/java/org/opensearch/sql/sql/parser/context/ParsingContext.java
+++ b/sql/src/main/java/org/opensearch/sql/sql/parser/context/ParsingContext.java
@@ -3,21 +3,20 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.sql.parser.context;
 
 import java.util.ArrayDeque;
 import java.util.Deque;
 
 /**
- * SQL parsing context that maintains stack of query specifications for nested queries.
- * Currently this is just a thin wrapper by a stack.
+ * SQL parsing context that maintains a stack of query specifications for nested queries.
+ * Currently this is just a thin wrapper around a stack.
  */
 public class ParsingContext {
 
   /**
-   * Use stack rather than linked query specification because there is no need
-   * to look up through the stack.
+   * Use stack rather than linked query specification because there is no need to look up through
+   * the stack.
    */
   private final Deque contexts = new ArrayDeque<>();
 
@@ -31,10 +30,10 @@ public QuerySpecification peek() {
 
   /**
    * Pop up query context.
-   * @return  query context after popup.
+   *
+   * @return query context after popup.
    */
   public QuerySpecification pop() {
     return contexts.pop();
   }
-
 }
diff --git a/sql/src/main/java/org/opensearch/sql/sql/parser/context/QuerySpecification.java b/sql/src/main/java/org/opensearch/sql/sql/parser/context/QuerySpecification.java
index 21dddde2b9..abcd4f2073 100644
--- a/sql/src/main/java/org/opensearch/sql/sql/parser/context/QuerySpecification.java
+++ b/sql/src/main/java/org/opensearch/sql/sql/parser/context/QuerySpecification.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.sql.parser.context;
 
 import static org.opensearch.sql.sql.antlr.parser.OpenSearchSQLParser.FilteredAggregationFunctionCallContext;
@@ -42,6 +41,7 @@
 
 /**
  * Query specification domain that collects basic info for a simple query.
+ *
  * 
  * (I) What is the impact of this new abstraction?
  *  This abstraction and collecting process turns AST building process into two phases:
@@ -61,10 +61,9 @@
 @ToString
 public class QuerySpecification {
 
-  /**
-   * Items in SELECT clause and mapping from alias to select item.
-   */
+  /** Items in SELECT clause and mapping from alias to select item. */
   private final List selectItems = new ArrayList<>();
+
   private final Map selectItemsByAlias = new HashMap<>();
 
   /**
@@ -74,31 +73,30 @@ public class QuerySpecification {
   private final Set aggregators = new LinkedHashSet<>();
 
   /**
-   * Items in GROUP BY clause that may be:
-   *  1) Simple field name
-   *  2) Field nested in scalar function call
-   *  3) Ordinal that points to expression in SELECT
-   *  4) Alias that points to expression in SELECT.
+   * Items in GROUP BY clause that may be: 1) Simple field name 2) Field nested in scalar function
+   * call 3) Ordinal that points to expression in SELECT 4) Alias that points to expression in
+   * SELECT.
    */
   private final List groupByItems = new ArrayList<>();
 
-  /**
-   * Items in ORDER BY clause that may be different forms as above and its options.
-   */
+  /** Items in ORDER BY clause that may be different forms as above and its options. */
   private final List orderByItems = new ArrayList<>();
+
   private final List orderByOptions = new ArrayList<>();
 
   /**
    * Collect all query information in the parse tree excluding info in sub-query).
-   * @param query   query spec node in parse tree
+   *
+   * @param query query spec node in parse tree
    */
   public void collect(QuerySpecificationContext query, String queryString) {
     query.accept(new QuerySpecificationCollector(queryString));
   }
 
   /**
-   * Replace unresolved expression if it's an alias or ordinal that represents
-   * an actual expression in SELECT list.
+   * Replace unresolved expression if it's an alias or ordinal that represents an actual expression
+   * in SELECT list.
+   *
    * @param expr item to be replaced
    * @return select item that the given expr represents
    */
@@ -118,8 +116,8 @@ private boolean isIntegerLiteral(UnresolvedExpression expr) {
     }
 
     if (((Literal) expr).getType() != DataType.INTEGER) {
-      throw new SemanticCheckException(StringUtils.format(
-          "Non-integer constant [%s] found in ordinal", expr));
+      throw new SemanticCheckException(
+          StringUtils.format("Non-integer constant [%s] found in ordinal", expr));
     }
     return true;
   }
@@ -127,25 +125,26 @@ private boolean isIntegerLiteral(UnresolvedExpression expr) {
   private UnresolvedExpression getSelectItemByOrdinal(UnresolvedExpression expr) {
     int ordinal = (Integer) ((Literal) expr).getValue();
     if (ordinal <= 0 || ordinal > selectItems.size()) {
-      throw new SemanticCheckException(StringUtils.format(
-          "Ordinal [%d] is out of bound of select item list", ordinal));
+      throw new SemanticCheckException(
+          StringUtils.format("Ordinal [%d] is out of bound of select item list", ordinal));
     }
     return selectItems.get(ordinal - 1);
   }
 
   /**
    * Check if an expression is a select alias.
-   * @param expr  expression
+   *
+   * @param expr expression
    * @return true if it's an alias
    */
   public boolean isSelectAlias(UnresolvedExpression expr) {
-    return (expr instanceof QualifiedName)
-        && (selectItemsByAlias.containsKey(expr.toString()));
+    return (expr instanceof QualifiedName) && (selectItemsByAlias.containsKey(expr.toString()));
   }
 
   /**
    * Get original expression aliased in SELECT clause.
-   * @param expr  alias
+   *
+   * @param expr alias
    * @return expression in SELECT
    */
   public UnresolvedExpression getSelectItemByAlias(UnresolvedExpression expr) {
@@ -223,8 +222,7 @@ public Void visitAggregateFunctionCall(AggregateFunctionCallContext ctx) {
     @Override
     public Void visitFilteredAggregationFunctionCall(FilteredAggregationFunctionCallContext ctx) {
       UnresolvedExpression aggregateFunction = visitAstExpression(ctx);
-      aggregators.add(
-          AstDSL.alias(getTextInQuery(ctx, queryString), aggregateFunction));
+      aggregators.add(AstDSL.alias(getTextInQuery(ctx, queryString), aggregateFunction));
       return super.visitFilteredAggregationFunctionCall(ctx);
     }
 
@@ -236,5 +234,4 @@ private UnresolvedExpression visitAstExpression(ParseTree tree) {
       return expressionBuilder.visit(tree);
     }
   }
-
 }
diff --git a/sql/src/test/java/org/opensearch/sql/common/antlr/SyntaxParserTestBase.java b/sql/src/test/java/org/opensearch/sql/common/antlr/SyntaxParserTestBase.java
index 63d7666c62..87f2083774 100644
--- a/sql/src/test/java/org/opensearch/sql/common/antlr/SyntaxParserTestBase.java
+++ b/sql/src/test/java/org/opensearch/sql/common/antlr/SyntaxParserTestBase.java
@@ -7,16 +7,14 @@
 import lombok.Getter;
 import lombok.RequiredArgsConstructor;
 
-/**
- * A base class for tests for SQL or PPL parser.
- */
+/** A base class for tests for SQL or PPL parser. */
 @RequiredArgsConstructor(access = AccessLevel.PROTECTED)
 public abstract class SyntaxParserTestBase {
-  @Getter
-  private final Parser parser;
+  @Getter private final Parser parser;
 
   /**
    * A helper function that fails a test if the parser rejects a given query.
+   *
    * @param query Query to test.
    */
   protected void acceptQuery(String query) {
@@ -25,6 +23,7 @@ protected void acceptQuery(String query) {
 
   /**
    * A helper function that fails a test if the parser accepts a given query.
+   *
    * @param query Query to test.
    */
   protected void rejectQuery(String query) {
diff --git a/sql/src/test/java/org/opensearch/sql/sql/SQLServiceTest.java b/sql/src/test/java/org/opensearch/sql/sql/SQLServiceTest.java
index f4342d877d..8cb2994dc3 100644
--- a/sql/src/test/java/org/opensearch/sql/sql/SQLServiceTest.java
+++ b/sql/src/test/java/org/opensearch/sql/sql/SQLServiceTest.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.sql;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -45,14 +44,13 @@ class SQLServiceTest {
 
   private DefaultQueryManager queryManager;
 
-  @Mock
-  private QueryService queryService;
+  @Mock private QueryService queryService;
 
   @BeforeEach
   public void setUp() {
     queryManager = DefaultQueryManager.defaultQueryManager();
-    sqlService = new SQLService(new SQLSyntaxParser(), queryManager,
-        new QueryPlanFactory(queryService));
+    sqlService =
+        new SQLService(new SQLSyntaxParser(), queryManager, new QueryPlanFactory(queryService));
   }
 
   @AfterEach
@@ -97,8 +95,8 @@ public void onFailure(Exception e) {
   @Test
   public void can_execute_close_cursor_query() {
     sqlService.execute(
-        new SQLQueryRequest(new JSONObject(), null, QUERY + "/close",
-            Map.of("format", "jdbc"), "n:cursor"),
+        new SQLQueryRequest(
+            new JSONObject(), null, QUERY + "/close", Map.of("format", "jdbc"), "n:cursor"),
         new ResponseListener<>() {
           @Override
           public void onResponse(QueryResponse response) {
@@ -131,13 +129,17 @@ public void onFailure(Exception e) {
 
   @Test
   public void can_explain_sql_query() {
-    doAnswer(invocation -> {
-      ResponseListener listener = invocation.getArgument(1);
-      listener.onResponse(new ExplainResponse(new ExplainResponseNode("Test")));
-      return null;
-    }).when(queryService).explain(any(), any());
+    doAnswer(
+            invocation -> {
+              ResponseListener listener = invocation.getArgument(1);
+              listener.onResponse(new ExplainResponse(new ExplainResponseNode("Test")));
+              return null;
+            })
+        .when(queryService)
+        .explain(any(), any());
 
-    sqlService.explain(new SQLQueryRequest(new JSONObject(), "SELECT 123", EXPLAIN, "csv"),
+    sqlService.explain(
+        new SQLQueryRequest(new JSONObject(), "SELECT 123", EXPLAIN, "csv"),
         new ResponseListener() {
           @Override
           public void onResponse(ExplainResponse response) {
@@ -153,8 +155,8 @@ public void onFailure(Exception e) {
 
   @Test
   public void cannot_explain_cursor_query() {
-    sqlService.explain(new SQLQueryRequest(new JSONObject(), null, EXPLAIN,
-            Map.of("format", "jdbc"), "n:cursor"),
+    sqlService.explain(
+        new SQLQueryRequest(new JSONObject(), null, EXPLAIN, Map.of("format", "jdbc"), "n:cursor"),
         new ResponseListener() {
           @Override
           public void onResponse(ExplainResponse response) {
@@ -163,8 +165,10 @@ public void onResponse(ExplainResponse response) {
 
           @Override
           public void onFailure(Exception e) {
-            assertEquals("Explain of a paged query continuation is not supported."
-                + " Use `explain` for the initial query request.", e.getMessage());
+            assertEquals(
+                "Explain of a paged query continuation is not supported."
+                    + " Use `explain` for the initial query request.",
+                e.getMessage());
           }
         });
   }
diff --git a/sql/src/test/java/org/opensearch/sql/sql/antlr/BracketedTimestampTest.java b/sql/src/test/java/org/opensearch/sql/sql/antlr/BracketedTimestampTest.java
index 0f7a284aa7..120cd233fc 100644
--- a/sql/src/test/java/org/opensearch/sql/sql/antlr/BracketedTimestampTest.java
+++ b/sql/src/test/java/org/opensearch/sql/sql/antlr/BracketedTimestampTest.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.sql.antlr;
 
 import org.junit.jupiter.api.Test;
diff --git a/sql/src/test/java/org/opensearch/sql/sql/antlr/HighlightTest.java b/sql/src/test/java/org/opensearch/sql/sql/antlr/HighlightTest.java
index 6826a37c0b..ae1e418357 100644
--- a/sql/src/test/java/org/opensearch/sql/sql/antlr/HighlightTest.java
+++ b/sql/src/test/java/org/opensearch/sql/sql/antlr/HighlightTest.java
@@ -15,14 +15,14 @@ void single_field_test() {
 
   @Test
   void multiple_highlights_test() {
-    acceptQuery("SELECT HIGHLIGHT(Tags), HIGHLIGHT(Body) FROM Index "
-        + "WHERE MULTI_MATCH([Tags, Body], 'Time')");
+    acceptQuery(
+        "SELECT HIGHLIGHT(Tags), HIGHLIGHT(Body) FROM Index "
+            + "WHERE MULTI_MATCH([Tags, Body], 'Time')");
   }
 
   @Test
   void wildcard_test() {
-    acceptQuery("SELECT HIGHLIGHT('T*') FROM Index "
-        + "WHERE MULTI_MATCH([Tags, Body], 'Time')");
+    acceptQuery("SELECT HIGHLIGHT('T*') FROM Index " + "WHERE MULTI_MATCH([Tags, Body], 'Time')");
   }
 
   @Test
@@ -33,13 +33,12 @@ void highlight_all_test() {
 
   @Test
   void multiple_parameters_failure_test() {
-    rejectQuery("SELECT HIGHLIGHT(Tags1, Tags2) FROM Index "
-        + "WHERE MULTI_MATCH([Tags, Body], 'Time')");
+    rejectQuery(
+        "SELECT HIGHLIGHT(Tags1, Tags2) FROM Index " + "WHERE MULTI_MATCH([Tags, Body], 'Time')");
   }
 
   @Test
   void no_parameters_failure_test() {
-    rejectQuery("SELECT HIGHLIGHT() FROM Index "
-        + "WHERE MULTI_MATCH([Tags, Body], 'Time')");
+    rejectQuery("SELECT HIGHLIGHT() FROM Index " + "WHERE MULTI_MATCH([Tags, Body], 'Time')");
   }
 }
diff --git a/sql/src/test/java/org/opensearch/sql/sql/antlr/MatchBoolPrefixParserTest.java b/sql/src/test/java/org/opensearch/sql/sql/antlr/MatchBoolPrefixParserTest.java
index 66c4d5be9d..db5ce18edb 100644
--- a/sql/src/test/java/org/opensearch/sql/sql/antlr/MatchBoolPrefixParserTest.java
+++ b/sql/src/test/java/org/opensearch/sql/sql/antlr/MatchBoolPrefixParserTest.java
@@ -25,14 +25,13 @@ static Stream generateValidArguments() {
         new String("max_expansions=50"),
         new String("fuzzy_transpositions=true"),
         new String("fuzzy_rewrite=constant_score"),
-        new String("boost=1")
-    );
+        new String("boost=1"));
   }
 
   @ParameterizedTest
   @MethodSource("generateValidArguments")
   public void testValidArguments(String arg) {
-    acceptQuery("SELECT * FROM T WHERE MATCH_BOOL_PREFIX(message, 'query', " + arg  + ")");
+    acceptQuery("SELECT * FROM T WHERE MATCH_BOOL_PREFIX(message, 'query', " + arg + ")");
   }
 
   @Test
diff --git a/sql/src/test/java/org/opensearch/sql/sql/antlr/SQLParserTest.java b/sql/src/test/java/org/opensearch/sql/sql/antlr/SQLParserTest.java
index 3f323725ab..db091a4932 100644
--- a/sql/src/test/java/org/opensearch/sql/sql/antlr/SQLParserTest.java
+++ b/sql/src/test/java/org/opensearch/sql/sql/antlr/SQLParserTest.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.sql.antlr;
 
 import org.opensearch.sql.common.antlr.SyntaxParserTestBase;
diff --git a/sql/src/test/java/org/opensearch/sql/sql/antlr/SQLSyntaxParserTest.java b/sql/src/test/java/org/opensearch/sql/sql/antlr/SQLSyntaxParserTest.java
index 0dbdd6c36f..a1a6923bf1 100644
--- a/sql/src/test/java/org/opensearch/sql/sql/antlr/SQLSyntaxParserTest.java
+++ b/sql/src/test/java/org/opensearch/sql/sql/antlr/SQLSyntaxParserTest.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.sql.antlr;
 
 import static org.junit.jupiter.api.Assertions.assertNotNull;
@@ -72,8 +71,7 @@ public void canParseHiddenIndexName() {
 
   @Test
   public void canNotParseIndexNameWithSpecialChar() {
-    assertThrows(SyntaxCheckException.class,
-        () -> parser.parse("SELECT * FROM hello+world"));
+    assertThrows(SyntaxCheckException.class, () -> parser.parse("SELECT * FROM hello+world"));
   }
 
   @Test
@@ -83,14 +81,12 @@ public void canParseIndexNameWithSpecialCharQuoted() {
 
   @Test
   public void canNotParseIndexNameStartingWithNumber() {
-    assertThrows(SyntaxCheckException.class,
-        () -> parser.parse("SELECT * FROM 123test"));
+    assertThrows(SyntaxCheckException.class, () -> parser.parse("SELECT * FROM 123test"));
   }
 
   @Test
   public void canNotParseIndexNameSingleQuoted() {
-    assertThrows(SyntaxCheckException.class,
-        () -> parser.parse("SELECT * FROM 'test'"));
+    assertThrows(SyntaxCheckException.class, () -> parser.parse("SELECT * FROM 'test'"));
   }
 
   @Test
@@ -100,14 +96,15 @@ public void canParseWhereClause() {
 
   @Test
   public void canParseSelectClauseWithLogicalOperator() {
-    assertNotNull(parser.parse(
-        "SELECT age = 10 AND name = 'John' OR NOT (balance > 1000) FROM test"));
+    assertNotNull(
+        parser.parse("SELECT age = 10 AND name = 'John' OR NOT (balance > 1000) FROM test"));
   }
 
   @Test
   public void canParseWhereClauseWithLogicalOperator() {
-    assertNotNull(parser.parse("SELECT name FROM test "
-        + "WHERE age = 10 AND name = 'John' OR NOT (balance > 1000)"));
+    assertNotNull(
+        parser.parse(
+            "SELECT name FROM test " + "WHERE age = 10 AND name = 'John' OR NOT (balance > 1000)"));
   }
 
   @Test
@@ -127,9 +124,11 @@ public void canParseDistinctClause() {
   @Test
   public void canParseCaseStatement() {
     assertNotNull(parser.parse("SELECT CASE WHEN age > 30 THEN 'age1' ELSE 'age2' END FROM test"));
-    assertNotNull(parser.parse("SELECT CASE WHEN age > 30 THEN 'age1' "
-                                        + " WHEN age < 50 THEN 'age2' "
-                                        + " ELSE 'age3' END FROM test"));
+    assertNotNull(
+        parser.parse(
+            "SELECT CASE WHEN age > 30 THEN 'age1' "
+                + " WHEN age < 50 THEN 'age2' "
+                + " ELSE 'age3' END FROM test"));
     assertNotNull(parser.parse("SELECT CASE age WHEN 30 THEN 'age1' ELSE 'age2' END FROM test"));
     assertNotNull(parser.parse("SELECT CASE age WHEN 30 THEN 'age1' END FROM test"));
   }
@@ -146,10 +145,11 @@ public void canNotParseAggregateFunctionWithWrongArgument() {
   public void canParseOrderByClause() {
     assertNotNull(parser.parse("SELECT name, age FROM test ORDER BY name, age"));
     assertNotNull(parser.parse("SELECT name, age FROM test ORDER BY name ASC, age DESC"));
-    assertNotNull(parser.parse(
-        "SELECT name, age FROM test ORDER BY name NULLS LAST, age NULLS FIRST"));
-    assertNotNull(parser.parse(
-        "SELECT name, age FROM test ORDER BY name ASC NULLS FIRST, age DESC NULLS LAST"));
+    assertNotNull(
+        parser.parse("SELECT name, age FROM test ORDER BY name NULLS LAST, age NULLS FIRST"));
+    assertNotNull(
+        parser.parse(
+            "SELECT name, age FROM test ORDER BY name ASC NULLS FIRST, age DESC NULLS LAST"));
   }
 
   @Test
@@ -170,8 +170,7 @@ private static Stream nowLikeFunctionsData() {
         Arguments.of("current_date", false, true),
         Arguments.of("utc_date", false, true),
         Arguments.of("utc_time", false, true),
-        Arguments.of("utc_timestamp", false, true)
-    );
+        Arguments.of("utc_timestamp", false, true));
   }
 
   private static Stream getPartForExtractFunction() {
@@ -195,8 +194,7 @@ private static Stream getPartForExtractFunction() {
         Arguments.of("DAY_SECOND"),
         Arguments.of("DAY_MINUTE"),
         Arguments.of("DAY_HOUR"),
-        Arguments.of("YEAR_MONTH")
-    );
+        Arguments.of("YEAR_MONTH"));
   }
 
   @ParameterizedTest(name = "{0}")
@@ -206,11 +204,7 @@ public void can_parse_extract_function(String part) {
   }
 
   private static Stream getInvalidPartForExtractFunction() {
-    return Stream.of(
-        Arguments.of("INVALID"),
-        Arguments.of("\"SECOND\""),
-        Arguments.of("123")
-    );
+    return Stream.of(Arguments.of("INVALID"), Arguments.of("\"SECOND\""), Arguments.of("123"));
   }
 
   @ParameterizedTest(name = "{0}")
@@ -230,9 +224,12 @@ public void can_parse_weekday_function() {
   @ParameterizedTest(name = "{0}")
   @MethodSource("nowLikeFunctionsData")
   public void can_parse_now_like_functions(String name, Boolean hasFsp, Boolean hasShortcut) {
-    var calls = new ArrayList() {{
-        add(name + "()");
-      }};
+    var calls =
+        new ArrayList() {
+          {
+            add(name + "()");
+          }
+        };
     if (hasShortcut) {
       calls.add(name);
     }
@@ -269,8 +266,7 @@ public void can_parse_get_format_function(String type, String format) {
   @Test
   public void cannot_parse_get_format_function_with_bad_arg() {
     assertThrows(
-        SyntaxCheckException.class,
-        () -> parser.parse("GET_FORMAT(NONSENSE_ARG,'INTERNAL')"));
+        SyntaxCheckException.class, () -> parser.parse("GET_FORMAT(NONSENSE_ARG,'INTERNAL')"));
   }
 
   @Test
@@ -290,7 +286,7 @@ public void can_parse_dayofmonth_functions() {
     assertNotNull(parser.parse("SELECT dayofmonth('2022-11-18')"));
     assertNotNull(parser.parse("SELECT day_of_month('2022-11-18')"));
   }
-    
+
   @Test
   public void can_parse_day_of_week_functions() {
     assertNotNull(parser.parse("SELECT dayofweek('2022-11-18')"));
@@ -325,53 +321,55 @@ public void can_parse_month_of_year_function() {
 
     assertNotNull(parser.parse("SELECT month(timestamp('2022-11-18 00:00:00'))"));
     assertNotNull(parser.parse("SELECT month_of_year(timestamp('2022-11-18 00:00:00'))"));
-
   }
 
   @Test
   public void can_parse_multi_match_relevance_function() {
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE multimatch(\"fields\"=\"field\", query=\"query\")"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE multimatchquery(fields=\"field\", \"query\"=\"query\")"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE multi_match(\"fields\"=\"field\", \"query\"=\"query\")"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE multi_match(\'fields\'=\'field\', \'query\'=\'query\')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE multi_match(fields=\'field\', query=\'query\')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE multi_match(['address'], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE multi_match(['address', 'notes'], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE multi_match([\"*\"], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE multi_match([\"address\"], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE multi_match([`address`], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE multi_match([address], 'query')"));
-
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE"
-            + " multi_match(['address' ^ 1.0, 'notes' ^ 2.2], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE multi_match(['address' ^ 1.1, 'notes'], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE multi_match(['address', 'notes' ^ 1.5], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE multi_match(['address', 'notes' 3], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE multi_match(['address' ^ .3, 'notes' 3], 'query')"));
-
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE"
-            + " multi_match([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE"
-            + " multi_match([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query', analyzer=keyword,"
-            + "operator='AND', tie_breaker=0.3, type = \"most_fields\", fuzziness = \"AUTO\")"));
+    assertNotNull(
+        parser.parse(
+            "SELECT id FROM test WHERE multimatch(\"fields\"=\"field\", query=\"query\")"));
+    assertNotNull(
+        parser.parse(
+            "SELECT id FROM test WHERE multimatchquery(fields=\"field\", \"query\"=\"query\")"));
+    assertNotNull(
+        parser.parse(
+            "SELECT id FROM test WHERE multi_match(\"fields\"=\"field\", \"query\"=\"query\")"));
+    assertNotNull(
+        parser.parse(
+            "SELECT id FROM test WHERE multi_match(\'fields\'=\'field\', \'query\'=\'query\')"));
+    assertNotNull(
+        parser.parse("SELECT id FROM test WHERE multi_match(fields=\'field\', query=\'query\')"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE multi_match(['address'], 'query')"));
+    assertNotNull(
+        parser.parse("SELECT id FROM test WHERE multi_match(['address', 'notes'], 'query')"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE multi_match([\"*\"], 'query')"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE multi_match([\"address\"], 'query')"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE multi_match([`address`], 'query')"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE multi_match([address], 'query')"));
+
+    assertNotNull(
+        parser.parse(
+            "SELECT id FROM test WHERE"
+                + " multi_match(['address' ^ 1.0, 'notes' ^ 2.2], 'query')"));
+    assertNotNull(
+        parser.parse("SELECT id FROM test WHERE multi_match(['address' ^ 1.1, 'notes'], 'query')"));
+    assertNotNull(
+        parser.parse("SELECT id FROM test WHERE multi_match(['address', 'notes' ^ 1.5], 'query')"));
+    assertNotNull(
+        parser.parse("SELECT id FROM test WHERE multi_match(['address', 'notes' 3], 'query')"));
+    assertNotNull(
+        parser.parse(
+            "SELECT id FROM test WHERE multi_match(['address' ^ .3, 'notes' 3], 'query')"));
+
+    assertNotNull(
+        parser.parse(
+            "SELECT id FROM test WHERE"
+                + " multi_match([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')"));
+    assertNotNull(
+        parser.parse(
+            "SELECT id FROM test WHERE multi_match([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query',"
+                + " analyzer=keyword,operator='AND', tie_breaker=0.3, type = \"most_fields\","
+                + " fuzziness = \"AUTO\")"));
   }
 
   @Test
@@ -384,160 +382,137 @@ public void can_parse_second_functions() {
 
   @Test
   public void can_parse_simple_query_string_relevance_function() {
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE simple_query_string(['address'], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE simple_query_string(['address', 'notes'], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE simple_query_string([\"*\"], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE simple_query_string([\"address\"], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE simple_query_string([`address`], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE simple_query_string([address], 'query')"));
-
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE"
-            + " simple_query_string(['address' ^ 1.0, 'notes' ^ 2.2], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE simple_query_string(['address' ^ 1.1, 'notes'], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE simple_query_string(['address', 'notes' ^ 1.5], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE simple_query_string(['address', 'notes' 3], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE simple_query_string(['address' ^ .3, 'notes' 3], 'query')"));
-
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE"
-            + " simple_query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE"
-            + " simple_query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query', analyzer=keyword,"
-            + "flags='AND', quote_field_suffix=\".exact\", fuzzy_prefix_length = 4)"));
+    assertNotNull(
+        parser.parse("SELECT id FROM test WHERE simple_query_string(['address'], 'query')"));
+    assertNotNull(
+        parser.parse(
+            "SELECT id FROM test WHERE simple_query_string(['address', 'notes'], 'query')"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE simple_query_string([\"*\"], 'query')"));
+    assertNotNull(
+        parser.parse("SELECT id FROM test WHERE simple_query_string([\"address\"], 'query')"));
+    assertNotNull(
+        parser.parse("SELECT id FROM test WHERE simple_query_string([`address`], 'query')"));
+    assertNotNull(
+        parser.parse("SELECT id FROM test WHERE simple_query_string([address], 'query')"));
+
+    assertNotNull(
+        parser.parse(
+            "SELECT id FROM test WHERE"
+                + " simple_query_string(['address' ^ 1.0, 'notes' ^ 2.2], 'query')"));
+    assertNotNull(
+        parser.parse(
+            "SELECT id FROM test WHERE simple_query_string(['address' ^ 1.1, 'notes'], 'query')"));
+    assertNotNull(
+        parser.parse(
+            "SELECT id FROM test WHERE simple_query_string(['address', 'notes' ^ 1.5], 'query')"));
+    assertNotNull(
+        parser.parse(
+            "SELECT id FROM test WHERE simple_query_string(['address', 'notes' 3], 'query')"));
+    assertNotNull(
+        parser.parse(
+            "SELECT id FROM test WHERE simple_query_string(['address' ^ .3, 'notes' 3], 'query')"));
+
+    assertNotNull(
+        parser.parse(
+            "SELECT id FROM test WHERE"
+                + " simple_query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')"));
+    assertNotNull(
+        parser.parse(
+            "SELECT id FROM test WHERE simple_query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2],"
+                + " 'query', analyzer=keyword,flags='AND', quote_field_suffix=\".exact\","
+                + " fuzzy_prefix_length = 4)"));
   }
 
   @Test
   public void can_parse_str_to_date() {
-    assertNotNull(parser.parse(
-        "SELECT STR_TO_DATE('01,5,2013','%d,%m,%Y')"
-    ));
+    assertNotNull(parser.parse("SELECT STR_TO_DATE('01,5,2013','%d,%m,%Y')"));
 
-    assertNotNull(parser.parse(
-        "SELECT STR_TO_DATE('a09:30:17','a%h:%i:%s')"
-    ));
+    assertNotNull(parser.parse("SELECT STR_TO_DATE('a09:30:17','a%h:%i:%s')"));
 
-    assertNotNull(parser.parse(
-        "SELECT STR_TO_DATE('abc','abc');"
-    ));
+    assertNotNull(parser.parse("SELECT STR_TO_DATE('abc','abc');"));
   }
 
   @Test
   public void can_parse_query_string_relevance_function() {
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE query_string(['*'], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE query_string(['address'], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE query_string(['add*'], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE query_string(['*ess'], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE query_string(['address', 'notes'], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE query_string([\"*\"], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE query_string([\"address\"], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE query_string([\"ad*\"], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE query_string([\"*s\"], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE query_string([\"address\", \"notes\"], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE query_string([`*`], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE query_string([`address`], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE query_string([`ad*`], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE query_string([`*ss`], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE query_string([`address`, `notes`], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE query_string([address], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE query_string([addr*], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE query_string([*ss], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE query_string([address, notes], 'query')"));
-
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE"
-            + " query_string(['address' ^ 1.0, 'notes' ^ 2.2], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE query_string(['address' ^ 1.1, 'notes'], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE query_string(['address', 'notes' ^ 1.5], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE query_string(['address', 'notes' 3], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE query_string(['address' ^ .3, 'notes' 3], 'query')"));
-
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE"
-            + " query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')"));
-    assertNotNull(parser.parse(
-        "SELECT id FROM test WHERE"
-            + " query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query', analyzer=keyword,"
-            + "operator='AND', tie_breaker=0.3, type = \"most_fields\", fuzziness = 4)"));
-  }
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query_string(['*'], 'query')"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query_string(['address'], 'query')"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query_string(['add*'], 'query')"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query_string(['*ess'], 'query')"));
+    assertNotNull(
+        parser.parse("SELECT id FROM test WHERE query_string(['address', 'notes'], 'query')"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([\"*\"], 'query')"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([\"address\"], 'query')"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([\"ad*\"], 'query')"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([\"*s\"], 'query')"));
+    assertNotNull(
+        parser.parse("SELECT id FROM test WHERE query_string([\"address\", \"notes\"], 'query')"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([`*`], 'query')"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([`address`], 'query')"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([`ad*`], 'query')"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([`*ss`], 'query')"));
+    assertNotNull(
+        parser.parse("SELECT id FROM test WHERE query_string([`address`, `notes`], 'query')"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([address], 'query')"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([addr*], 'query')"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query_string([*ss], 'query')"));
+    assertNotNull(
+        parser.parse("SELECT id FROM test WHERE query_string([address, notes], 'query')"));
+
+    assertNotNull(
+        parser.parse(
+            "SELECT id FROM test WHERE"
+                + " query_string(['address' ^ 1.0, 'notes' ^ 2.2], 'query')"));
+    assertNotNull(
+        parser.parse(
+            "SELECT id FROM test WHERE query_string(['address' ^ 1.1, 'notes'], 'query')"));
+    assertNotNull(
+        parser.parse(
+            "SELECT id FROM test WHERE query_string(['address', 'notes' ^ 1.5], 'query')"));
+    assertNotNull(
+        parser.parse("SELECT id FROM test WHERE query_string(['address', 'notes' 3], 'query')"));
+    assertNotNull(
+        parser.parse(
+            "SELECT id FROM test WHERE query_string(['address' ^ .3, 'notes' 3], 'query')"));
 
+    assertNotNull(
+        parser.parse(
+            "SELECT id FROM test WHERE"
+                + " query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query')"));
+    assertNotNull(
+        parser.parse(
+            "SELECT id FROM test WHERE"
+                + " query_string([\"Tags\" ^ 1.5, Title, `Body` 4.2], 'query', analyzer=keyword,"
+                + "operator='AND', tie_breaker=0.3, type = \"most_fields\", fuzziness = 4)"));
+  }
 
   @Test
   public void can_parse_query_relevance_function() {
-    assertNotNull(parser.parse(
-            "SELECT id FROM test WHERE query('address:query')"));
-    assertNotNull(parser.parse(
-            "SELECT id FROM test WHERE query('address:query OR notes:query')"));
-    assertNotNull(parser.parse(
-            "SELECT id FROM test WHERE query(\"address:query\")"));
-    assertNotNull(parser.parse(
-            "SELECT id FROM test WHERE query(\"address:query OR notes:query\")"));
-    assertNotNull(parser.parse(
-            "SELECT id FROM test WHERE query(`address:query`)"));
-    assertNotNull(parser.parse(
-            "SELECT id FROM test WHERE query(`address:query OR notes:query`)"));
-    assertNotNull(parser.parse(
-            "SELECT id FROM test WHERE query('*:query')"));
-    assertNotNull(parser.parse(
-            "SELECT id FROM test WHERE query(\"*:query\")"));
-    assertNotNull(parser.parse(
-            "SELECT id FROM test WHERE query(`*:query`)"));
-    assertNotNull(parser.parse(
-            "SELECT id FROM test WHERE query('address:*uery OR notes:?uery')"));
-    assertNotNull(parser.parse(
-            "SELECT id FROM test WHERE query(\"address:*uery OR notes:?uery\")"));
-    assertNotNull(parser.parse(
-            "SELECT id FROM test WHERE query(`address:*uery OR notes:?uery`)"));
-    assertNotNull(parser.parse(
-            "SELECT id FROM test WHERE query('address:qu*ry OR notes:qu?ry')"));
-    assertNotNull(parser.parse(
-            "SELECT id FROM test WHERE query(\"address:qu*ry OR notes:qu?ry\")"));
-    assertNotNull(parser.parse(
-            "SELECT id FROM test WHERE query(`address:qu*ry OR notes:qu?ry`)"));
-    assertNotNull(parser.parse(
-            "SELECT id FROM test WHERE query('address:query notes:query')"));
-    assertNotNull(parser.parse(
-            "SELECT id FROM test WHERE query(\"address:query notes:query\")"));
-    assertNotNull(parser.parse(
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query('address:query')"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query('address:query OR notes:query')"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query(\"address:query\")"));
+    assertNotNull(
+        parser.parse("SELECT id FROM test WHERE query(\"address:query OR notes:query\")"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query(`address:query`)"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query(`address:query OR notes:query`)"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query('*:query')"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query(\"*:query\")"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query(`*:query`)"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query('address:*uery OR notes:?uery')"));
+    assertNotNull(
+        parser.parse("SELECT id FROM test WHERE query(\"address:*uery OR notes:?uery\")"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query(`address:*uery OR notes:?uery`)"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query('address:qu*ry OR notes:qu?ry')"));
+    assertNotNull(
+        parser.parse("SELECT id FROM test WHERE query(\"address:qu*ry OR notes:qu?ry\")"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query(`address:qu*ry OR notes:qu?ry`)"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query('address:query notes:query')"));
+    assertNotNull(parser.parse("SELECT id FROM test WHERE query(\"address:query notes:query\")"));
+    assertNotNull(
+        parser.parse(
             "SELECT id FROM test WHERE "
-                    + "query(\"Body:\'taste beer\' Tags:\'taste beer\'  Title:\'taste beer\'\")"));
+                + "query(\"Body:\'taste beer\' Tags:\'taste beer\'  Title:\'taste beer\'\")"));
   }
 
-
   @Test
   public void can_parse_match_relevance_function() {
     assertNotNull(parser.parse("SELECT * FROM test WHERE match(column, \"this is a test\")"));
@@ -551,19 +526,18 @@ public void can_parse_match_relevance_function() {
   public void can_parse_matchquery_relevance_function() {
     assertNotNull(parser.parse("SELECT * FROM test WHERE matchquery(column, \"this is a test\")"));
     assertNotNull(parser.parse("SELECT * FROM test WHERE matchquery(column, 'this is a test')"));
-    assertNotNull(parser.parse(
-        "SELECT * FROM test WHERE matchquery(`column`, \"this is a test\")"));
+    assertNotNull(
+        parser.parse("SELECT * FROM test WHERE matchquery(`column`, \"this is a test\")"));
     assertNotNull(parser.parse("SELECT * FROM test WHERE matchquery(`column`, 'this is a test')"));
     assertNotNull(parser.parse("SELECT * FROM test WHERE matchquery(column, 100500)"));
   }
 
   @Test
   public void can_parse_match_query_relevance_function() {
-    assertNotNull(parser.parse(
-        "SELECT * FROM test WHERE match_query(column, \"this is a test\")"));
+    assertNotNull(parser.parse("SELECT * FROM test WHERE match_query(column, \"this is a test\")"));
     assertNotNull(parser.parse("SELECT * FROM test WHERE match_query(column, 'this is a test')"));
-    assertNotNull(parser.parse(
-        "SELECT * FROM test WHERE match_query(`column`, \"this is a test\")"));
+    assertNotNull(
+        parser.parse("SELECT * FROM test WHERE match_query(`column`, \"this is a test\")"));
     assertNotNull(parser.parse("SELECT * FROM test WHERE match_query(`column`, 'this is a test')"));
     assertNotNull(parser.parse("SELECT * FROM test WHERE match_query(column, 100500)"));
   }
@@ -571,21 +545,24 @@ public void can_parse_match_query_relevance_function() {
   @Test
   public void can_parse_match_phrase_relevance_function() {
     assertNotNull(
-            parser.parse("SELECT * FROM test WHERE match_phrase(column, \"this is a test\")"));
+        parser.parse("SELECT * FROM test WHERE match_phrase(column, \"this is a test\")"));
     assertNotNull(parser.parse("SELECT * FROM test WHERE match_phrase(column, 'this is a test')"));
     assertNotNull(
-            parser.parse("SELECT * FROM test WHERE match_phrase(`column`, \"this is a test\")"));
+        parser.parse("SELECT * FROM test WHERE match_phrase(`column`, \"this is a test\")"));
     assertNotNull(
-            parser.parse("SELECT * FROM test WHERE match_phrase(`column`, 'this is a test')"));
+        parser.parse("SELECT * FROM test WHERE match_phrase(`column`, 'this is a test')"));
     assertNotNull(parser.parse("SELECT * FROM test WHERE match_phrase(column, 100500)"));
   }
 
   @Test
   public void can_parse_minute_of_day_function() {
     assertNotNull(parser.parse("SELECT minute_of_day(\"12:23:34\");"));
-    assertNotNull(parser.parse("SELECT minute_of_day('12:23:34');"));;
-    assertNotNull(parser.parse("SELECT minute_of_day(\"2022-12-14 12:23:34\");"));;
-    assertNotNull(parser.parse("SELECT minute_of_day('2022-12-14 12:23:34');"));;
+    assertNotNull(parser.parse("SELECT minute_of_day('12:23:34');"));
+    ;
+    assertNotNull(parser.parse("SELECT minute_of_day(\"2022-12-14 12:23:34\");"));
+    ;
+    assertNotNull(parser.parse("SELECT minute_of_day('2022-12-14 12:23:34');"));
+    ;
   }
 
   @Test
@@ -594,7 +571,7 @@ public void can_parse_sec_to_time_function() {
     assertNotNull(parser.parse("SELECT sec_to_time(6897)"));
     assertNotNull(parser.parse("SELECT sec_to_time(6897.123)"));
   }
-  
+
   @Test
   public void can_parse_last_day_function() {
     assertNotNull(parser.parse("SELECT last_day(\"2017-06-20\")"));
@@ -606,7 +583,7 @@ public void can_parse_timestampadd_function() {
     assertNotNull(parser.parse("SELECT TIMESTAMPADD(MINUTE, 1, '2003-01-02')"));
     assertNotNull(parser.parse("SELECT TIMESTAMPADD(WEEK,1,'2003-01-02')"));
   }
-  
+
   @Test
   public void can_parse_timestampdiff_function() {
     assertNotNull(parser.parse("SELECT TIMESTAMPDIFF(MINUTE, '2003-01-02', '2003-01-02')"));
@@ -630,35 +607,20 @@ public void can_parse_wildcard_query_relevance_function() {
     assertNotNull(
         parser.parse("SELECT * FROM test WHERE wildcard_query(`column`, 'this is a test*')"));
     assertNotNull(
-        parser.parse("SELECT * FROM test WHERE wildcard_query(`column`, 'this is a test*', "
-            + "boost=1.5, case_insensitive=true, rewrite=\"scoring_boolean\")"));
+        parser.parse(
+            "SELECT * FROM test WHERE wildcard_query(`column`, 'this is a test*', "
+                + "boost=1.5, case_insensitive=true, rewrite=\"scoring_boolean\")"));
   }
 
   @Test
   public void can_parse_nested_function() {
-    assertNotNull(
-        parser.parse("SELECT NESTED(PATH.INNER_FIELD) FROM TEST"));
-    assertNotNull(
-        parser.parse("SELECT NESTED('PATH.INNER_FIELD') FROM TEST"));
-    assertNotNull(
-        parser.parse("SELECT SUM(NESTED(PATH.INNER_FIELD)) FROM TEST"));
-    assertNotNull(
-        parser.parse("SELECT NESTED(PATH.INNER_FIELD, PATH) FROM TEST"));
-    assertNotNull(
-        parser.parse(
-            "SELECT * FROM TEST WHERE NESTED(PATH.INNER_FIELDS) = 'A'"
-        )
-    );
-    assertNotNull(
-        parser.parse(
-            "SELECT * FROM TEST WHERE NESTED(PATH.INNER_FIELDS, PATH) = 'A'"
-        )
-    );
-    assertNotNull(
-        parser.parse(
-        "SELECT FIELD FROM TEST ORDER BY nested(PATH.INNER_FIELD, PATH)"
-        )
-    );
+    assertNotNull(parser.parse("SELECT NESTED(PATH.INNER_FIELD) FROM TEST"));
+    assertNotNull(parser.parse("SELECT NESTED('PATH.INNER_FIELD') FROM TEST"));
+    assertNotNull(parser.parse("SELECT SUM(NESTED(PATH.INNER_FIELD)) FROM TEST"));
+    assertNotNull(parser.parse("SELECT NESTED(PATH.INNER_FIELD, PATH) FROM TEST"));
+    assertNotNull(parser.parse("SELECT * FROM TEST WHERE NESTED(PATH.INNER_FIELDS) = 'A'"));
+    assertNotNull(parser.parse("SELECT * FROM TEST WHERE NESTED(PATH.INNER_FIELDS, PATH) = 'A'"));
+    assertNotNull(parser.parse("SELECT FIELD FROM TEST ORDER BY nested(PATH.INNER_FIELD, PATH)"));
   }
 
   @Test
@@ -669,37 +631,34 @@ public void can_parse_yearweek_function() {
 
   @ParameterizedTest
   @MethodSource({
-      "matchPhraseComplexQueries",
-      "matchPhraseGeneratedQueries",
-      "generateMatchPhraseQueries",
-      "matchPhraseQueryComplexQueries"
+    "matchPhraseComplexQueries",
+    "matchPhraseGeneratedQueries",
+    "generateMatchPhraseQueries",
+    "matchPhraseQueryComplexQueries"
   })
   public void canParseComplexMatchPhraseArgsTest(String query) {
     assertNotNull(parser.parse(query));
   }
 
   @ParameterizedTest
-  @MethodSource({
-      "generateMatchPhrasePrefixQueries"
-  })
+  @MethodSource({"generateMatchPhrasePrefixQueries"})
   public void canParseComplexMatchPhrasePrefixQueries(String query) {
     assertNotNull(parser.parse(query));
   }
 
   private static Stream matchPhraseComplexQueries() {
     return Stream.of(
-      "SELECT * FROM t WHERE match_phrase(c, 3)",
-      "SELECT * FROM t WHERE match_phrase(c, 3, fuzziness=AUTO)",
-      "SELECT * FROM t WHERE match_phrase(c, 3, zero_terms_query=\"all\")",
-      "SELECT * FROM t WHERE match_phrase(c, 3, lenient=true)",
-      "SELECT * FROM t WHERE match_phrase(c, 3, lenient='true')",
-      "SELECT * FROM t WHERE match_phrase(c, 3, operator=xor)",
-      "SELECT * FROM t WHERE match_phrase(c, 3, cutoff_frequency=0.04)",
-      "SELECT * FROM t WHERE match_phrase(c, 3, cutoff_frequency=0.04, analyzer = english, "
-              + "prefix_length=34, fuzziness='auto', minimum_should_match='2<-25% 9<-3')",
-      "SELECT * FROM t WHERE match_phrase(c, 3, minimum_should_match='2<-25% 9<-3')",
-      "SELECT * FROM t WHERE match_phrase(c, 3, operator='AUTO')"
-    );
+        "SELECT * FROM t WHERE match_phrase(c, 3)",
+        "SELECT * FROM t WHERE match_phrase(c, 3, fuzziness=AUTO)",
+        "SELECT * FROM t WHERE match_phrase(c, 3, zero_terms_query=\"all\")",
+        "SELECT * FROM t WHERE match_phrase(c, 3, lenient=true)",
+        "SELECT * FROM t WHERE match_phrase(c, 3, lenient='true')",
+        "SELECT * FROM t WHERE match_phrase(c, 3, operator=xor)",
+        "SELECT * FROM t WHERE match_phrase(c, 3, cutoff_frequency=0.04)",
+        "SELECT * FROM t WHERE match_phrase(c, 3, cutoff_frequency=0.04, analyzer = english, "
+            + "prefix_length=34, fuzziness='auto', minimum_should_match='2<-25% 9<-3')",
+        "SELECT * FROM t WHERE match_phrase(c, 3, minimum_should_match='2<-25% 9<-3')",
+        "SELECT * FROM t WHERE match_phrase(c, 3, operator='AUTO')");
   }
 
   @Test
@@ -738,50 +697,51 @@ private static Stream matchPhraseQueryComplexQueries() {
         "SELECT * FROM t WHERE matchphrasequery(c, 3, cutoff_frequency=0.04, analyzer = english, "
             + "prefix_length=34, fuzziness='auto', minimum_should_match='2<-25% 9<-3')",
         "SELECT * FROM t WHERE matchphrasequery(c, 3, minimum_should_match='2<-25% 9<-3')",
-        "SELECT * FROM t WHERE matchphrasequery(c, 3, operator='AUTO')"
-    );
+        "SELECT * FROM t WHERE matchphrasequery(c, 3, operator='AUTO')");
   }
 
   private static Stream matchPhraseGeneratedQueries() {
     var matchArgs = new HashMap();
-    matchArgs.put("fuzziness", new String[]{ "AUTO", "AUTO:1,5", "1" });
-    matchArgs.put("fuzzy_transpositions", new Boolean[]{ true, false });
-    matchArgs.put("operator", new String[]{ "and", "or" });
-    matchArgs.put("minimum_should_match",
-            new String[]{ "3", "-2", "75%", "-25%", "3<90%", "2<-25% 9<-3" });
-    matchArgs.put("analyzer", new String[]{ "standard", "stop", "english" });
-    matchArgs.put("zero_terms_query", new String[]{ "none", "all" });
-    matchArgs.put("lenient", new Boolean[]{ true, false });
+    matchArgs.put("fuzziness", new String[] {"AUTO", "AUTO:1,5", "1"});
+    matchArgs.put("fuzzy_transpositions", new Boolean[] {true, false});
+    matchArgs.put("operator", new String[] {"and", "or"});
+    matchArgs.put(
+        "minimum_should_match", new String[] {"3", "-2", "75%", "-25%", "3<90%", "2<-25% 9<-3"});
+    matchArgs.put("analyzer", new String[] {"standard", "stop", "english"});
+    matchArgs.put("zero_terms_query", new String[] {"none", "all"});
+    matchArgs.put("lenient", new Boolean[] {true, false});
     // deprecated
-    matchArgs.put("cutoff_frequency", new Double[]{ .0, 0.001, 1., 42. });
-    matchArgs.put("prefix_length", new Integer[]{ 0, 2, 5 });
-    matchArgs.put("max_expansions", new Integer[]{ 0, 5, 20 });
-    matchArgs.put("boost", new Double[]{ .5, 1., 2.3 });
+    matchArgs.put("cutoff_frequency", new Double[] {.0, 0.001, 1., 42.});
+    matchArgs.put("prefix_length", new Integer[] {0, 2, 5});
+    matchArgs.put("max_expansions", new Integer[] {0, 5, 20});
+    matchArgs.put("boost", new Double[] {.5, 1., 2.3});
 
     return generateQueries("match", matchArgs);
   }
 
   private static Stream generateMatchPhraseQueries() {
     var matchPhraseArgs = new HashMap();
-    matchPhraseArgs.put("analyzer", new String[]{ "standard", "stop", "english" });
-    matchPhraseArgs.put("max_expansions", new Integer[]{ 0, 5, 20 });
-    matchPhraseArgs.put("slop", new Integer[]{ 0, 1, 2 });
+    matchPhraseArgs.put("analyzer", new String[] {"standard", "stop", "english"});
+    matchPhraseArgs.put("max_expansions", new Integer[] {0, 5, 20});
+    matchPhraseArgs.put("slop", new Integer[] {0, 1, 2});
 
     return generateQueries("match_phrase", matchPhraseArgs);
   }
 
   private static Stream generateMatchPhrasePrefixQueries() {
-    return generateQueries("match_phrase_prefix", ImmutableMap.builder()
-        .put("analyzer", new String[] {"standard", "stop", "english"})
-        .put("slop", new Integer[] {0, 1, 2})
-        .put("max_expansions", new Integer[] {0, 3, 10})
-        .put("zero_terms_query", new String[] {"NONE", "ALL", "NULL"})
-        .put("boost", new Float[] {-0.5f, 1.0f, 1.2f})
-        .build());
-  }
-
-  private static Stream generateQueries(String function,
-                                                Map functionArgs) {
+    return generateQueries(
+        "match_phrase_prefix",
+        ImmutableMap.builder()
+            .put("analyzer", new String[] {"standard", "stop", "english"})
+            .put("slop", new Integer[] {0, 1, 2})
+            .put("max_expansions", new Integer[] {0, 3, 10})
+            .put("zero_terms_query", new String[] {"NONE", "ALL", "NULL"})
+            .put("boost", new Float[] {-0.5f, 1.0f, 1.2f})
+            .build());
+  }
+
+  private static Stream generateQueries(
+      String function, Map functionArgs) {
     var rand = new Random(0);
 
     class QueryGenerator implements Iterator {
@@ -789,7 +749,7 @@ class QueryGenerator implements Iterator {
       private int currentQuery = 0;
 
       private String randomIdentifier() {
-        return RandomStringUtils.random(10, 0, 0,true, false, null, rand);
+        return RandomStringUtils.random(10, 0, 0, true, false, null, rand);
       }
 
       @Override
@@ -803,16 +763,17 @@ public String next() {
         currentQuery += 1;
 
         StringBuilder query = new StringBuilder();
-        query.append(String.format("SELECT * FROM test WHERE %s(%s, %s", function,
-            randomIdentifier(),
-            randomIdentifier()));
+        query.append(
+            String.format(
+                "SELECT * FROM test WHERE %s(%s, %s",
+                function, randomIdentifier(), randomIdentifier()));
         var args = new ArrayList();
         for (var pair : functionArgs.entrySet()) {
           if (rand.nextBoolean()) {
             var arg = new StringBuilder();
             arg.append(rand.nextBoolean() ? "," : ", ");
-            arg.append(rand.nextBoolean() ? pair.getKey().toLowerCase()
-                    : pair.getKey().toUpperCase());
+            arg.append(
+                rand.nextBoolean() ? pair.getKey().toLowerCase() : pair.getKey().toUpperCase());
             arg.append(rand.nextBoolean() ? "=" : " = ");
             if (pair.getValue() instanceof String[] || rand.nextBoolean()) {
               var quoteSymbol = rand.nextBoolean() ? '\'' : '"';
diff --git a/sql/src/test/java/org/opensearch/sql/sql/domain/SQLQueryRequestTest.java b/sql/src/test/java/org/opensearch/sql/sql/domain/SQLQueryRequestTest.java
index 1ffa4f0fa8..2b64b13b35 100644
--- a/sql/src/test/java/org/opensearch/sql/sql/domain/SQLQueryRequestTest.java
+++ b/sql/src/test/java/org/opensearch/sql/sql/domain/SQLQueryRequestTest.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.sql.domain;
 
 import static org.junit.jupiter.api.Assertions.assertAll;
@@ -32,21 +31,15 @@ public void should_support_query() {
 
   @Test
   public void should_support_query_with_JDBC_format() {
-    SQLQueryRequest request = SQLQueryRequestBuilder.request("SELECT 1")
-                                                    .format("jdbc")
-                                                    .build();
+    SQLQueryRequest request = SQLQueryRequestBuilder.request("SELECT 1").format("jdbc").build();
     assertAll(
-        () -> assertTrue(request.isSupported()),
-        () -> assertEquals(request.format(), Format.JDBC)
-    );
+        () -> assertTrue(request.isSupported()), () -> assertEquals(request.format(), Format.JDBC));
   }
 
   @Test
   public void should_support_query_with_query_field_only() {
     SQLQueryRequest request =
-        SQLQueryRequestBuilder.request("SELECT 1")
-                              .jsonContent("{\"query\": \"SELECT 1\"}")
-                              .build();
+        SQLQueryRequestBuilder.request("SELECT 1").jsonContent("{\"query\": \"SELECT 1\"}").build();
     assertTrue(request.isSupported());
   }
 
@@ -57,21 +50,16 @@ public void should_support_query_with_parameters() {
             .jsonContent("{\"query\": \"SELECT 1\", \"parameters\":[]}")
             .build();
     SQLQueryRequest requestWithParams =
-        SQLQueryRequestBuilder.request("SELECT 1")
-            .params(Map.of("one", "two"))
-            .build();
+        SQLQueryRequestBuilder.request("SELECT 1").params(Map.of("one", "two")).build();
     assertAll(
         () -> assertTrue(requestWithContent.isSupported()),
-        () -> assertTrue(requestWithParams.isSupported())
-    );
+        () -> assertTrue(requestWithParams.isSupported()));
   }
 
   @Test
   public void should_support_query_without_parameters() {
     SQLQueryRequest requestWithNoParams =
-        SQLQueryRequestBuilder.request("SELECT 1")
-            .params(Map.of())
-            .build();
+        SQLQueryRequestBuilder.request("SELECT 1").params(Map.of()).build();
     assertTrue(requestWithNoParams.isSupported());
   }
 
@@ -79,8 +67,8 @@ public void should_support_query_without_parameters() {
   public void should_support_query_with_zero_fetch_size() {
     SQLQueryRequest request =
         SQLQueryRequestBuilder.request("SELECT 1")
-                              .jsonContent("{\"query\": \"SELECT 1\", \"fetch_size\": 0}")
-                              .build();
+            .jsonContent("{\"query\": \"SELECT 1\", \"fetch_size\": 0}")
+            .build();
     assertTrue(request.isSupported());
   }
 
@@ -96,52 +84,37 @@ public void should_support_query_with_parameters_and_zero_fetch_size() {
   @Test
   public void should_support_explain() {
     SQLQueryRequest explainRequest =
-        SQLQueryRequestBuilder.request("SELECT 1")
-                              .path("_plugins/_sql/_explain")
-                              .build();
+        SQLQueryRequestBuilder.request("SELECT 1").path("_plugins/_sql/_explain").build();
 
     assertAll(
         () -> assertTrue(explainRequest.isExplainRequest()),
-        () -> assertTrue(explainRequest.isSupported())
-    );
+        () -> assertTrue(explainRequest.isSupported()));
   }
 
   @Test
   public void should_support_cursor_request() {
     SQLQueryRequest fetchSizeRequest =
         SQLQueryRequestBuilder.request("SELECT 1")
-                              .jsonContent("{\"query\": \"SELECT 1\", \"fetch_size\": 5}")
-                              .build();
+            .jsonContent("{\"query\": \"SELECT 1\", \"fetch_size\": 5}")
+            .build();
 
     SQLQueryRequest cursorRequest =
-        SQLQueryRequestBuilder.request(null)
-                              .cursor("abcdefgh...")
-                              .build();
+        SQLQueryRequestBuilder.request(null).cursor("abcdefgh...").build();
 
     assertAll(
         () -> assertTrue(fetchSizeRequest.isSupported()),
-        () -> assertTrue(cursorRequest.isSupported())
-    );
+        () -> assertTrue(cursorRequest.isSupported()));
   }
 
   @Test
   public void should_support_cursor_close_request() {
     SQLQueryRequest closeRequest =
-        SQLQueryRequestBuilder.request(null)
-                              .cursor("pewpew")
-                              .path("_plugins/_sql/close")
-                              .build();
+        SQLQueryRequestBuilder.request(null).cursor("pewpew").path("_plugins/_sql/close").build();
 
     SQLQueryRequest emptyCloseRequest =
-        SQLQueryRequestBuilder.request(null)
-                              .cursor("")
-                              .path("_plugins/_sql/close")
-                              .build();
+        SQLQueryRequestBuilder.request(null).cursor("").path("_plugins/_sql/close").build();
 
-    SQLQueryRequest pagingRequest =
-        SQLQueryRequestBuilder.request(null)
-                              .cursor("pewpew")
-                              .build();
+    SQLQueryRequest pagingRequest = SQLQueryRequestBuilder.request(null).cursor("pewpew").build();
 
     assertAll(
         () -> assertTrue(closeRequest.isSupported()),
@@ -149,71 +122,52 @@ public void should_support_cursor_close_request() {
         () -> assertTrue(pagingRequest.isSupported()),
         () -> assertFalse(pagingRequest.isCursorCloseRequest()),
         () -> assertFalse(emptyCloseRequest.isSupported()),
-        () -> assertTrue(emptyCloseRequest.isCursorCloseRequest())
-    );
+        () -> assertTrue(emptyCloseRequest.isCursorCloseRequest()));
   }
 
   @Test
   public void should_not_support_request_with_empty_cursor() {
     SQLQueryRequest requestWithEmptyCursor =
-        SQLQueryRequestBuilder.request(null)
-                              .cursor("")
-                              .build();
+        SQLQueryRequestBuilder.request(null).cursor("").build();
     SQLQueryRequest requestWithNullCursor =
-        SQLQueryRequestBuilder.request(null)
-                              .cursor(null)
-                              .build();
+        SQLQueryRequestBuilder.request(null).cursor(null).build();
     assertAll(
         () -> assertFalse(requestWithEmptyCursor.isSupported()),
-        () -> assertFalse(requestWithNullCursor.isSupported())
-    );
+        () -> assertFalse(requestWithNullCursor.isSupported()));
   }
 
   @Test
   public void should_not_support_request_with_unknown_field() {
     SQLQueryRequest request =
-        SQLQueryRequestBuilder.request("SELECT 1")
-                              .jsonContent("{\"pewpew\": 42}")
-                              .build();
+        SQLQueryRequestBuilder.request("SELECT 1").jsonContent("{\"pewpew\": 42}").build();
     assertFalse(request.isSupported());
   }
 
   @Test
   public void should_not_support_request_with_cursor_and_something_else() {
     SQLQueryRequest requestWithQuery =
-        SQLQueryRequestBuilder.request("SELECT 1")
-                              .cursor("n:12356")
-                              .build();
+        SQLQueryRequestBuilder.request("SELECT 1").cursor("n:12356").build();
     SQLQueryRequest requestWithParams =
-        SQLQueryRequestBuilder.request(null)
-                              .cursor("n:12356")
-                              .params(Map.of("one", "two"))
-                              .build();
+        SQLQueryRequestBuilder.request(null).cursor("n:12356").params(Map.of("one", "two")).build();
     SQLQueryRequest requestWithParamsWithFormat =
         SQLQueryRequestBuilder.request(null)
-                              .cursor("n:12356")
-                              .params(Map.of("format", "jdbc"))
-                              .build();
+            .cursor("n:12356")
+            .params(Map.of("format", "jdbc"))
+            .build();
     SQLQueryRequest requestWithParamsWithFormatAnd =
         SQLQueryRequestBuilder.request(null)
-                              .cursor("n:12356")
-                              .params(Map.of("format", "jdbc", "something", "else"))
-                              .build();
+            .cursor("n:12356")
+            .params(Map.of("format", "jdbc", "something", "else"))
+            .build();
     SQLQueryRequest requestWithFetchSize =
         SQLQueryRequestBuilder.request(null)
-                              .cursor("n:12356")
-                              .jsonContent("{\"fetch_size\": 5}")
-                              .build();
+            .cursor("n:12356")
+            .jsonContent("{\"fetch_size\": 5}")
+            .build();
     SQLQueryRequest requestWithNoParams =
-        SQLQueryRequestBuilder.request(null)
-                              .cursor("n:12356")
-                              .params(Map.of())
-                              .build();
+        SQLQueryRequestBuilder.request(null).cursor("n:12356").params(Map.of()).build();
     SQLQueryRequest requestWithNoContent =
-        SQLQueryRequestBuilder.request(null)
-                              .cursor("n:12356")
-                              .jsonContent("{}")
-                              .build();
+        SQLQueryRequestBuilder.request(null).cursor("n:12356").jsonContent("{}").build();
     assertAll(
         () -> assertFalse(requestWithQuery.isSupported()),
         () -> assertFalse(requestWithParams.isSupported()),
@@ -221,8 +175,7 @@ public void should_not_support_request_with_cursor_and_something_else() {
         () -> assertTrue(requestWithNoParams.isSupported()),
         () -> assertTrue(requestWithParamsWithFormat.isSupported()),
         () -> assertFalse(requestWithParamsWithFormatAnd.isSupported()),
-        () -> assertTrue(requestWithNoContent.isSupported())
-    );
+        () -> assertTrue(requestWithNoContent.isSupported()));
   }
 
   @Test
@@ -234,15 +187,11 @@ public void should_use_JDBC_format_by_default() {
 
   @Test
   public void should_support_CSV_format_and_sanitize() {
-    SQLQueryRequest csvRequest =
-        SQLQueryRequestBuilder.request("SELECT 1")
-                              .format("csv")
-                              .build();
+    SQLQueryRequest csvRequest = SQLQueryRequestBuilder.request("SELECT 1").format("csv").build();
     assertAll(
         () -> assertTrue(csvRequest.isSupported()),
         () -> assertEquals(csvRequest.format(), Format.CSV),
-        () -> assertTrue(csvRequest.sanitize())
-    );
+        () -> assertTrue(csvRequest.sanitize()));
   }
 
   @Test
@@ -252,36 +201,28 @@ public void should_skip_sanitize_if_set_false() {
     SQLQueryRequest csvRequest = SQLQueryRequestBuilder.request("SELECT 1").params(params).build();
     assertAll(
         () -> assertEquals(csvRequest.format(), Format.CSV),
-        () -> assertFalse(csvRequest.sanitize())
-    );
+        () -> assertFalse(csvRequest.sanitize()));
   }
 
   @Test
   public void should_not_support_other_format() {
-    SQLQueryRequest csvRequest =
-        SQLQueryRequestBuilder.request("SELECT 1")
-            .format("other")
-            .build();
+    SQLQueryRequest csvRequest = SQLQueryRequestBuilder.request("SELECT 1").format("other").build();
 
     assertAll(
         () -> assertFalse(csvRequest.isSupported()),
-        () -> assertEquals("response in other format is not supported.",
-            assertThrows(IllegalArgumentException.class, csvRequest::format).getMessage())
-    );
+        () ->
+            assertEquals(
+                "response in other format is not supported.",
+                assertThrows(IllegalArgumentException.class, csvRequest::format).getMessage()));
   }
 
   @Test
   public void should_support_raw_format() {
-    SQLQueryRequest csvRequest =
-            SQLQueryRequestBuilder.request("SELECT 1")
-                    .format("raw")
-                    .build();
+    SQLQueryRequest csvRequest = SQLQueryRequestBuilder.request("SELECT 1").format("raw").build();
     assertTrue(csvRequest.isSupported());
   }
 
-  /**
-   * SQL query request build helper to improve test data setup readability.
-   */
+  /** SQL query request build helper to improve test data setup readability. */
   private static class SQLQueryRequestBuilder {
     private String jsonContent;
     private String query;
@@ -325,9 +266,8 @@ SQLQueryRequest build() {
       if (format != null) {
         params.put("format", format);
       }
-      return new SQLQueryRequest(jsonContent == null ? null : new JSONObject(jsonContent),
-          query, path, params, cursor);
+      return new SQLQueryRequest(
+          jsonContent == null ? null : new JSONObject(jsonContent), query, path, params, cursor);
     }
   }
-
 }
diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AnonymizerListenerTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AnonymizerListenerTest.java
index 59d723e3a2..4d2addf3d3 100644
--- a/sql/src/test/java/org/opensearch/sql/sql/parser/AnonymizerListenerTest.java
+++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AnonymizerListenerTest.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.sql.parser;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -23,6 +22,7 @@ public class AnonymizerListenerTest {
 
   /**
    * Helper function to parse SQl queries for testing purposes.
+   *
    * @param query SQL query to be anonymized.
    */
   private void parse(String query) {
@@ -36,8 +36,9 @@ private void parse(String query) {
   @Test
   public void queriesShouldHaveAnonymousFieldAndIndex() {
     String query = "SELECT ABS(balance) FROM accounts WHERE age > 30 GROUP BY ABS(balance)";
-    String expectedQuery = "( SELECT ABS ( identifier ) FROM table "
-        + "WHERE identifier > number GROUP BY ABS ( identifier ) )";
+    String expectedQuery =
+        "( SELECT ABS ( identifier ) FROM table "
+            + "WHERE identifier > number GROUP BY ABS ( identifier ) )";
     parse(query);
     assertEquals(expectedQuery, anonymizerListener.getAnonymizedQueryString());
   }
@@ -92,12 +93,13 @@ public void queriesWithAggregatesShouldAnonymizeSensitiveData() {
 
   @Test
   public void queriesWithSubqueriesShouldAnonymizeSensitiveData() {
-    String query = "SELECT a.f, a.l, a.a FROM "
-        + "(SELECT firstname AS f, lastname AS l, age AS a FROM accounts WHERE age > 30) a";
+    String query =
+        "SELECT a.f, a.l, a.a FROM "
+            + "(SELECT firstname AS f, lastname AS l, age AS a FROM accounts WHERE age > 30) a";
     String expectedQuery =
-        "( SELECT identifier.identifier, identifier.identifier, identifier.identifier FROM "
-        + "( SELECT identifier AS identifier, identifier AS identifier, identifier AS identifier "
-        + "FROM table WHERE identifier > number ) identifier )";
+        "( SELECT identifier.identifier, identifier.identifier, identifier.identifier FROM ( SELECT"
+            + " identifier AS identifier, identifier AS identifier, identifier AS identifier FROM"
+            + " table WHERE identifier > number ) identifier )";
     parse(query);
     assertEquals(expectedQuery, anonymizerListener.getAnonymizedQueryString());
   }
@@ -121,8 +123,9 @@ public void queriesWithOrderByShouldAnonymizeSensitiveData() {
   @Test
   public void queriesWithHavingShouldAnonymizeSensitiveData() {
     String query = "SELECT SUM(balance) FROM accounts GROUP BY lastname HAVING COUNT(balance) > 2";
-    String expectedQuery = "( SELECT SUM ( identifier ) FROM table "
-        + "GROUP BY identifier HAVING COUNT ( identifier ) > number )";
+    String expectedQuery =
+        "( SELECT SUM ( identifier ) FROM table "
+            + "GROUP BY identifier HAVING COUNT ( identifier ) > number )";
     parse(query);
     assertEquals(expectedQuery, anonymizerListener.getAnonymizedQueryString());
   }
@@ -130,8 +133,9 @@ public void queriesWithHavingShouldAnonymizeSensitiveData() {
   @Test
   public void queriesWithHighlightShouldAnonymizeSensitiveData() {
     String query = "SELECT HIGHLIGHT(str0) FROM CALCS WHERE QUERY_STRING(['str0'], 'FURNITURE')";
-    String expectedQuery = "( SELECT HIGHLIGHT ( identifier ) FROM table WHERE "
-        + "QUERY_STRING ( [ 'string_literal' ], 'string_literal' ) )";
+    String expectedQuery =
+        "( SELECT HIGHLIGHT ( identifier ) FROM table WHERE "
+            + "QUERY_STRING ( [ 'string_literal' ], 'string_literal' ) )";
     parse(query);
     assertEquals(expectedQuery, anonymizerListener.getAnonymizedQueryString());
   }
@@ -139,8 +143,8 @@ public void queriesWithHighlightShouldAnonymizeSensitiveData() {
   @Test
   public void queriesWithMatchShouldAnonymizeSensitiveData() {
     String query = "SELECT str0 FROM CALCS WHERE MATCH(str0, 'FURNITURE')";
-    String expectedQuery = "( SELECT identifier FROM table "
-        + "WHERE MATCH ( identifier, 'string_literal' ) )";
+    String expectedQuery =
+        "( SELECT identifier FROM table " + "WHERE MATCH ( identifier, 'string_literal' ) )";
     parse(query);
     assertEquals(expectedQuery, anonymizerListener.getAnonymizedQueryString());
   }
@@ -155,10 +159,12 @@ public void queriesWithPositionShouldAnonymizeSensitiveData() {
 
   @Test
   public void queriesWithMatch_Bool_Prefix_ShouldAnonymizeSensitiveData() {
-    String query = "SELECT firstname, address FROM accounts WHERE "
-        + "match_bool_prefix(address, 'Bristol Street', minimum_should_match=2)";
-    String expectedQuery = "( SELECT identifier, identifier FROM table WHERE MATCH_BOOL_PREFIX "
-        + "( identifier, 'string_literal', MINIMUM_SHOULD_MATCH = number ) )";
+    String query =
+        "SELECT firstname, address FROM accounts WHERE "
+            + "match_bool_prefix(address, 'Bristol Street', minimum_should_match=2)";
+    String expectedQuery =
+        "( SELECT identifier, identifier FROM table WHERE MATCH_BOOL_PREFIX "
+            + "( identifier, 'string_literal', MINIMUM_SHOULD_MATCH = number ) )";
     parse(query);
     assertEquals(expectedQuery, anonymizerListener.getAnonymizedQueryString());
   }
@@ -195,10 +201,7 @@ public void queriesWithNotEqualAlternateShouldAnonymizeSensitiveData() {
     assertEquals(expectedQuery, anonymizerListener.getAnonymizedQueryString());
   }
 
-
-  /**
-   * Test added for coverage, but the errorNode will not be hit normally.
-   */
+  /** Test added for coverage, but the errorNode will not be hit normally. */
   @Test
   public void enterErrorNote() {
     ErrorNode node = mock(ErrorNode.class);
diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AstAggregationBuilderTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AstAggregationBuilderTest.java
index 79896d9400..95188e20b6 100644
--- a/sql/src/test/java/org/opensearch/sql/sql/parser/AstAggregationBuilderTest.java
+++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AstAggregationBuilderTest.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.sql.parser;
 
 import static java.util.Collections.emptyList;
@@ -30,7 +29,6 @@
 import org.junit.jupiter.api.DisplayNameGeneration;
 import org.junit.jupiter.api.DisplayNameGenerator;
 import org.junit.jupiter.api.Test;
-import org.opensearch.sql.ast.expression.AllFields;
 import org.opensearch.sql.ast.expression.UnresolvedExpression;
 import org.opensearch.sql.ast.tree.Aggregation;
 import org.opensearch.sql.ast.tree.UnresolvedPlan;
@@ -60,10 +58,9 @@ void can_build_group_by_clause_with_scalar_expression() {
         buildAggregation("SELECT ABS(age + 1) FROM test GROUP BY ABS(age + 1)"),
         allOf(
             hasGroupByItems(
-                alias("ABS(+(age, 1))", function("ABS",
-                    function("+",
-                        qualifiedName("age"),
-                        intLiteral(1))))),
+                alias(
+                    "ABS(+(age, 1))",
+                    function("ABS", function("+", qualifiedName("age"), intLiteral(1))))),
             hasAggregators()));
   }
 
@@ -80,9 +77,7 @@ void can_build_group_by_clause_with_complicated_aggregators() {
   void can_build_group_by_clause_without_aggregators() {
     assertThat(
         buildAggregation("SELECT state FROM test GROUP BY state"),
-        allOf(
-            hasGroupByItems(alias("state", qualifiedName("state"))),
-            hasAggregators()));
+        allOf(hasGroupByItems(alias("state", qualifiedName("state"))), hasAggregators()));
   }
 
   @Test
@@ -102,50 +97,43 @@ void can_build_implicit_group_by_for_aggregator_in_having_clause() {
         buildAggregation("SELECT true FROM test HAVING AVG(age) > 30"),
         allOf(
             hasGroupByItems(),
-            hasAggregators(
-                alias("AVG(age)", aggregate("AVG", qualifiedName("age"))))));
+            hasAggregators(alias("AVG(age)", aggregate("AVG", qualifiedName("age"))))));
 
     assertThat(
-            buildAggregation("SELECT PI() FROM test HAVING AVG(age) > 30"),
-            allOf(
-                    hasGroupByItems(),
-                    hasAggregators(
-                            alias("AVG(age)", aggregate("AVG", qualifiedName("age"))))));
+        buildAggregation("SELECT PI() FROM test HAVING AVG(age) > 30"),
+        allOf(
+            hasGroupByItems(),
+            hasAggregators(alias("AVG(age)", aggregate("AVG", qualifiedName("age"))))));
 
     assertThat(
-            buildAggregation("SELECT ABS(1.5) FROM test HAVING AVG(age) > 30"),
-            allOf(
-                    hasGroupByItems(),
-                    hasAggregators(
-                            alias("AVG(age)", aggregate("AVG", qualifiedName("age"))))));
+        buildAggregation("SELECT ABS(1.5) FROM test HAVING AVG(age) > 30"),
+        allOf(
+            hasGroupByItems(),
+            hasAggregators(alias("AVG(age)", aggregate("AVG", qualifiedName("age"))))));
 
     assertThat(
-            buildAggregation("SELECT ABS(ABS(1.5)) FROM test HAVING AVG(age) > 30"),
-            allOf(
-                    hasGroupByItems(),
-                    hasAggregators(
-                            alias("AVG(age)", aggregate("AVG", qualifiedName("age"))))));
+        buildAggregation("SELECT ABS(ABS(1.5)) FROM test HAVING AVG(age) > 30"),
+        allOf(
+            hasGroupByItems(),
+            hasAggregators(alias("AVG(age)", aggregate("AVG", qualifiedName("age"))))));
 
     assertThat(
         buildAggregation("SELECT INTERVAL 1 DAY FROM test HAVING AVG(age) > 30"),
         allOf(
             hasGroupByItems(),
-            hasAggregators(
-                alias("AVG(age)", aggregate("AVG", qualifiedName("age"))))));
+            hasAggregators(alias("AVG(age)", aggregate("AVG", qualifiedName("age"))))));
 
     assertThat(
         buildAggregation("SELECT CAST(1 AS LONG) FROM test HAVING AVG(age) > 30"),
         allOf(
             hasGroupByItems(),
-            hasAggregators(
-                alias("AVG(age)", aggregate("AVG", qualifiedName("age"))))));
+            hasAggregators(alias("AVG(age)", aggregate("AVG", qualifiedName("age"))))));
 
     assertThat(
         buildAggregation("SELECT CASE WHEN true THEN 1 ELSE 2 END FROM test HAVING AVG(age) > 30"),
         allOf(
             hasGroupByItems(),
-            hasAggregators(
-                alias("AVG(age)", aggregate("AVG", qualifiedName("age"))))));
+            hasAggregators(alias("AVG(age)", aggregate("AVG", qualifiedName("age"))))));
   }
 
   @Test
@@ -155,8 +143,7 @@ void can_build_distinct_aggregator() {
         allOf(
             hasGroupByItems(alias("age", qualifiedName("age"))),
             hasAggregators(
-                alias("COUNT(DISTINCT name)", distinctAggregate("COUNT", qualifiedName(
-                    "name"))))));
+                alias("COUNT(DISTINCT name)", distinctAggregate("COUNT", qualifiedName("name"))))));
   }
 
   @Test
@@ -168,8 +155,8 @@ void should_build_nothing_if_no_group_by_and_no_aggregators_in_select() {
   void should_replace_group_by_alias_by_expression_in_select_clause() {
     assertThat(
         buildAggregation("SELECT state AS s, name FROM test GROUP BY s, name"),
-        hasGroupByItems(alias("state", qualifiedName("state")),
-            alias("name", qualifiedName("name"))));
+        hasGroupByItems(
+            alias("state", qualifiedName("state")), alias("name", qualifiedName("name"))));
 
     assertThat(
         buildAggregation("SELECT ABS(age) AS a FROM test GROUP BY a"),
@@ -191,25 +178,30 @@ void should_replace_group_by_ordinal_by_expression_in_select_clause() {
 
   @Test
   void should_report_error_for_non_integer_ordinal_in_group_by() {
-    SemanticCheckException error = assertThrows(SemanticCheckException.class, () ->
-        buildAggregation("SELECT state AS s FROM test GROUP BY 1.5"));
-    assertEquals(
-        "Non-integer constant [1.5] found in ordinal",
-        error.getMessage());
+    SemanticCheckException error =
+        assertThrows(
+            SemanticCheckException.class,
+            () -> buildAggregation("SELECT state AS s FROM test GROUP BY 1.5"));
+    assertEquals("Non-integer constant [1.5] found in ordinal", error.getMessage());
   }
 
-  @Disabled("This validation is supposed to be in analyzing phase. This test should be enabled "
+  @Disabled(
+      "This validation is supposed to be in analyzing phase. This test should be enabled "
           + "once https://github.com/opensearch-project/sql/issues/910 has been resolved")
   @Test
   void should_report_error_for_mismatch_between_select_and_group_by_items() {
-    SemanticCheckException error1 = assertThrows(SemanticCheckException.class, () ->
-        buildAggregation("SELECT name FROM test GROUP BY state"));
+    SemanticCheckException error1 =
+        assertThrows(
+            SemanticCheckException.class,
+            () -> buildAggregation("SELECT name FROM test GROUP BY state"));
     assertEquals(
         "Expression [name] that contains non-aggregated column is not present in group by clause",
         error1.getMessage());
 
-    SemanticCheckException error2 = assertThrows(SemanticCheckException.class, () ->
-        buildAggregation("SELECT ABS(name + 1) FROM test GROUP BY name"));
+    SemanticCheckException error2 =
+        assertThrows(
+            SemanticCheckException.class,
+            () -> buildAggregation("SELECT ABS(name + 1) FROM test GROUP BY name"));
     assertEquals(
         "Expression [Function(funcName=ABS, funcArgs=[Function(funcName=+, "
             + "funcArgs=[name, Literal(value=1, type=INTEGER)])])] that contains "
@@ -219,15 +211,19 @@ void should_report_error_for_mismatch_between_select_and_group_by_items() {
 
   @Test
   void should_report_error_for_non_aggregated_item_in_select_if_no_group_by() {
-    SemanticCheckException error1 = assertThrows(SemanticCheckException.class, () ->
-        buildAggregation("SELECT age, AVG(balance) FROM tests"));
+    SemanticCheckException error1 =
+        assertThrows(
+            SemanticCheckException.class,
+            () -> buildAggregation("SELECT age, AVG(balance) FROM tests"));
     assertEquals(
         "Explicit GROUP BY clause is required because expression [age] "
             + "contains non-aggregated column",
         error1.getMessage());
 
-    SemanticCheckException error2 = assertThrows(SemanticCheckException.class, () ->
-        buildAggregation("SELECT ABS(age + 1), AVG(balance) FROM tests"));
+    SemanticCheckException error2 =
+        assertThrows(
+            SemanticCheckException.class,
+            () -> buildAggregation("SELECT ABS(age + 1), AVG(balance) FROM tests"));
     assertEquals(
         "Explicit GROUP BY clause is required because expression [ABS(+(age, 1))] "
             + "contains non-aggregated column",
@@ -236,19 +232,25 @@ void should_report_error_for_non_aggregated_item_in_select_if_no_group_by() {
 
   @Test
   void should_report_error_for_group_by_ordinal_out_of_bound_of_select_list() {
-    SemanticCheckException error1 = assertThrows(SemanticCheckException.class, () ->
-        buildAggregation("SELECT age, AVG(balance) FROM tests GROUP BY 0"));
+    SemanticCheckException error1 =
+        assertThrows(
+            SemanticCheckException.class,
+            () -> buildAggregation("SELECT age, AVG(balance) FROM tests GROUP BY 0"));
     assertEquals("Ordinal [0] is out of bound of select item list", error1.getMessage());
 
-    SemanticCheckException error2 = assertThrows(SemanticCheckException.class, () ->
-        buildAggregation("SELECT age, AVG(balance) FROM tests GROUP BY 3"));
+    SemanticCheckException error2 =
+        assertThrows(
+            SemanticCheckException.class,
+            () -> buildAggregation("SELECT age, AVG(balance) FROM tests GROUP BY 3"));
     assertEquals("Ordinal [3] is out of bound of select item list", error2.getMessage());
   }
 
   @Test
   void should_report_error_for_non_aggregated_item_in_select_if_only_having() {
-    SemanticCheckException error = assertThrows(SemanticCheckException.class, () ->
-        buildAggregation("SELECT age FROM tests HAVING AVG(balance) > 30"));
+    SemanticCheckException error =
+        assertThrows(
+            SemanticCheckException.class,
+            () -> buildAggregation("SELECT age FROM tests HAVING AVG(balance) > 30"));
     assertEquals(
         "Explicit GROUP BY clause is required because expression [age] "
             + "contains non-aggregated column",
@@ -263,10 +265,10 @@ private Matcher hasAggregators(UnresolvedExpression... exprs) {
     return featureValueOf("aggregators", Aggregation::getAggExprList, exprs);
   }
 
-  private Matcher featureValueOf(String name,
-                                                 Function> getter,
-                                                 UnresolvedExpression... exprs) {
+  private Matcher featureValueOf(
+      String name,
+      Function> getter,
+      UnresolvedExpression... exprs) {
     Matcher> subMatcher =
         (exprs.length == 0) ? equalTo(emptyList()) : equalTo(Arrays.asList(exprs));
     return new FeatureMatcher>(subMatcher, name, "") {
@@ -296,5 +298,4 @@ private QuerySpecificationContext parse(String query) {
     parser.addErrorListener(new SyntaxAnalysisErrorListener());
     return parser.querySpecification();
   }
-
 }
diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AstBuilderTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AstBuilderTest.java
index 4b44c0344c..edee692500 100644
--- a/sql/src/test/java/org/opensearch/sql/sql/parser/AstBuilderTest.java
+++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AstBuilderTest.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.sql.parser;
 
 import static java.util.Collections.emptyList;
@@ -53,36 +52,20 @@ public void can_build_select_literals() {
             alias("'hello'", stringLiteral("hello")),
             alias("\"world\"", stringLiteral("world")),
             alias("false", booleanLiteral(false)),
-            alias("-4.567", doubleLiteral(-4.567))
-        ),
-        buildAST("SELECT 123, 'hello', \"world\", false, -4.567")
-    );
+            alias("-4.567", doubleLiteral(-4.567))),
+        buildAST("SELECT 123, 'hello', \"world\", false, -4.567"));
   }
 
   @Test
   public void can_build_select_function_call_with_alias() {
     assertEquals(
-        project(
-            relation("test"),
-            alias(
-                "ABS(age)",
-                function("ABS", qualifiedName("age")),
-                "a"
-            )
-        ),
-        buildAST("SELECT ABS(age) AS a FROM test")
-    );
+        project(relation("test"), alias("ABS(age)", function("ABS", qualifiedName("age")), "a")),
+        buildAST("SELECT ABS(age) AS a FROM test"));
   }
 
   @Test
   public void can_build_select_all_from_index() {
-    assertEquals(
-        project(
-            relation("test"),
-            AllFields.of()
-        ),
-        buildAST("SELECT * FROM test")
-    );
+    assertEquals(project(relation("test"), AllFields.of()), buildAST("SELECT * FROM test"));
 
     assertThrows(SyntaxCheckException.class, () -> buildAST("SELECT *"));
   }
@@ -90,14 +73,8 @@ public void can_build_select_all_from_index() {
   @Test
   public void can_build_nested_select_all() {
     assertEquals(
-        project(
-            relation("test"),
-            alias("nested(field.*)",
-                new NestedAllTupleFields("field")
-            )
-        ),
-        buildAST("SELECT nested(field.*) FROM test")
-    );
+        project(relation("test"), alias("nested(field.*)", new NestedAllTupleFields("field"))),
+        buildAST("SELECT nested(field.*) FROM test"));
   }
 
   @Test
@@ -107,32 +84,22 @@ public void can_build_select_all_and_fields_from_index() {
             relation("test"),
             AllFields.of(),
             alias("age", qualifiedName("age")),
-            alias("age", qualifiedName("age"), "a")
-        ),
-        buildAST("SELECT *, age, age as a FROM test")
-    );
+            alias("age", qualifiedName("age"), "a")),
+        buildAST("SELECT *, age, age as a FROM test"));
   }
 
   @Test
   public void can_build_select_fields_from_index() {
     assertEquals(
-        project(
-            relation("test"),
-            alias("age", qualifiedName("age"))
-        ),
-        buildAST("SELECT age FROM test")
-    );
+        project(relation("test"), alias("age", qualifiedName("age"))),
+        buildAST("SELECT age FROM test"));
   }
 
   @Test
   public void can_build_select_fields_with_alias() {
     assertEquals(
-        project(
-            relation("test"),
-            alias("age", qualifiedName("age"), "a")
-        ),
-        buildAST("SELECT age AS a FROM test")
-    );
+        project(relation("test"), alias("age", qualifiedName("age"), "a")),
+        buildAST("SELECT age AS a FROM test"));
   }
 
   @Test
@@ -140,17 +107,8 @@ public void can_build_select_fields_with_alias_quoted() {
     assertEquals(
         project(
             relation("test"),
-            alias(
-                "(age + 10)",
-                function("+", qualifiedName("age"), intLiteral(10)),
-                "Age_Expr"
-            )
-        ),
-        buildAST("SELECT"
-                + " (age + 10) AS `Age_Expr` "
-                + "FROM test"
-        )
-    );
+            alias("(age + 10)", function("+", qualifiedName("age"), intLiteral(10)), "Age_Expr")),
+        buildAST("SELECT" + " (age + 10) AS `Age_Expr` " + "FROM test"));
   }
 
   @Test
@@ -158,42 +116,27 @@ public void can_build_from_index_with_alias() {
     assertEquals(
         project(
             filter(
-                relation("test", "tt"),
-                function("=", qualifiedName("tt", "age"), intLiteral(30))),
-            alias("tt.name", qualifiedName("tt", "name"))
-        ),
-        buildAST("SELECT tt.name FROM test AS tt WHERE tt.age = 30")
-    );
+                relation("test", "tt"), function("=", qualifiedName("tt", "age"), intLiteral(30))),
+            alias("tt.name", qualifiedName("tt", "name"))),
+        buildAST("SELECT tt.name FROM test AS tt WHERE tt.age = 30"));
   }
 
   @Test
   public void can_build_from_index_with_alias_quoted() {
     assertEquals(
         project(
-            filter(
-                relation("test", "t"),
-                function("=", qualifiedName("t", "age"), intLiteral(30))),
-            alias("`t`.name", qualifiedName("t", "name"))
-        ),
-        buildAST("SELECT `t`.name FROM test `t` WHERE `t`.age = 30")
-    );
+            filter(relation("test", "t"), function("=", qualifiedName("t", "age"), intLiteral(30))),
+            alias("`t`.name", qualifiedName("t", "name"))),
+        buildAST("SELECT `t`.name FROM test `t` WHERE `t`.age = 30"));
   }
 
   @Test
   public void can_build_where_clause() {
     assertEquals(
         project(
-            filter(
-                relation("test"),
-                function(
-                    "=",
-                    qualifiedName("name"),
-                    stringLiteral("John"))
-            ),
-            alias("name", qualifiedName("name"))
-        ),
-        buildAST("SELECT name FROM test WHERE name = 'John'")
-    );
+            filter(relation("test"), function("=", qualifiedName("name"), stringLiteral("John"))),
+            alias("name", qualifiedName("name"))),
+        buildAST("SELECT name FROM test WHERE name = 'John'"));
   }
 
   @Test
@@ -202,8 +145,7 @@ public void can_build_count_literal() {
         project(
             agg(
                 relation("test"),
-                ImmutableList.of(
-                    alias("COUNT(1)", aggregate("COUNT", intLiteral(1)))),
+                ImmutableList.of(alias("COUNT(1)", aggregate("COUNT", intLiteral(1)))),
                 emptyList(),
                 emptyList(),
                 emptyList()),
@@ -217,8 +159,7 @@ public void can_build_count_star() {
         project(
             agg(
                 relation("test"),
-                ImmutableList.of(
-                    alias("COUNT(*)", aggregate("COUNT", AllFields.of()))),
+                ImmutableList.of(alias("COUNT(*)", aggregate("COUNT", AllFields.of()))),
                 emptyList(),
                 emptyList(),
                 emptyList()),
@@ -328,9 +269,7 @@ public void can_build_having_clause() {
                     emptyList(),
                     ImmutableList.of(alias("name", qualifiedName("name"))),
                     emptyList()),
-                function(">",
-                    aggregate("MIN", qualifiedName("balance")),
-                    intLiteral(1000))),
+                function(">", aggregate("MIN", qualifiedName("balance")), intLiteral(1000))),
             alias("name", qualifiedName("name")),
             alias("AVG(age)", aggregate("AVG", qualifiedName("age")))),
         buildAST("SELECT name, AVG(age) FROM test GROUP BY name HAVING MIN(balance) > 1000"));
@@ -343,14 +282,11 @@ public void can_build_having_condition_using_alias() {
             filter(
                 agg(
                     relation("test"),
-                    ImmutableList.of(
-                        alias("AVG(age)", aggregate("AVG", qualifiedName("age")))),
+                    ImmutableList.of(alias("AVG(age)", aggregate("AVG", qualifiedName("age")))),
                     emptyList(),
                     ImmutableList.of(alias("name", qualifiedName("name"))),
                     emptyList()),
-                function(">",
-                    aggregate("AVG", qualifiedName("age")),
-                    intLiteral(1000))),
+                function(">", aggregate("AVG", qualifiedName("age")), intLiteral(1000))),
             alias("name", qualifiedName("name")),
             alias("AVG(age)", aggregate("AVG", qualifiedName("age")), "a")),
         buildAST("SELECT name, AVG(age) AS a FROM test GROUP BY name HAVING a > 1000"));
@@ -360,9 +296,7 @@ public void can_build_having_condition_using_alias() {
   public void can_build_order_by_field_name() {
     assertEquals(
         project(
-            sort(
-                relation("test"),
-                field("name", argument("asc", booleanLiteral(true)))),
+            sort(relation("test"), field("name", argument("asc", booleanLiteral(true)))),
             alias("name", qualifiedName("name"))),
         buildAST("SELECT name FROM test ORDER BY name"));
   }
@@ -374,8 +308,7 @@ public void can_build_order_by_function() {
             sort(
                 relation("test"),
                 field(
-                    function("ABS", qualifiedName("name")),
-                    argument("asc", booleanLiteral(true)))),
+                    function("ABS", qualifiedName("name")), argument("asc", booleanLiteral(true)))),
             alias("name", qualifiedName("name"))),
         buildAST("SELECT name FROM test ORDER BY ABS(name)"));
   }
@@ -384,9 +317,7 @@ public void can_build_order_by_function() {
   public void can_build_order_by_alias() {
     assertEquals(
         project(
-            sort(
-                relation("test"),
-                field("name", argument("asc", booleanLiteral(true)))),
+            sort(relation("test"), field("name", argument("asc", booleanLiteral(true)))),
             alias("name", qualifiedName("name"), "n")),
         buildAST("SELECT name AS n FROM test ORDER BY n ASC"));
   }
@@ -395,9 +326,7 @@ public void can_build_order_by_alias() {
   public void can_build_order_by_ordinal() {
     assertEquals(
         project(
-            sort(
-                relation("test"),
-                field("name", argument("asc", booleanLiteral(false)))),
+            sort(relation("test"), field("name", argument("asc", booleanLiteral(false)))),
             alias("name", qualifiedName("name"))),
         buildAST("SELECT name FROM test ORDER BY 1 DESC"));
   }
@@ -424,8 +353,7 @@ public void can_build_select_distinct_clause() {
                 emptyList(),
                 emptyList(),
                 ImmutableList.of(
-                    alias("name", qualifiedName("name")),
-                    alias("age", qualifiedName("age"))),
+                    alias("name", qualifiedName("name")), alias("age", qualifiedName("age"))),
                 emptyList()),
             alias("name", qualifiedName("name")),
             alias("age", qualifiedName("age"))),
@@ -441,26 +369,21 @@ public void can_build_select_distinct_clause_with_function() {
                 emptyList(),
                 emptyList(),
                 ImmutableList.of(
-                    alias("SUBSTRING(name, 1, 2)",
+                    alias(
+                        "SUBSTRING(name, 1, 2)",
                         function(
-                            "SUBSTRING",
-                            qualifiedName("name"),
-                            intLiteral(1), intLiteral(2)))),
+                            "SUBSTRING", qualifiedName("name"), intLiteral(1), intLiteral(2)))),
                 emptyList()),
-            alias("SUBSTRING(name, 1, 2)",
-                function(
-                    "SUBSTRING",
-                    qualifiedName("name"),
-                    intLiteral(1), intLiteral(2)))),
+            alias(
+                "SUBSTRING(name, 1, 2)",
+                function("SUBSTRING", qualifiedName("name"), intLiteral(1), intLiteral(2)))),
         buildAST("SELECT DISTINCT SUBSTRING(name, 1, 2) FROM test"));
   }
 
   @Test
   public void can_build_select_all_clause() {
     assertEquals(
-        buildAST("SELECT name, age FROM test"),
-        buildAST("SELECT ALL name, age FROM test")
-    );
+        buildAST("SELECT name, age FROM test"), buildAST("SELECT ALL name, age FROM test"));
   }
 
   @Test
@@ -469,22 +392,24 @@ public void can_build_order_by_null_option() {
         project(
             sort(
                 relation("test"),
-                field("name",
+                field(
+                    "name",
                     argument("asc", booleanLiteral(true)),
                     argument("nullFirst", booleanLiteral(false)))),
-        alias("name", qualifiedName("name"))),
+            alias("name", qualifiedName("name"))),
         buildAST("SELECT name FROM test ORDER BY name NULLS LAST"));
   }
 
   /**
-   * Ensure Nested function falls back to legacy engine when used in an HAVING clause.
-   * TODO Remove this test when support is added.
+   * Ensure Nested function falls back to legacy engine when used in an HAVING clause. TODO Remove
+   * this test when support is added.
    */
   @Test
   public void nested_in_having_clause_throws_exception() {
-    SyntaxCheckException exception = assertThrows(SyntaxCheckException.class,
-        () -> buildAST("SELECT count(*) FROM test HAVING nested(message.info)")
-    );
+    SyntaxCheckException exception =
+        assertThrows(
+            SyntaxCheckException.class,
+            () -> buildAST("SELECT count(*) FROM test HAVING nested(message.info)"));
 
     assertEquals(
         "Falling back to legacy engine. Nested function is not supported in the HAVING clause.",
@@ -495,23 +420,15 @@ public void nested_in_having_clause_throws_exception() {
   public void can_build_order_by_sort_order_keyword_insensitive() {
     assertEquals(
         project(
-            sort(
-                relation("test"),
-                field("age",
-                    argument("asc", booleanLiteral(true)))),
+            sort(relation("test"), field("age", argument("asc", booleanLiteral(true)))),
             alias("age", qualifiedName("age"))),
-        buildAST("SELECT age FROM test ORDER BY age ASC")
-    );
+        buildAST("SELECT age FROM test ORDER BY age ASC"));
 
     assertEquals(
         project(
-            sort(
-                relation("test"),
-                field("age",
-                    argument("asc", booleanLiteral(true)))),
+            sort(relation("test"), field("age", argument("asc", booleanLiteral(true)))),
             alias("age", qualifiedName("age"))),
-        buildAST("SELECT age FROM test ORDER BY age asc")
-    );
+        buildAST("SELECT age FROM test ORDER BY age asc"));
   }
 
   @Test
@@ -523,20 +440,15 @@ public void can_build_from_subquery() {
                     project(
                         relation("test"),
                         alias("firstname", qualifiedName("firstname"), "firstName"),
-                        alias("lastname", qualifiedName("lastname"), "lastName")
-                    ),
-                    "a"
-                ),
-                function(">", qualifiedName("age"), intLiteral(20))
-            ),
+                        alias("lastname", qualifiedName("lastname"), "lastName")),
+                    "a"),
+                function(">", qualifiedName("age"), intLiteral(20))),
             alias("a.firstName", qualifiedName("a", "firstName")),
             alias("lastName", qualifiedName("lastName"))),
         buildAST(
             "SELECT a.firstName, lastName FROM ("
                 + "SELECT firstname AS firstName, lastname AS lastName FROM test"
-                + ") AS a where age > 20"
-        )
-    );
+                + ") AS a where age > 20"));
   }
 
   @Test
@@ -545,19 +457,15 @@ public void can_build_from_subquery_with_backquoted_alias() {
         project(
             relationSubquery(
                 project(
-                    relation("test"),
-                    alias("firstname", qualifiedName("firstname"), "firstName")),
+                    relation("test"), alias("firstname", qualifiedName("firstname"), "firstName")),
                 "a"),
-            alias("a.firstName", qualifiedName("a", "firstName"))
-        ),
+            alias("a.firstName", qualifiedName("a", "firstName"))),
         buildAST(
             "SELECT a.firstName "
                 + "FROM ( "
                 + " SELECT `firstname` AS `firstName` "
                 + " FROM `test` "
-                + ") AS `a`"
-        )
-    );
+                + ") AS `a`"));
   }
 
   @Test
@@ -566,12 +474,9 @@ public void can_build_show_all_tables() {
         project(
             filter(
                 relation(TABLE_INFO),
-                function("like", qualifiedName("TABLE_NAME"), stringLiteral("%"))
-            ),
-            AllFields.of()
-        ),
-        buildAST("SHOW TABLES LIKE '%'")
-    );
+                function("like", qualifiedName("TABLE_NAME"), stringLiteral("%"))),
+            AllFields.of()),
+        buildAST("SHOW TABLES LIKE '%'"));
   }
 
   @Test
@@ -580,17 +485,14 @@ public void can_build_show_selected_tables() {
         project(
             filter(
                 relation(TABLE_INFO),
-                function("like", qualifiedName("TABLE_NAME"), stringLiteral("a_c%"))
-            ),
-            AllFields.of()
-        ),
-        buildAST("SHOW TABLES LIKE 'a_c%'")
-    );
+                function("like", qualifiedName("TABLE_NAME"), stringLiteral("a_c%"))),
+            AllFields.of()),
+        buildAST("SHOW TABLES LIKE 'a_c%'"));
   }
 
   /**
-   * Todo, ideally the identifier (%) couldn't be used in LIKE operator, only the string literal
-   * is allowed.
+   * Todo, ideally the identifier (%) couldn't be used in LIKE operator, only the string literal is
+   * allowed.
    */
   @Test
   public void show_compatible_with_old_engine_syntax() {
@@ -598,34 +500,23 @@ public void show_compatible_with_old_engine_syntax() {
         project(
             filter(
                 relation(TABLE_INFO),
-                function("like", qualifiedName("TABLE_NAME"), stringLiteral("%"))
-            ),
-            AllFields.of()
-        ),
-        buildAST("SHOW TABLES LIKE %")
-    );
+                function("like", qualifiedName("TABLE_NAME"), stringLiteral("%"))),
+            AllFields.of()),
+        buildAST("SHOW TABLES LIKE %"));
   }
 
   @Test
   public void describe_compatible_with_old_engine_syntax() {
     assertEquals(
-        project(
-            relation(mappingTable("a_c%")),
-            AllFields.of()
-        ),
-        buildAST("DESCRIBE TABLES LIKE a_c%")
-    );
+        project(relation(mappingTable("a_c%")), AllFields.of()),
+        buildAST("DESCRIBE TABLES LIKE a_c%"));
   }
 
   @Test
   public void can_build_describe_selected_tables() {
     assertEquals(
-        project(
-            relation(mappingTable("a_c%")),
-            AllFields.of()
-        ),
-        buildAST("DESCRIBE TABLES LIKE 'a_c%'")
-    );
+        project(relation(mappingTable("a_c%")), AllFields.of()),
+        buildAST("DESCRIBE TABLES LIKE 'a_c%'"));
   }
 
   @Test
@@ -634,17 +525,14 @@ public void can_build_describe_selected_tables_field_filter() {
         project(
             filter(
                 relation(mappingTable("a_c%")),
-                function("like", qualifiedName("COLUMN_NAME"), stringLiteral("name%"))
-            ),
-            AllFields.of()
-        ),
-        buildAST("DESCRIBE TABLES LIKE 'a_c%' COLUMNS LIKE 'name%'")
-    );
+                function("like", qualifiedName("COLUMN_NAME"), stringLiteral("name%"))),
+            AllFields.of()),
+        buildAST("DESCRIBE TABLES LIKE 'a_c%' COLUMNS LIKE 'name%'"));
   }
 
   /**
-   * Todo, ideally the identifier (%) couldn't be used in LIKE operator, only the string literal
-   * is allowed.
+   * Todo, ideally the identifier (%) couldn't be used in LIKE operator, only the string literal is
+   * allowed.
    */
   @Test
   public void describe_and_column_compatible_with_old_engine_syntax() {
@@ -652,23 +540,16 @@ public void describe_and_column_compatible_with_old_engine_syntax() {
         project(
             filter(
                 relation(mappingTable("a_c%")),
-                function("like", qualifiedName("COLUMN_NAME"), stringLiteral("name%"))
-            ),
-            AllFields.of()
-        ),
-        buildAST("DESCRIBE TABLES LIKE a_c% COLUMNS LIKE name%")
-    );
+                function("like", qualifiedName("COLUMN_NAME"), stringLiteral("name%"))),
+            AllFields.of()),
+        buildAST("DESCRIBE TABLES LIKE a_c% COLUMNS LIKE name%"));
   }
 
   @Test
   public void can_build_alias_by_keywords() {
     assertEquals(
-        project(
-            relation("test"),
-            alias("avg_age", qualifiedName("avg_age"), "avg")
-        ),
-        buildAST("SELECT avg_age AS avg FROM test")
-    );
+        project(relation("test"), alias("avg_age", qualifiedName("avg_age"), "avg")),
+        buildAST("SELECT avg_age AS avg FROM test"));
   }
 
   @Test
@@ -676,42 +557,20 @@ public void can_build_limit_clause() {
     assertEquals(
         project(
             limit(
-                sort(
-                    relation("test"),
-                    field("age", argument("asc", booleanLiteral(true)))
-                ),
-                10,
-                0
-            ),
+                sort(relation("test"), field("age", argument("asc", booleanLiteral(true)))), 10, 0),
             alias("name", qualifiedName("name")),
-            alias("age", qualifiedName("age"))
-        ),
-        buildAST("SELECT name, age FROM test ORDER BY age LIMIT 10")
-    );
+            alias("age", qualifiedName("age"))),
+        buildAST("SELECT name, age FROM test ORDER BY age LIMIT 10"));
   }
 
   @Test
   public void can_build_limit_clause_with_offset() {
     assertEquals(
-        project(
-            limit(
-                relation("test"),
-                10,
-                5
-            ),
-            alias("name", qualifiedName("name"))
-        ),
+        project(limit(relation("test"), 10, 5), alias("name", qualifiedName("name"))),
         buildAST("SELECT name FROM test LIMIT 10 OFFSET 5"));
 
     assertEquals(
-        project(
-            limit(
-                relation("test"),
-                10,
-                5
-            ),
-            alias("name", qualifiedName("name"))
-        ),
+        project(limit(relation("test"), 10, 5), alias("name", qualifiedName("name"))),
         buildAST("SELECT name FROM test LIMIT 5, 10"));
   }
 
@@ -719,11 +578,10 @@ public void can_build_limit_clause_with_offset() {
   public void can_build_qualified_name_highlight() {
     Map args = new HashMap<>();
     assertEquals(
-        project(relation("test"),
-            alias("highlight(fieldA)",
-                highlight(AstDSL.qualifiedName("fieldA"), args))),
-        buildAST("SELECT highlight(fieldA) FROM test")
-    );
+        project(
+            relation("test"),
+            alias("highlight(fieldA)", highlight(AstDSL.qualifiedName("fieldA"), args))),
+        buildAST("SELECT highlight(fieldA) FROM test"));
   }
 
   @Test
@@ -732,22 +590,22 @@ public void can_build_qualified_highlight_with_arguments() {
     args.put("pre_tags", new Literal("", DataType.STRING));
     args.put("post_tags", new Literal("", DataType.STRING));
     assertEquals(
-        project(relation("test"),
-            alias("highlight(fieldA, pre_tags='', post_tags='')",
+        project(
+            relation("test"),
+            alias(
+                "highlight(fieldA, pre_tags='', post_tags='')",
                 highlight(AstDSL.qualifiedName("fieldA"), args))),
-        buildAST("SELECT highlight(fieldA, pre_tags='', post_tags='') "
-            + "FROM test")
-    );
+        buildAST(
+            "SELECT highlight(fieldA, pre_tags='', post_tags='') " + "FROM test"));
   }
 
   @Test
   public void can_build_string_literal_highlight() {
     Map args = new HashMap<>();
     assertEquals(
-        project(relation("test"),
-            alias("highlight(\"fieldA\")",
-                highlight(AstDSL.stringLiteral("fieldA"), args))),
-        buildAST("SELECT highlight(\"fieldA\") FROM test")
-    );
+        project(
+            relation("test"),
+            alias("highlight(\"fieldA\")", highlight(AstDSL.stringLiteral("fieldA"), args))),
+        buildAST("SELECT highlight(\"fieldA\") FROM test"));
   }
 }
diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AstBuilderTestBase.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AstBuilderTestBase.java
index 2161eb5b1a..602f17ce85 100644
--- a/sql/src/test/java/org/opensearch/sql/sql/parser/AstBuilderTestBase.java
+++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AstBuilderTestBase.java
@@ -10,9 +10,7 @@
 import org.opensearch.sql.sql.antlr.SQLSyntaxParser;
 
 public class AstBuilderTestBase {
-  /**
-   * SQL syntax parser that helps prepare parse tree as AstBuilder input.
-   */
+  /** SQL syntax parser that helps prepare parse tree as AstBuilder input. */
   private final SQLSyntaxParser parser = new SQLSyntaxParser();
 
   protected UnresolvedPlan buildAST(String query) {
diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AstExpressionBuilderTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AstExpressionBuilderTest.java
index 20655bc020..f2e7fdb2d8 100644
--- a/sql/src/test/java/org/opensearch/sql/sql/parser/AstExpressionBuilderTest.java
+++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AstExpressionBuilderTest.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.sql.parser;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -57,185 +56,122 @@ class AstExpressionBuilderTest {
 
   @Test
   public void canBuildStringLiteral() {
-    assertEquals(
-        stringLiteral("hello"),
-        buildExprAst("'hello'")
-    );
-    assertEquals(
-        stringLiteral("hello"),
-        buildExprAst("\"hello\"")
-    );
+    assertEquals(stringLiteral("hello"), buildExprAst("'hello'"));
+    assertEquals(stringLiteral("hello"), buildExprAst("\"hello\""));
   }
 
   @Test
   public void canBuildIntegerLiteral() {
-    assertEquals(
-        intLiteral(123),
-        buildExprAst("123")
-    );
-    assertEquals(
-        intLiteral(Integer.MAX_VALUE),
-        buildExprAst(String.valueOf(Integer.MAX_VALUE))
-    );
-    assertEquals(
-        intLiteral(Integer.MIN_VALUE),
-        buildExprAst(String.valueOf(Integer.MIN_VALUE))
-    );
+    assertEquals(intLiteral(123), buildExprAst("123"));
+    assertEquals(intLiteral(Integer.MAX_VALUE), buildExprAst(String.valueOf(Integer.MAX_VALUE)));
+    assertEquals(intLiteral(Integer.MIN_VALUE), buildExprAst(String.valueOf(Integer.MIN_VALUE)));
   }
 
   @Test
   public void canBuildLongLiteral() {
+    assertEquals(longLiteral(1234567890123L), buildExprAst("1234567890123"));
     assertEquals(
-        longLiteral(1234567890123L),
-        buildExprAst("1234567890123")
-    );
+        longLiteral(Integer.MAX_VALUE + 1L), buildExprAst(String.valueOf(Integer.MAX_VALUE + 1L)));
     assertEquals(
-        longLiteral(Integer.MAX_VALUE + 1L),
-        buildExprAst(String.valueOf(Integer.MAX_VALUE + 1L))
-    );
-    assertEquals(
-        longLiteral(Integer.MIN_VALUE - 1L),
-        buildExprAst(String.valueOf(Integer.MIN_VALUE - 1L))
-    );
+        longLiteral(Integer.MIN_VALUE - 1L), buildExprAst(String.valueOf(Integer.MIN_VALUE - 1L)));
   }
 
   @Test
   public void canBuildNegativeRealLiteral() {
-    assertEquals(
-        doubleLiteral(-4.567),
-        buildExprAst("-4.567")
-    );
+    assertEquals(doubleLiteral(-4.567), buildExprAst("-4.567"));
   }
 
   @Test
   public void canBuildBooleanLiteral() {
-    assertEquals(
-        booleanLiteral(true),
-        buildExprAst("true")
-    );
+    assertEquals(booleanLiteral(true), buildExprAst("true"));
   }
 
   @Test
   public void canBuildDateLiteral() {
-    assertEquals(
-        dateLiteral("2020-07-07"),
-        buildExprAst("DATE '2020-07-07'")
-    );
+    assertEquals(dateLiteral("2020-07-07"), buildExprAst("DATE '2020-07-07'"));
   }
 
   @Test
   public void canBuildTimeLiteral() {
-    assertEquals(
-        timeLiteral("11:30:45"),
-        buildExprAst("TIME '11:30:45'")
-    );
+    assertEquals(timeLiteral("11:30:45"), buildExprAst("TIME '11:30:45'"));
   }
 
   @Test
   public void canBuildTimestampLiteral() {
     assertEquals(
-        timestampLiteral("2020-07-07 11:30:45"),
-        buildExprAst("TIMESTAMP '2020-07-07 11:30:45'")
-    );
+        timestampLiteral("2020-07-07 11:30:45"), buildExprAst("TIMESTAMP '2020-07-07 11:30:45'"));
   }
 
   @Test
   public void canBuildIntervalLiteral() {
-    assertEquals(
-        intervalLiteral(1, DataType.INTEGER, "day"),
-        buildExprAst("interval 1 day")
-    );
+    assertEquals(intervalLiteral(1, DataType.INTEGER, "day"), buildExprAst("interval 1 day"));
   }
 
   @Test
   public void canBuildArithmeticExpression() {
-    assertEquals(
-        function("+", intLiteral(1), intLiteral(2)),
-        buildExprAst("1 + 2")
-    );
+    assertEquals(function("+", intLiteral(1), intLiteral(2)), buildExprAst("1 + 2"));
   }
 
   @Test
   public void canBuildArithmeticExpressionPrecedence() {
     assertEquals(
-        function("+",
-            intLiteral(1),
-            function("*",
-                intLiteral(2), intLiteral(3))),
-        buildExprAst("1 + 2 * 3")
-    );
+        function("+", intLiteral(1), function("*", intLiteral(2), intLiteral(3))),
+        buildExprAst("1 + 2 * 3"));
   }
 
   @Test
   public void canBuildFunctionWithoutArguments() {
-    assertEquals(
-        function("PI"),
-        buildExprAst("PI()")
-    );
+    assertEquals(function("PI"), buildExprAst("PI()"));
   }
 
   @Test
   public void canBuildExpressionWithParentheses() {
     assertEquals(
-        function("*",
+        function(
+            "*",
             function("+", doubleLiteral(-1.0), doubleLiteral(2.3)),
-            function("-", intLiteral(3), intLiteral(1))
-        ),
-        buildExprAst("(-1.0 + 2.3) * (3 - 1)")
-    );
+            function("-", intLiteral(3), intLiteral(1))),
+        buildExprAst("(-1.0 + 2.3) * (3 - 1)"));
   }
 
   @Test
   public void canBuildFunctionCall() {
-    assertEquals(
-        function("abs", intLiteral(-1)),
-        buildExprAst("abs(-1)")
-    );
+    assertEquals(function("abs", intLiteral(-1)), buildExprAst("abs(-1)"));
   }
 
   @Test
   public void canBuildExtractFunctionCall() {
     assertEquals(
         function("extract", stringLiteral("DAY"), dateLiteral("2023-02-09")).toString(),
-        buildExprAst("extract(DAY FROM \"2023-02-09\")").toString()
-    );
+        buildExprAst("extract(DAY FROM \"2023-02-09\")").toString());
   }
 
   @Test
   public void canBuildGetFormatFunctionCall() {
     assertEquals(
         function("get_format", stringLiteral("DATE"), stringLiteral("USA")),
-        buildExprAst("get_format(DATE,\"USA\")")
-    );
+        buildExprAst("get_format(DATE,\"USA\")"));
   }
 
   @Test
   public void canBuildNestedFunctionCall() {
     assertEquals(
-        function("abs",
-            function("*",
-              function("abs", intLiteral(-5)),
-              intLiteral(-1)
-            )
-        ),
-        buildExprAst("abs(abs(-5) * -1)")
-    );
+        function("abs", function("*", function("abs", intLiteral(-5)), intLiteral(-1))),
+        buildExprAst("abs(abs(-5) * -1)"));
   }
 
   @Test
   public void canBuildDateAndTimeFunctionCall() {
     assertEquals(
         function("dayofmonth", dateLiteral("2020-07-07")),
-        buildExprAst("dayofmonth(DATE '2020-07-07')")
-    );
+        buildExprAst("dayofmonth(DATE '2020-07-07')"));
   }
 
   @Test
   public void canBuildTimestampAddFunctionCall() {
     assertEquals(
         function("timestampadd", stringLiteral("WEEK"), intLiteral(1), dateLiteral("2023-03-14")),
-        buildExprAst("timestampadd(WEEK, 1, DATE '2023-03-14')")
-    );
+        buildExprAst("timestampadd(WEEK, 1, DATE '2023-03-14')"));
   }
 
   @Test
@@ -246,105 +182,69 @@ public void canBuildTimstampDiffFunctionCall() {
             stringLiteral("WEEK"),
             timestampLiteral("2023-03-15 00:00:01"),
             dateLiteral("2023-03-14")),
-        buildExprAst("timestampdiff(WEEK, TIMESTAMP '2023-03-15 00:00:01', DATE '2023-03-14')")
-    );
+        buildExprAst("timestampdiff(WEEK, TIMESTAMP '2023-03-15 00:00:01', DATE '2023-03-14')"));
   }
 
   @Test
   public void canBuildComparisonExpression() {
-    assertEquals(
-        function("!=", intLiteral(1), intLiteral(2)),
-        buildExprAst("1 != 2")
-    );
+    assertEquals(function("!=", intLiteral(1), intLiteral(2)), buildExprAst("1 != 2"));
 
-    assertEquals(
-        function("!=", intLiteral(1), intLiteral(2)),
-        buildExprAst("1 <> 2")
-    );
+    assertEquals(function("!=", intLiteral(1), intLiteral(2)), buildExprAst("1 <> 2"));
   }
 
   @Test
   public void canBuildNullTestExpression() {
-    assertEquals(
-        function("is null", intLiteral(1)),
-        buildExprAst("1 is NULL")
-    );
+    assertEquals(function("is null", intLiteral(1)), buildExprAst("1 is NULL"));
 
-    assertEquals(
-        function("is not null", intLiteral(1)),
-        buildExprAst("1 IS NOT null")
-    );
+    assertEquals(function("is not null", intLiteral(1)), buildExprAst("1 IS NOT null"));
   }
 
   @Test
   public void canBuildNullTestExpressionWithNULLLiteral() {
-    assertEquals(
-        function("is null", nullLiteral()),
-        buildExprAst("NULL is NULL")
-    );
+    assertEquals(function("is null", nullLiteral()), buildExprAst("NULL is NULL"));
 
-    assertEquals(
-        function("is not null", nullLiteral()),
-        buildExprAst("NULL IS NOT null")
-    );
+    assertEquals(function("is not null", nullLiteral()), buildExprAst("NULL IS NOT null"));
   }
 
   @Test
   public void canBuildLikeExpression() {
     assertEquals(
         function("like", stringLiteral("str"), stringLiteral("st%")),
-        buildExprAst("'str' like 'st%'")
-    );
+        buildExprAst("'str' like 'st%'"));
 
     assertEquals(
         function("not like", stringLiteral("str"), stringLiteral("st%")),
-        buildExprAst("'str' not like 'st%'")
-    );
+        buildExprAst("'str' not like 'st%'"));
   }
 
   @Test
   public void canBuildRegexpExpression() {
     assertEquals(
         function("regexp", stringLiteral("str"), stringLiteral(".*")),
-        buildExprAst("'str' regexp '.*'")
-    );
+        buildExprAst("'str' regexp '.*'"));
   }
 
   @Test
   public void canBuildBetweenExpression() {
     assertEquals(
-        between(
-            qualifiedName("age"), intLiteral(10), intLiteral(30)),
-        buildExprAst("age BETWEEN 10 AND 30")
-    );
+        between(qualifiedName("age"), intLiteral(10), intLiteral(30)),
+        buildExprAst("age BETWEEN 10 AND 30"));
   }
 
   @Test
   public void canBuildNotBetweenExpression() {
     assertEquals(
-        not(
-            between(
-                qualifiedName("age"), intLiteral(10), intLiteral(30))),
-        buildExprAst("age NOT BETWEEN 10 AND 30")
-    );
+        not(between(qualifiedName("age"), intLiteral(10), intLiteral(30))),
+        buildExprAst("age NOT BETWEEN 10 AND 30"));
   }
 
   @Test
   public void canBuildLogicalExpression() {
-    assertEquals(
-        and(booleanLiteral(true), booleanLiteral(false)),
-        buildExprAst("true AND false")
-    );
+    assertEquals(and(booleanLiteral(true), booleanLiteral(false)), buildExprAst("true AND false"));
 
-    assertEquals(
-        or(booleanLiteral(true), booleanLiteral(false)),
-        buildExprAst("true OR false")
-    );
+    assertEquals(or(booleanLiteral(true), booleanLiteral(false)), buildExprAst("true OR false"));
 
-    assertEquals(
-        not(booleanLiteral(false)),
-        buildExprAst("NOT false")
-    );
+    assertEquals(not(booleanLiteral(false)), buildExprAst("NOT false"));
   }
 
   @Test
@@ -373,8 +273,8 @@ public void canBuildWindowFunctionWithNullOrderSpecified() {
         window(
             function("DENSE_RANK"),
             ImmutableList.of(),
-            ImmutableList.of(ImmutablePair.of(
-                new SortOption(ASC, NULL_LAST), qualifiedName("age")))),
+            ImmutableList.of(
+                ImmutablePair.of(new SortOption(ASC, NULL_LAST), qualifiedName("age")))),
         buildExprAst("DENSE_RANK() OVER (ORDER BY age ASC NULLS LAST)"));
   }
 
@@ -382,35 +282,27 @@ public void canBuildWindowFunctionWithNullOrderSpecified() {
   public void canBuildStringLiteralHighlightFunction() {
     HashMap args = new HashMap<>();
     assertEquals(
-        highlight(AstDSL.stringLiteral("fieldA"), args),
-        buildExprAst("highlight(\"fieldA\")")
-    );
+        highlight(AstDSL.stringLiteral("fieldA"), args), buildExprAst("highlight(\"fieldA\")"));
   }
 
   @Test
   public void canBuildQualifiedNameHighlightFunction() {
     HashMap args = new HashMap<>();
     assertEquals(
-        highlight(AstDSL.qualifiedName("fieldA"), args),
-        buildExprAst("highlight(fieldA)")
-    );
+        highlight(AstDSL.qualifiedName("fieldA"), args), buildExprAst("highlight(fieldA)"));
   }
 
   @Test
   public void canBuildStringLiteralPositionFunction() {
     assertEquals(
-            function("position", stringLiteral("substr"), stringLiteral("str")),
-            buildExprAst("position(\"substr\" IN \"str\")")
-    );
+        function("position", stringLiteral("substr"), stringLiteral("str")),
+        buildExprAst("position(\"substr\" IN \"str\")"));
   }
 
   @Test
   public void canBuildWindowFunctionWithoutOrderBy() {
     assertEquals(
-        window(
-            function("RANK"),
-            ImmutableList.of(qualifiedName("state")),
-            ImmutableList.of()),
+        window(function("RANK"), ImmutableList.of(qualifiedName("state")), ImmutableList.of()),
         buildExprAst("RANK() OVER (PARTITION BY state)"));
   }
 
@@ -420,8 +312,7 @@ public void canBuildAggregateWindowFunction() {
         window(
             aggregate("AVG", qualifiedName("age")),
             ImmutableList.of(qualifiedName("state")),
-            ImmutableList.of(ImmutablePair.of(
-                new SortOption(null, null), qualifiedName("age")))),
+            ImmutableList.of(ImmutablePair.of(new SortOption(null, null), qualifiedName("age")))),
         buildExprAst("AVG(age) OVER (PARTITION BY state ORDER BY age)"));
   }
 
@@ -430,11 +321,8 @@ public void canBuildCaseConditionStatement() {
     assertEquals(
         caseWhen(
             null, // no else statement
-            when(
-                function(">", qualifiedName("age"), intLiteral(30)),
-                stringLiteral("age1"))),
-        buildExprAst("CASE WHEN age > 30 THEN 'age1' END")
-    );
+            when(function(">", qualifiedName("age"), intLiteral(30)), stringLiteral("age1"))),
+        buildExprAst("CASE WHEN age > 30 THEN 'age1' END"));
   }
 
   @Test
@@ -444,168 +332,147 @@ public void canBuildCaseValueStatement() {
             qualifiedName("age"),
             stringLiteral("age2"),
             when(intLiteral(30), stringLiteral("age1"))),
-        buildExprAst("CASE age WHEN 30 THEN 'age1' ELSE 'age2' END")
-    );
+        buildExprAst("CASE age WHEN 30 THEN 'age1' ELSE 'age2' END"));
   }
 
   @Test
   public void canBuildKeywordsAsIdentifiers() {
-    assertEquals(
-        qualifiedName("timestamp"),
-        buildExprAst("timestamp")
-    );
+    assertEquals(qualifiedName("timestamp"), buildExprAst("timestamp"));
   }
 
   @Test
   public void canBuildKeywordsAsIdentInQualifiedName() {
-    assertEquals(
-        qualifiedName("test", "timestamp"),
-        buildExprAst("test.timestamp")
-    );
+    assertEquals(qualifiedName("test", "timestamp"), buildExprAst("test.timestamp"));
   }
 
   @Test
   public void canBuildMetaDataFieldAsQualifiedName() {
-    Stream.of("_id", "_index", "_sort", "_score", "_maxscore").forEach(
-        field -> assertEquals(
-            qualifiedName(field),
-            buildExprAst(field)
-        )
-    );
+    Stream.of("_id", "_index", "_sort", "_score", "_maxscore")
+        .forEach(field -> assertEquals(qualifiedName(field), buildExprAst(field)));
   }
 
   @Test
   public void canBuildNonMetaDataFieldAsQualifiedName() {
-    Stream.of("id", "__id", "_routing", "___field").forEach(
-        field -> assertEquals(
-            qualifiedName(field),
-            buildExprAst(field)
-        )
-    );
+    Stream.of("id", "__id", "_routing", "___field")
+        .forEach(field -> assertEquals(qualifiedName(field), buildExprAst(field)));
   }
 
   @Test
   public void canCastFieldAsString() {
     assertEquals(
         AstDSL.cast(qualifiedName("state"), stringLiteral("string")),
-        buildExprAst("cast(state as string)")
-    );
+        buildExprAst("cast(state as string)"));
   }
 
   @Test
   public void canCastValueAsString() {
     assertEquals(
-        AstDSL.cast(intLiteral(1), stringLiteral("string")),
-        buildExprAst("cast(1 as string)")
-    );
+        AstDSL.cast(intLiteral(1), stringLiteral("string")), buildExprAst("cast(1 as string)"));
   }
 
   @Test
   public void filteredAggregation() {
     assertEquals(
-        AstDSL.filteredAggregate("avg", qualifiedName("age"),
-            function(">", qualifiedName("age"), intLiteral(20))),
-        buildExprAst("avg(age) filter(where age > 20)")
-    );
+        AstDSL.filteredAggregate(
+            "avg", qualifiedName("age"), function(">", qualifiedName("age"), intLiteral(20))),
+        buildExprAst("avg(age) filter(where age > 20)"));
   }
 
   @Test
   public void canBuildVarSamp() {
-    assertEquals(
-        aggregate("var_samp", qualifiedName("age")),
-        buildExprAst("var_samp(age)"));
+    assertEquals(aggregate("var_samp", qualifiedName("age")), buildExprAst("var_samp(age)"));
   }
 
   @Test
   public void canBuildVarPop() {
-    assertEquals(
-        aggregate("var_pop", qualifiedName("age")),
-        buildExprAst("var_pop(age)"));
+    assertEquals(aggregate("var_pop", qualifiedName("age")), buildExprAst("var_pop(age)"));
   }
 
   @Test
   public void canBuildVariance() {
-    assertEquals(
-        aggregate("variance", qualifiedName("age")),
-        buildExprAst("variance(age)"));
+    assertEquals(aggregate("variance", qualifiedName("age")), buildExprAst("variance(age)"));
   }
 
   @Test
   public void distinctCount() {
     assertEquals(
         AstDSL.distinctAggregate("count", qualifiedName("name")),
-        buildExprAst("count(distinct name)")
-    );
+        buildExprAst("count(distinct name)"));
   }
 
   @Test
   public void filteredDistinctCount() {
     assertEquals(
-        AstDSL.filteredDistinctCount("count", qualifiedName("name"), function(
-            ">", qualifiedName("age"), intLiteral(30))),
-        buildExprAst("count(distinct name) filter(where age > 30)")
-    );
+        AstDSL.filteredDistinctCount(
+            "count", qualifiedName("name"), function(">", qualifiedName("age"), intLiteral(30))),
+        buildExprAst("count(distinct name) filter(where age > 30)"));
   }
 
   @Test
   public void matchPhraseQueryAllParameters() {
     assertEquals(
-        AstDSL.function("matchphrasequery",
+        AstDSL.function(
+            "matchphrasequery",
             unresolvedArg("field", qualifiedName("test")),
             unresolvedArg("query", stringLiteral("search query")),
             unresolvedArg("slop", stringLiteral("3")),
             unresolvedArg("analyzer", stringLiteral("standard")),
-            unresolvedArg("zero_terms_query", stringLiteral("NONE"))
-        ),
-        buildExprAst("matchphrasequery(test, 'search query', slop = 3"
-            + ", analyzer = 'standard', zero_terms_query='NONE'"
-            + ")")
-    );
+            unresolvedArg("zero_terms_query", stringLiteral("NONE"))),
+        buildExprAst(
+            "matchphrasequery(test, 'search query', slop = 3"
+                + ", analyzer = 'standard', zero_terms_query='NONE'"
+                + ")"));
   }
 
   @Test
   public void matchPhrasePrefixAllParameters() {
     assertEquals(
-        AstDSL.function("match_phrase_prefix",
-          unresolvedArg("field", qualifiedName("test")),
-          unresolvedArg("query", stringLiteral("search query")),
-          unresolvedArg("slop", stringLiteral("3")),
-          unresolvedArg("boost", stringLiteral("1.5")),
-          unresolvedArg("analyzer", stringLiteral("standard")),
-          unresolvedArg("max_expansions", stringLiteral("4")),
-          unresolvedArg("zero_terms_query", stringLiteral("NONE"))
-          ),
-        buildExprAst("match_phrase_prefix(test, 'search query', slop = 3, boost = 1.5"
-            + ", analyzer = 'standard', max_expansions = 4, zero_terms_query='NONE'"
-            + ")")
-    );
+        AstDSL.function(
+            "match_phrase_prefix",
+            unresolvedArg("field", qualifiedName("test")),
+            unresolvedArg("query", stringLiteral("search query")),
+            unresolvedArg("slop", stringLiteral("3")),
+            unresolvedArg("boost", stringLiteral("1.5")),
+            unresolvedArg("analyzer", stringLiteral("standard")),
+            unresolvedArg("max_expansions", stringLiteral("4")),
+            unresolvedArg("zero_terms_query", stringLiteral("NONE"))),
+        buildExprAst(
+            "match_phrase_prefix(test, 'search query', slop = 3, boost = 1.5"
+                + ", analyzer = 'standard', max_expansions = 4, zero_terms_query='NONE'"
+                + ")"));
   }
 
   @Test
   public void relevanceMatch() {
-    assertEquals(AstDSL.function("match",
-        unresolvedArg("field", qualifiedName("message")),
-        unresolvedArg("query", stringLiteral("search query"))),
-        buildExprAst("match('message', 'search query')")
-    );
-
-    assertEquals(AstDSL.function("match",
-        unresolvedArg("field", qualifiedName("message")),
-        unresolvedArg("query", stringLiteral("search query")),
-        unresolvedArg("analyzer", stringLiteral("keyword")),
-        unresolvedArg("operator", stringLiteral("AND"))),
+    assertEquals(
+        AstDSL.function(
+            "match",
+            unresolvedArg("field", qualifiedName("message")),
+            unresolvedArg("query", stringLiteral("search query"))),
+        buildExprAst("match('message', 'search query')"));
+
+    assertEquals(
+        AstDSL.function(
+            "match",
+            unresolvedArg("field", qualifiedName("message")),
+            unresolvedArg("query", stringLiteral("search query")),
+            unresolvedArg("analyzer", stringLiteral("keyword")),
+            unresolvedArg("operator", stringLiteral("AND"))),
         buildExprAst("match('message', 'search query', analyzer='keyword', operator='AND')"));
   }
 
   @Test
   public void relevanceMatchQuery() {
-    assertEquals(AstDSL.function("matchquery",
+    assertEquals(
+        AstDSL.function(
+            "matchquery",
             unresolvedArg("field", qualifiedName("message")),
             unresolvedArg("query", stringLiteral("search query"))),
-        buildExprAst("matchquery('message', 'search query')")
-    );
+        buildExprAst("matchquery('message', 'search query')"));
 
-    assertEquals(AstDSL.function("matchquery",
+    assertEquals(
+        AstDSL.function(
+            "matchquery",
             unresolvedArg("field", qualifiedName("message")),
             unresolvedArg("query", stringLiteral("search query")),
             unresolvedArg("analyzer", stringLiteral("keyword")),
@@ -615,13 +482,16 @@ public void relevanceMatchQuery() {
 
   @Test
   public void relevanceMatch_Query() {
-    assertEquals(AstDSL.function("match_query",
+    assertEquals(
+        AstDSL.function(
+            "match_query",
             unresolvedArg("field", qualifiedName("message")),
             unresolvedArg("query", stringLiteral("search query"))),
-        buildExprAst("match_query('message', 'search query')")
-    );
+        buildExprAst("match_query('message', 'search query')"));
 
-    assertEquals(AstDSL.function("match_query",
+    assertEquals(
+        AstDSL.function(
+            "match_query",
             unresolvedArg("field", qualifiedName("message")),
             unresolvedArg("query", stringLiteral("search query")),
             unresolvedArg("analyzer", stringLiteral("keyword")),
@@ -631,238 +501,279 @@ public void relevanceMatch_Query() {
 
   @Test
   public void relevanceMatchQueryAltSyntax() {
-    assertEquals(AstDSL.function("match_query",
-            unresolvedArg("field", stringLiteral("message")),
-            unresolvedArg("query", stringLiteral("search query"))).toString(),
-        buildExprAst("message = match_query('search query')").toString()
-    );
+    assertEquals(
+        AstDSL.function(
+                "match_query",
+                unresolvedArg("field", stringLiteral("message")),
+                unresolvedArg("query", stringLiteral("search query")))
+            .toString(),
+        buildExprAst("message = match_query('search query')").toString());
 
-    assertEquals(AstDSL.function("match_query",
-            unresolvedArg("field", stringLiteral("message")),
-            unresolvedArg("query", stringLiteral("search query"))).toString(),
-        buildExprAst("message = match_query(\"search query\")").toString()
-    );
+    assertEquals(
+        AstDSL.function(
+                "match_query",
+                unresolvedArg("field", stringLiteral("message")),
+                unresolvedArg("query", stringLiteral("search query")))
+            .toString(),
+        buildExprAst("message = match_query(\"search query\")").toString());
 
-    assertEquals(AstDSL.function("matchquery",
-            unresolvedArg("field", stringLiteral("message")),
-            unresolvedArg("query", stringLiteral("search query"))).toString(),
-        buildExprAst("message = matchquery('search query')").toString()
-    );
+    assertEquals(
+        AstDSL.function(
+                "matchquery",
+                unresolvedArg("field", stringLiteral("message")),
+                unresolvedArg("query", stringLiteral("search query")))
+            .toString(),
+        buildExprAst("message = matchquery('search query')").toString());
 
-    assertEquals(AstDSL.function("matchquery",
-            unresolvedArg("field", stringLiteral("message")),
-            unresolvedArg("query", stringLiteral("search query"))).toString(),
-        buildExprAst("message = matchquery(\"search query\")").toString()
-    );
+    assertEquals(
+        AstDSL.function(
+                "matchquery",
+                unresolvedArg("field", stringLiteral("message")),
+                unresolvedArg("query", stringLiteral("search query")))
+            .toString(),
+        buildExprAst("message = matchquery(\"search query\")").toString());
   }
 
   @Test
   public void relevanceMatchPhraseAltSyntax() {
-    assertEquals(AstDSL.function("match_phrase",
-            unresolvedArg("field", stringLiteral("message")),
-            unresolvedArg("query", stringLiteral("search query"))).toString(),
-        buildExprAst("message = match_phrase('search query')").toString()
-    );
+    assertEquals(
+        AstDSL.function(
+                "match_phrase",
+                unresolvedArg("field", stringLiteral("message")),
+                unresolvedArg("query", stringLiteral("search query")))
+            .toString(),
+        buildExprAst("message = match_phrase('search query')").toString());
 
-    assertEquals(AstDSL.function("match_phrase",
-            unresolvedArg("field", stringLiteral("message")),
-            unresolvedArg("query", stringLiteral("search query"))).toString(),
-        buildExprAst("message = match_phrase(\"search query\")").toString()
-    );
+    assertEquals(
+        AstDSL.function(
+                "match_phrase",
+                unresolvedArg("field", stringLiteral("message")),
+                unresolvedArg("query", stringLiteral("search query")))
+            .toString(),
+        buildExprAst("message = match_phrase(\"search query\")").toString());
 
-    assertEquals(AstDSL.function("matchphrase",
-            unresolvedArg("field", stringLiteral("message")),
-            unresolvedArg("query", stringLiteral("search query"))).toString(),
-        buildExprAst("message = matchphrase('search query')").toString()
-    );
+    assertEquals(
+        AstDSL.function(
+                "matchphrase",
+                unresolvedArg("field", stringLiteral("message")),
+                unresolvedArg("query", stringLiteral("search query")))
+            .toString(),
+        buildExprAst("message = matchphrase('search query')").toString());
 
-    assertEquals(AstDSL.function("matchphrase",
-            unresolvedArg("field", stringLiteral("message")),
-            unresolvedArg("query", stringLiteral("search query"))).toString(),
-        buildExprAst("message = matchphrase(\"search query\")").toString()
-    );
+    assertEquals(
+        AstDSL.function(
+                "matchphrase",
+                unresolvedArg("field", stringLiteral("message")),
+                unresolvedArg("query", stringLiteral("search query")))
+            .toString(),
+        buildExprAst("message = matchphrase(\"search query\")").toString());
   }
 
   @Test
   public void relevanceMultiMatchAltSyntax() {
-    assertEquals(AstDSL.function("multi_match",
+    assertEquals(
+        AstDSL.function(
+            "multi_match",
             unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of("field1", 1.F))),
             unresolvedArg("query", stringLiteral("search query"))),
-        buildExprAst("field1 = multi_match('search query')")
-    );
+        buildExprAst("field1 = multi_match('search query')"));
 
-    assertEquals(AstDSL.function("multi_match",
+    assertEquals(
+        AstDSL.function(
+            "multi_match",
             unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of("field1", 1.F))),
             unresolvedArg("query", stringLiteral("search query"))),
-        buildExprAst("field1 = multi_match(\"search query\")")
-    );
+        buildExprAst("field1 = multi_match(\"search query\")"));
 
-    assertEquals(AstDSL.function("multimatch",
+    assertEquals(
+        AstDSL.function(
+            "multimatch",
             unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of("field1", 1.F))),
             unresolvedArg("query", stringLiteral("search query"))),
-        buildExprAst("field1 = multimatch('search query')")
-    );
+        buildExprAst("field1 = multimatch('search query')"));
 
-    assertEquals(AstDSL.function("multimatch",
+    assertEquals(
+        AstDSL.function(
+            "multimatch",
             unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of("field1", 1.F))),
             unresolvedArg("query", stringLiteral("search query"))),
-        buildExprAst("field1 = multimatch(\"search query\")")
-    );
+        buildExprAst("field1 = multimatch(\"search query\")"));
   }
 
   @Test
   public void relevanceMulti_match() {
-    assertEquals(AstDSL.function("multi_match",
-            unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of(
-                "field2", 3.2F, "field1", 1.F))),
+    assertEquals(
+        AstDSL.function(
+            "multi_match",
+            unresolvedArg(
+                "fields", new RelevanceFieldList(ImmutableMap.of("field2", 3.2F, "field1", 1.F))),
             unresolvedArg("query", stringLiteral("search query"))),
-        buildExprAst("multi_match(['field1', 'field2' ^ 3.2], 'search query')")
-    );
+        buildExprAst("multi_match(['field1', 'field2' ^ 3.2], 'search query')"));
 
-    assertEquals(AstDSL.function("multi_match",
-            unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of(
-                "field2", 3.2F, "field1", 1.F))),
+    assertEquals(
+        AstDSL.function(
+            "multi_match",
+            unresolvedArg(
+                "fields", new RelevanceFieldList(ImmutableMap.of("field2", 3.2F, "field1", 1.F))),
             unresolvedArg("query", stringLiteral("search query")),
             unresolvedArg("analyzer", stringLiteral("keyword")),
             unresolvedArg("operator", stringLiteral("AND"))),
-        buildExprAst("multi_match(['field1', 'field2' ^ 3.2], 'search query',"
-            + "analyzer='keyword', 'operator'='AND')"));
+        buildExprAst(
+            "multi_match(['field1', 'field2' ^ 3.2], 'search query',"
+                + "analyzer='keyword', 'operator'='AND')"));
   }
 
   @Test
   public void relevanceMultimatch_alternate_parameter_syntax() {
-    assertEquals(AstDSL.function("multimatch",
-            unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of(
-                "field1", 1F, "field2", 2F))),
+    assertEquals(
+        AstDSL.function(
+            "multimatch",
+            unresolvedArg(
+                "fields", new RelevanceFieldList(ImmutableMap.of("field1", 1F, "field2", 2F))),
             unresolvedArg("query", stringLiteral("search query"))),
-        buildExprAst("multimatch(query='search query', fields=['field1^1.0,field2^2.0'])")
-    );
+        buildExprAst("multimatch(query='search query', fields=['field1^1.0,field2^2.0'])"));
 
-    assertEquals(AstDSL.function("multimatch",
-            unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of(
-                "field1", 1F, "field2", 2F))),
+    assertEquals(
+        AstDSL.function(
+            "multimatch",
+            unresolvedArg(
+                "fields", new RelevanceFieldList(ImmutableMap.of("field1", 1F, "field2", 2F))),
             unresolvedArg("query", stringLiteral("search query")),
             unresolvedArg("analyzer", stringLiteral("keyword")),
             unresolvedArg("operator", stringLiteral("AND"))),
-        buildExprAst("multimatch(query='search query', fields=['field1^1.0,field2^2.0'],"
-            + "analyzer='keyword', operator='AND')"));
+        buildExprAst(
+            "multimatch(query='search query', fields=['field1^1.0,field2^2.0'],"
+                + "analyzer='keyword', operator='AND')"));
   }
 
   @Test
   public void relevanceMultimatchquery_alternate_parameter_syntax() {
-    assertEquals(AstDSL.function("multimatchquery",
-            unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of(
-                "field", 1F))),
+    assertEquals(
+        AstDSL.function(
+            "multimatchquery",
+            unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of("field", 1F))),
             unresolvedArg("query", stringLiteral("search query"))),
-        buildExprAst("multimatchquery(query='search query', fields='field')")
-    );
+        buildExprAst("multimatchquery(query='search query', fields='field')"));
 
-    assertEquals(AstDSL.function("multimatchquery",
-            unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of(
-                "field", 1F))),
+    assertEquals(
+        AstDSL.function(
+            "multimatchquery",
+            unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of("field", 1F))),
             unresolvedArg("query", stringLiteral("search query")),
             unresolvedArg("analyzer", stringLiteral("keyword")),
             unresolvedArg("operator", stringLiteral("AND"))),
-        buildExprAst("multimatchquery(query='search query', fields='field',"
-            + "analyzer='keyword', 'operator'='AND')"));
+        buildExprAst(
+            "multimatchquery(query='search query', fields='field',"
+                + "analyzer='keyword', 'operator'='AND')"));
   }
 
   @Test
   public void relevanceSimple_query_string() {
-    assertEquals(AstDSL.function("simple_query_string",
-            unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of(
-                "field2", 3.2F, "field1", 1.F))),
+    assertEquals(
+        AstDSL.function(
+            "simple_query_string",
+            unresolvedArg(
+                "fields", new RelevanceFieldList(ImmutableMap.of("field2", 3.2F, "field1", 1.F))),
             unresolvedArg("query", stringLiteral("search query"))),
-        buildExprAst("simple_query_string(['field1', 'field2' ^ 3.2], 'search query')")
-    );
+        buildExprAst("simple_query_string(['field1', 'field2' ^ 3.2], 'search query')"));
 
-    assertEquals(AstDSL.function("simple_query_string",
-            unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of(
-                "field2", 3.2F, "field1", 1.F))),
+    assertEquals(
+        AstDSL.function(
+            "simple_query_string",
+            unresolvedArg(
+                "fields", new RelevanceFieldList(ImmutableMap.of("field2", 3.2F, "field1", 1.F))),
             unresolvedArg("query", stringLiteral("search query")),
             unresolvedArg("analyzer", stringLiteral("keyword")),
             unresolvedArg("operator", stringLiteral("AND"))),
-        buildExprAst("simple_query_string(['field1', 'field2' ^ 3.2], 'search query',"
-            + "analyzer='keyword', operator='AND')"));
+        buildExprAst(
+            "simple_query_string(['field1', 'field2' ^ 3.2], 'search query',"
+                + "analyzer='keyword', operator='AND')"));
   }
 
   @Test
   public void relevanceQuery_string() {
-    assertEquals(AstDSL.function("query_string",
-            unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of(
-                "field2", 3.2F, "field1", 1.F))),
+    assertEquals(
+        AstDSL.function(
+            "query_string",
+            unresolvedArg(
+                "fields", new RelevanceFieldList(ImmutableMap.of("field2", 3.2F, "field1", 1.F))),
             unresolvedArg("query", stringLiteral("search query"))),
-        buildExprAst("query_string(['field1', 'field2' ^ 3.2], 'search query')")
-    );
+        buildExprAst("query_string(['field1', 'field2' ^ 3.2], 'search query')"));
 
-    assertEquals(AstDSL.function("query_string",
-            unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of(
-                "field2", 3.2F, "field1", 1.F))),
+    assertEquals(
+        AstDSL.function(
+            "query_string",
+            unresolvedArg(
+                "fields", new RelevanceFieldList(ImmutableMap.of("field2", 3.2F, "field1", 1.F))),
             unresolvedArg("query", stringLiteral("search query")),
             unresolvedArg("analyzer", stringLiteral("keyword")),
             unresolvedArg("time_zone", stringLiteral("Canada/Pacific")),
             unresolvedArg("tie_breaker", stringLiteral("1.3"))),
-        buildExprAst("query_string(['field1', 'field2' ^ 3.2], 'search query',"
-            + "analyzer='keyword', time_zone='Canada/Pacific', tie_breaker='1.3')"));
+        buildExprAst(
+            "query_string(['field1', 'field2' ^ 3.2], 'search query',"
+                + "analyzer='keyword', time_zone='Canada/Pacific', tie_breaker='1.3')"));
   }
 
   @Test
   public void relevanceWildcard_query() {
-    assertEquals(AstDSL.function("wildcard_query",
+    assertEquals(
+        AstDSL.function(
+            "wildcard_query",
             unresolvedArg("field", qualifiedName("field")),
             unresolvedArg("query", stringLiteral("search query*")),
             unresolvedArg("boost", stringLiteral("1.5")),
             unresolvedArg("case_insensitive", stringLiteral("true")),
             unresolvedArg("rewrite", stringLiteral("scoring_boolean"))),
-        buildExprAst("wildcard_query(field, 'search query*', boost=1.5,"
-            + "case_insensitive=true, rewrite='scoring_boolean'))")
-    );
+        buildExprAst(
+            "wildcard_query(field, 'search query*', boost=1.5,"
+                + "case_insensitive=true, rewrite='scoring_boolean'))"));
   }
 
   @Test
   public void relevanceScore_query() {
     assertEquals(
         AstDSL.score(
-            AstDSL.function("query_string",
-              unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of(
-                  "field1", 1.F, "field2", 3.2F))),
-              unresolvedArg("query", stringLiteral("search query"))
-            ),
-            AstDSL.doubleLiteral(1.0)
-        ),
-        buildExprAst("score(query_string(['field1', 'field2' ^ 3.2], 'search query'))")
-    );
+            AstDSL.function(
+                "query_string",
+                unresolvedArg(
+                    "fields",
+                    new RelevanceFieldList(ImmutableMap.of("field1", 1.F, "field2", 3.2F))),
+                unresolvedArg("query", stringLiteral("search query"))),
+            AstDSL.doubleLiteral(1.0)),
+        buildExprAst("score(query_string(['field1', 'field2' ^ 3.2], 'search query'))"));
   }
 
   @Test
   public void relevanceScore_withBoost_query() {
     assertEquals(
         AstDSL.score(
-            AstDSL.function("query_string",
-                unresolvedArg("fields", new RelevanceFieldList(ImmutableMap.of(
-                    "field1", 1.F, "field2", 3.2F))),
-                unresolvedArg("query", stringLiteral("search query"))
-            ),
-            doubleLiteral(1.0)
-        ),
-        buildExprAst("score(query_string(['field1', 'field2' ^ 3.2], 'search query'), 1.0)")
-    );
+            AstDSL.function(
+                "query_string",
+                unresolvedArg(
+                    "fields",
+                    new RelevanceFieldList(ImmutableMap.of("field1", 1.F, "field2", 3.2F))),
+                unresolvedArg("query", stringLiteral("search query"))),
+            doubleLiteral(1.0)),
+        buildExprAst("score(query_string(['field1', 'field2' ^ 3.2], 'search query'), 1.0)"));
   }
 
   @Test
   public void relevanceQuery() {
-    assertEquals(AstDSL.function("query",
-                    unresolvedArg("query", stringLiteral("field1:query OR field2:query"))),
-            buildExprAst("query('field1:query OR field2:query')")
-    );
+    assertEquals(
+        AstDSL.function(
+            "query", unresolvedArg("query", stringLiteral("field1:query OR field2:query"))),
+        buildExprAst("query('field1:query OR field2:query')"));
 
-    assertEquals(AstDSL.function("query",
-                    unresolvedArg("query", stringLiteral("search query")),
-                    unresolvedArg("analyzer", stringLiteral("keyword")),
-                    unresolvedArg("time_zone", stringLiteral("Canada/Pacific")),
-                    unresolvedArg("tie_breaker", stringLiteral("1.3"))),
-            buildExprAst("query('search query',"
-                    + "analyzer='keyword', time_zone='Canada/Pacific', tie_breaker='1.3')"));
+    assertEquals(
+        AstDSL.function(
+            "query",
+            unresolvedArg("query", stringLiteral("search query")),
+            unresolvedArg("analyzer", stringLiteral("keyword")),
+            unresolvedArg("time_zone", stringLiteral("Canada/Pacific")),
+            unresolvedArg("tie_breaker", stringLiteral("1.3"))),
+        buildExprAst(
+            "query('search query',"
+                + "analyzer='keyword', time_zone='Canada/Pacific', tie_breaker='1.3')"));
   }
 
   @Test
@@ -876,7 +787,8 @@ public void canBuildInClause() {
         buildExprAst("age not in (20, 30)"));
 
     assertEquals(
-        AstDSL.in(qualifiedName("age"),
+        AstDSL.in(
+            qualifiedName("age"),
             AstDSL.function("abs", AstDSL.intLiteral(20)),
             AstDSL.function("abs", AstDSL.intLiteral(30))),
         buildExprAst("age in (abs(20), abs(30))"));
diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AstHavingFilterBuilderTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AstHavingFilterBuilderTest.java
index 1cb1ab5f8b..b2e4c54160 100644
--- a/sql/src/test/java/org/opensearch/sql/sql/parser/AstHavingFilterBuilderTest.java
+++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AstHavingFilterBuilderTest.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.sql.parser;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -30,8 +29,7 @@
 @ExtendWith(MockitoExtension.class)
 class AstHavingFilterBuilderTest {
 
-  @Mock
-  private QuerySpecification querySpec;
+  @Mock private QuerySpecification querySpec;
 
   private AstHavingFilterBuilder builder;
 
diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AstNowLikeFunctionTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AstNowLikeFunctionTest.java
index 4ce2a2d3f7..639d73e419 100644
--- a/sql/src/test/java/org/opensearch/sql/sql/parser/AstNowLikeFunctionTest.java
+++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AstNowLikeFunctionTest.java
@@ -25,32 +25,29 @@
 class AstNowLikeFunctionTest extends AstBuilderTestBase {
 
   private static Stream allFunctions() {
-    return Stream.of("curdate",
-        "current_date",
-        "current_time",
-        "current_timestamp",
-        "curtime",
-        "localtimestamp",
-        "localtime",
-        "now",
-        "sysdate",
-        "utc_date",
-        "utc_time",
-        "utc_timestamp")
+    return Stream.of(
+            "curdate",
+            "current_date",
+            "current_time",
+            "current_timestamp",
+            "curtime",
+            "localtimestamp",
+            "localtime",
+            "now",
+            "sysdate",
+            "utc_date",
+            "utc_time",
+            "utc_timestamp")
         .map(Arguments::of);
   }
 
   private static Stream supportFsp() {
-    return Stream.of("sysdate")
-        .map(Arguments::of);
+    return Stream.of("sysdate").map(Arguments::of);
   }
 
   private static Stream supportShortcut() {
-    return Stream.of("current_date",
-            "current_time",
-            "current_timestamp",
-            "localtimestamp",
-            "localtime")
+    return Stream.of(
+            "current_date", "current_time", "current_timestamp", "localtimestamp", "localtime")
         .map(Arguments::of);
   }
 
@@ -59,12 +56,7 @@ private static Stream supportShortcut() {
   void project_call(String name) {
     String call = name + "()";
     assertEquals(
-        project(
-            values(emptyList()),
-            alias(call, function(name))
-        ),
-        buildAST("SELECT " + call)
-    );
+        project(values(emptyList()), alias(call, function(name))), buildAST("SELECT " + call));
   }
 
   @ParameterizedTest
@@ -73,29 +65,16 @@ void filter_call(String name) {
     String call = name + "()";
     assertEquals(
         project(
-            filter(
-                relation("test"),
-                function(
-                    "=",
-                    qualifiedName("data"),
-                    function(name))
-            ),
-            AllFields.of()
-        ),
-        buildAST("SELECT * FROM test WHERE data = " + call)
-    );
+            filter(relation("test"), function("=", qualifiedName("data"), function(name))),
+            AllFields.of()),
+        buildAST("SELECT * FROM test WHERE data = " + call));
   }
 
-
   @ParameterizedTest
   @MethodSource("supportFsp")
   void fsp(String name) {
     assertEquals(
-        project(
-            values(emptyList()),
-            alias(name + "(0)", function(name, intLiteral(0)))
-        ),
-        buildAST("SELECT " + name + "(0)")
-    );
+        project(values(emptyList()), alias(name + "(0)", function(name, intLiteral(0)))),
+        buildAST("SELECT " + name + "(0)"));
   }
 }
diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AstQualifiedNameBuilderTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AstQualifiedNameBuilderTest.java
index 28665dd7ef..b0a7592990 100644
--- a/sql/src/test/java/org/opensearch/sql/sql/parser/AstQualifiedNameBuilderTest.java
+++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AstQualifiedNameBuilderTest.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.sql.parser;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -66,9 +65,10 @@ public void functionNameCanBeUsedAsIdentifier() {
 
   void assertFunctionNameCouldBeId(String antlrFunctionName) {
     List functionList =
-        Arrays.stream(antlrFunctionName.split("\\|")).map(String::stripLeading)
-            .map(String::stripTrailing).collect(
-            Collectors.toList());
+        Arrays.stream(antlrFunctionName.split("\\|"))
+            .map(String::stripLeading)
+            .map(String::stripTrailing)
+            .collect(Collectors.toList());
 
     assertFalse(functionList.isEmpty());
     for (String functionName : functionList) {
@@ -109,5 +109,4 @@ private OpenSearchSQLParser createParser(String expr) {
       return parser;
     }
   }
-
 }
diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/AstSortBuilderTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/AstSortBuilderTest.java
index 3c8d155e65..f72f1ba0ff 100644
--- a/sql/src/test/java/org/opensearch/sql/sql/parser/AstSortBuilderTest.java
+++ b/sql/src/test/java/org/opensearch/sql/sql/parser/AstSortBuilderTest.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.sql.parser;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -40,14 +39,11 @@
 @ExtendWith(MockitoExtension.class)
 class AstSortBuilderTest {
 
-  @Mock
-  private QuerySpecification querySpec;
+  @Mock private QuerySpecification querySpec;
 
-  @Mock
-  private OrderByClauseContext orderByClause;
+  @Mock private OrderByClauseContext orderByClause;
 
-  @Mock
-  private UnresolvedPlan child;
+  @Mock private UnresolvedPlan child;
 
   @Test
   void can_build_sort_node() {
@@ -56,32 +52,35 @@ void can_build_sort_node() {
 
     ImmutableMap> expects =
         ImmutableMap.>builder()
-            .put(new SortOption(null, null),
-                ImmutableList.of(argument("asc", booleanLiteral(true))))
-            .put(new SortOption(ASC, null),
-                ImmutableList.of(argument("asc", booleanLiteral(true))))
-            .put(new SortOption(DESC, null),
+            .put(
+                new SortOption(null, null), ImmutableList.of(argument("asc", booleanLiteral(true))))
+            .put(new SortOption(ASC, null), ImmutableList.of(argument("asc", booleanLiteral(true))))
+            .put(
+                new SortOption(DESC, null),
                 ImmutableList.of(argument("asc", booleanLiteral(false))))
-            .put(new SortOption(null, NULL_LAST),
+            .put(
+                new SortOption(null, NULL_LAST),
                 ImmutableList.of(
                     argument("asc", booleanLiteral(true)),
                     argument("nullFirst", booleanLiteral(false))))
-            .put(new SortOption(DESC, NULL_FIRST),
+            .put(
+                new SortOption(DESC, NULL_FIRST),
                 ImmutableList.of(
                     argument("asc", booleanLiteral(false)),
                     argument("nullFirst", booleanLiteral(true))))
             .build();
 
-    expects.forEach((option, expect) -> {
-      when(querySpec.getOrderByOptions()).thenReturn(ImmutableList.of(option));
+    expects.forEach(
+        (option, expect) -> {
+          when(querySpec.getOrderByOptions()).thenReturn(ImmutableList.of(option));
 
-      AstSortBuilder sortBuilder = new AstSortBuilder(querySpec);
-      assertEquals(
-          new Sort(
-              child, // has to mock and attach child otherwise Guava ImmutableList NPE in getChild()
-              ImmutableList.of(field("name", expect))),
-          sortBuilder.visitOrderByClause(orderByClause).attach(child));
-    });
+          AstSortBuilder sortBuilder = new AstSortBuilder(querySpec);
+          assertEquals(
+              new Sort(
+                  child, // has to mock and attach child otherwise Guava ImmutableList NPE in
+                  // getChild()
+                  ImmutableList.of(field("name", expect))),
+              sortBuilder.visitOrderByClause(orderByClause).attach(child));
+        });
   }
-
 }
diff --git a/sql/src/test/java/org/opensearch/sql/sql/parser/context/QuerySpecificationTest.java b/sql/src/test/java/org/opensearch/sql/sql/parser/context/QuerySpecificationTest.java
index 2f75e89002..ed18b3e692 100644
--- a/sql/src/test/java/org/opensearch/sql/sql/parser/context/QuerySpecificationTest.java
+++ b/sql/src/test/java/org/opensearch/sql/sql/parser/context/QuerySpecificationTest.java
@@ -3,7 +3,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-
 package org.opensearch.sql.sql.parser.context;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -35,32 +34,27 @@ class QuerySpecificationTest {
 
   @Test
   void can_collect_group_by_items_in_group_by_clause() {
-    QuerySpecification querySpec = collect(
-        "SELECT name, ABS(age) FROM test GROUP BY name, ABS(age)");
+    QuerySpecification querySpec =
+        collect("SELECT name, ABS(age) FROM test GROUP BY name, ABS(age)");
 
     assertEquals(
-        ImmutableList.of(
-            qualifiedName("name"),
-            function("ABS", qualifiedName("age"))),
+        ImmutableList.of(qualifiedName("name"), function("ABS", qualifiedName("age"))),
         querySpec.getGroupByItems());
   }
 
   @Test
   void can_collect_select_items_in_select_clause() {
-    QuerySpecification querySpec = collect(
-        "SELECT name, ABS(age) FROM test");
+    QuerySpecification querySpec = collect("SELECT name, ABS(age) FROM test");
 
     assertEquals(
-        ImmutableList.of(
-            qualifiedName("name"),
-            function("ABS", qualifiedName("age"))),
+        ImmutableList.of(qualifiedName("name"), function("ABS", qualifiedName("age"))),
         querySpec.getSelectItems());
   }
 
   @Test
   void can_collect_aggregators_in_select_clause() {
-    QuerySpecification querySpec = collect(
-        "SELECT name, AVG(age), SUM(balance) FROM test GROUP BY name");
+    QuerySpecification querySpec =
+        collect("SELECT name, AVG(age), SUM(balance) FROM test GROUP BY name");
 
     assertEquals(
         ImmutableSet.of(
@@ -71,29 +65,25 @@ void can_collect_aggregators_in_select_clause() {
 
   @Test
   void can_collect_nested_aggregators_in_select_clause() {
-    QuerySpecification querySpec = collect(
-        "SELECT name, ABS(1 + AVG(age)) FROM test GROUP BY name");
+    QuerySpecification querySpec =
+        collect("SELECT name, ABS(1 + AVG(age)) FROM test GROUP BY name");
 
     assertEquals(
-        ImmutableSet.of(
-            alias("AVG(age)", aggregate("AVG", qualifiedName("age")))),
+        ImmutableSet.of(alias("AVG(age)", aggregate("AVG", qualifiedName("age")))),
         querySpec.getAggregators());
   }
 
   @Test
   void can_collect_alias_in_select_clause() {
-    QuerySpecification querySpec = collect(
-        "SELECT name AS n FROM test GROUP BY n");
+    QuerySpecification querySpec = collect("SELECT name AS n FROM test GROUP BY n");
 
-    assertEquals(
-        ImmutableMap.of("n", qualifiedName("name")),
-        querySpec.getSelectItemsByAlias());
+    assertEquals(ImmutableMap.of("n", qualifiedName("name")), querySpec.getSelectItemsByAlias());
   }
 
   @Test
   void should_deduplicate_same_aggregators() {
-    QuerySpecification querySpec = collect(
-        "SELECT AVG(age), AVG(balance), AVG(age) FROM test GROUP BY name");
+    QuerySpecification querySpec =
+        collect("SELECT AVG(age), AVG(balance), AVG(age) FROM test GROUP BY name");
 
     assertEquals(
         ImmutableSet.of(
@@ -119,20 +109,24 @@ void can_collect_sort_options_in_order_by_clause() {
 
   @Test
   void should_skip_sort_items_in_window_function() {
-    assertEquals(1,
-        collect("SELECT name, RANK() OVER(ORDER BY age) "
-            + "FROM test ORDER BY name"
-        ).getOrderByOptions().size());
+    assertEquals(
+        1,
+        collect("SELECT name, RANK() OVER(ORDER BY age) " + "FROM test ORDER BY name")
+            .getOrderByOptions()
+            .size());
   }
 
   @Test
   void can_collect_filtered_aggregation() {
     assertEquals(
-        ImmutableSet.of(alias("AVG(age) FILTER(WHERE age > 20)",
-            filteredAggregate("AVG", qualifiedName("age"),
-                function(">", qualifiedName("age"), intLiteral(20))))),
-        collect("SELECT AVG(age) FILTER(WHERE age > 20) FROM test").getAggregators()
-    );
+        ImmutableSet.of(
+            alias(
+                "AVG(age) FILTER(WHERE age > 20)",
+                filteredAggregate(
+                    "AVG",
+                    qualifiedName("age"),
+                    function(">", qualifiedName("age"), intLiteral(20))))),
+        collect("SELECT AVG(age) FILTER(WHERE age > 20) FROM test").getAggregators());
   }
 
   private QuerySpecification collect(String query) {
@@ -147,5 +141,4 @@ private QuerySpecificationContext parse(String query) {
     parser.addErrorListener(new SyntaxAnalysisErrorListener());
     return parser.querySpecification();
   }
-
 }