From cd1470b9f384cbf7e6159ee8f9b5bd94fd662705 Mon Sep 17 00:00:00 2001 From: Binglin Chang Date: Tue, 24 Sep 2024 20:42:07 +0800 Subject: [PATCH] [BugFix] Fix duplicate entries in be_logs; Add reset_delvec in script; Add BE id in error message when query failed (#51204) Signed-off-by: Binglin Chang (cherry picked from commit 52c55f9c2b26bf1a2bfb570514a0eb695ea906f5) # Conflicts: # be/src/common/greplog.cpp # be/src/service/backend_options.h # be/test/storage/tablet_updates_test.cpp # test/sql/test_array_fn/R/test_array_sortby # test/sql/test_dict_mapping_function/R/test_dict_mapping_function # test/sql/test_dictionary/R/test_dictionary # test/sql/test_external_file/R/test_orc_predicates # test/sql/test_group_execution/R/test_group_execution_join # test/sql/test_inverted_index/R/test_inverted_index # test/sql/test_json/R/to_json # test/sql/test_string_functions/R/test_string_functions --- be/src/common/greplog.cpp | 20 +- be/src/common/greplog.h | 3 +- .../pipeline/pipeline_driver_executor.cpp | 4 + .../schema_scanner/schema_be_logs_scanner.cpp | 2 +- be/src/http/action/greplog_action.cpp | 2 +- be/src/script/script.cpp | 12 + be/src/service/backend_options.cpp | 9 +- be/src/service/backend_options.h | 12 +- be/src/service/service_be/starrocks_be.cpp | 2 +- be/test/storage/tablet_updates_test.cpp | 10 + test/sql/test_array_fn/R/test_array_fn | 4 +- test/sql/test_array_fn/R/test_array_sortby | 436 +++++ test/sql/test_decimal/R/test_decimal_overflow | 30 +- .../R/test_dict_mapping_function | 309 +++ test/sql/test_dictionary/R/test_dictionary | 1639 ++++++++++++++++ .../test_external_file/R/test_orc_predicates | 49 + test/sql/test_function/R/test_time_slice | 20 +- .../R/test_generate_series | 4 +- .../R/test_group_execution_join | 473 +++++ .../test_inverted_index/R/test_inverted_index | 1691 +++++++++++++++++ test/sql/test_json/R/to_json | 14 +- .../R/test_string_functions | 266 +++ .../test_trino_dialect/R/test_trino_dialect | 4 +- test/sql/test_udf/R/test_jvm_udf | 
2 +- 24 files changed, 4973 insertions(+), 44 deletions(-) create mode 100644 test/sql/test_array_fn/R/test_array_sortby create mode 100644 test/sql/test_dict_mapping_function/R/test_dict_mapping_function create mode 100644 test/sql/test_dictionary/R/test_dictionary create mode 100644 test/sql/test_external_file/R/test_orc_predicates create mode 100644 test/sql/test_group_execution/R/test_group_execution_join create mode 100644 test/sql/test_inverted_index/R/test_inverted_index diff --git a/be/src/common/greplog.cpp b/be/src/common/greplog.cpp index ec5bb1d25ab630..e9ee18b37ef057 100644 --- a/be/src/common/greplog.cpp +++ b/be/src/common/greplog.cpp @@ -24,6 +24,7 @@ #include "gutil/strings/substitute.h" #include "hs/hs_compile.h" #include "hs/hs_runtime.h" +#include "service/backend_options.h" #include "util/defer_op.h" using namespace std; @@ -33,16 +34,18 @@ namespace starrocks { static std::vector list_log_files_in_dir(const string& log_dir, char level) { std::vector files; // if level in WARNING, ERROR, FATAL, use logging logs, else use info logs - const std::string pattern = string("WEF").find(level) == string::npos ? "be.INFO.log." : "be.WARNING.log."; + const std::string process = BackendOptions::is_cn() ? "cn" : "be"; + const std::string pattern = process + (string("WEF").find(level) == string::npos ? ".INFO.log." 
: ".WARNING.log."); for (const auto& entry : filesystem::directory_iterator(log_dir)) { if (entry.is_regular_file()) { auto name = entry.path().filename().string(); - if (name.length() > pattern.length() && name.substr(0, pattern.length()) == pattern) { + if (name.length() > pattern.length() && name.find(pattern) != string::npos) { files.push_back(entry.path().string()); } } } std::sort(files.begin(), files.end(), std::greater()); + LOG_IF(WARNING, files.empty()) << "list_log_files_in_dir failed, no log files in " << log_dir; return files; } @@ -180,7 +183,7 @@ Status grep_log_single_file(const string& path, int64_t start_ts, int64_t end_ts ctx.line_len = read; if (database == nullptr) { // no pattern, add all lines - scan_by_line_handler(0, 0, 0, 0, &ctx); + scan_by_line_handler(0, 0, read, 0, &ctx); } else { if (hs_scan(database, line, read, 0, scratch, scan_by_line_handler, &ctx) != HS_SUCCESS) { break; } } @@ -195,6 +198,7 @@ Status grep_log(int64_t start_ts, int64_t end_ts, char level, const std::string& pattern, size_t limit, std::deque& entries) { + level = std::toupper(level); const string log_dir = config::sys_log_dir; if (log_dir.empty()) { return Status::InternalError(strings::Substitute("grep log failed $0 is empty", log_dir)); } @@ -206,7 +210,8 @@ Status grep_log(int64_t start_ts, int64_t end_ts, char level, const std::string& hs_database_t* database = nullptr; if (!pattern.empty()) { hs_compile_error_t* compile_err; - if (hs_compile(pattern.c_str(), 0, HS_MODE_BLOCK, NULL, &database, &compile_err) != HS_SUCCESS) { + if (hs_compile(pattern.c_str(), HS_FLAG_SINGLEMATCH, HS_MODE_BLOCK, nullptr, &database, &compile_err) != + HS_SUCCESS) { hs_free_compile_error(compile_err); return Status::InternalError(
strings::Substitute("grep log failed compile pattern $0 failed $1", pattern, compile_err->message)); @@ -253,10 +262,11 @@ Status grep_log(int64_t start_ts, int64_t end_ts, char level, const std::string& return Status::OK(); } -std::string grep_log_as_string(int64_t start_ts, int64_t end_ts, char level, const std::string& pattern, size_t limit) { +std::string grep_log_as_string(int64_t start_ts, int64_t end_ts, const std::string& level, const std::string& pattern, + size_t limit) { std::ostringstream ss; std::deque entries; - auto st = grep_log(start_ts, end_ts, level, pattern, limit, entries); + auto st = grep_log(start_ts, end_ts, level[0], pattern, limit, entries); if (!st.ok()) { ss << strings::Substitute("grep log failed $0 start_ts:$1 end_ts:$2 level:$3 pattern:$4 limit:$5\n", st.to_string(), start_ts, end_ts, level, pattern, limit); diff --git a/be/src/common/greplog.h b/be/src/common/greplog.h index 005878d80102ae..71884efeb1cc66 100644 --- a/be/src/common/greplog.h +++ b/be/src/common/greplog.h @@ -48,6 +48,7 @@ Status grep_log(int64_t start_ts, int64_t end_ts, char level, const std::string& * Grep log file and return all line as whole string, parameters are same as grep_log * @return log string */ -std::string grep_log_as_string(int64_t start_ts, int64_t end_ts, char level, const std::string& pattern, size_t limit); +std::string grep_log_as_string(int64_t start_ts, int64_t end_ts, const std::string& level, const std::string& pattern, + size_t limit); } // namespace starrocks diff --git a/be/src/exec/pipeline/pipeline_driver_executor.cpp b/be/src/exec/pipeline/pipeline_driver_executor.cpp index c35a09a35a994a..98838232afae0d 100644 --- a/be/src/exec/pipeline/pipeline_driver_executor.cpp +++ b/be/src/exec/pipeline/pipeline_driver_executor.cpp @@ -16,6 +16,7 @@ #include +#include "agent/master_info.h" #include "exec/pipeline/stream_pipeline_driver.h" #include "exec/workgroup/work_group.h" #include "gutil/strings/substitute.h" @@ -163,6 +164,9 @@ void 
GlobalDriverExecutor::_worker_thread() { } if (!status.ok()) { + auto o_id = get_backend_id(); + int64_t be_id = o_id.has_value() ? o_id.value() : -1; + status = status.clone_and_append(fmt::format("BE:{}", be_id)); LOG(WARNING) << "[Driver] Process error, query_id=" << print_id(driver->query_ctx()->query_id()) << ", instance_id=" << print_id(driver->fragment_ctx()->fragment_instance_id()) << ", status=" << status; diff --git a/be/src/exec/schema_scanner/schema_be_logs_scanner.cpp b/be/src/exec/schema_scanner/schema_be_logs_scanner.cpp index 229f0e7839d2dd..63a3c2ef0e0565 100644 --- a/be/src/exec/schema_scanner/schema_be_logs_scanner.cpp +++ b/be/src/exec/schema_scanner/schema_be_logs_scanner.cpp @@ -46,7 +46,7 @@ Status SchemaBeLogsScanner::start(RuntimeState* state) { if (_param->log_end_ts > 0) { end_ts = _param->log_end_ts; } - string level; + string level = "I"; string pattern; if (_param->log_level != nullptr) { level = *_param->log_level; diff --git a/be/src/http/action/greplog_action.cpp b/be/src/http/action/greplog_action.cpp index 214b189926a426..2399afe43641c7 100644 --- a/be/src/http/action/greplog_action.cpp +++ b/be/src/http/action/greplog_action.cpp @@ -66,7 +66,7 @@ void GrepLogAction::handle(HttpRequest* req) { return; } - auto ret = grep_log_as_string(start_ts, end_ts, std::toupper(level[0]), pattern, limit); + auto ret = grep_log_as_string(start_ts, end_ts, level, pattern, limit); HttpChannel::send_reply(req, HttpStatus::OK, ret); } diff --git a/be/src/script/script.cpp b/be/src/script/script.cpp index 7c3ac965819172..69b5df28383a14 100644 --- a/be/src/script/script.cpp +++ b/be/src/script/script.cpp @@ -26,6 +26,7 @@ #include "io/io_profiler.h" #include "runtime/exec_env.h" #include "runtime/mem_tracker.h" +#include "storage/del_vector.h" #include "storage/primary_key_dump.h" #include "storage/storage_engine.h" #include "storage/tablet.h" @@ -305,6 +306,16 @@ class StorageEngineRef { } } + // this method is specifically used to recover "no 
delete vector found" error caused by corrupt pk tablet metadata + static std::string reset_delvec(int64_t tablet_id, int64_t segment_id, int64_t version) { + auto tablet = get_tablet(tablet_id); + RETURN_IF_UNLIKELY_NULL(tablet, "tablet not found"); + DelVector dv; + dv.init(version, nullptr, 0); + auto st = TabletMetaManager::set_del_vector(tablet->data_dir()->get_meta(), tablet_id, segment_id, dv); + return st.to_string(); + } + static size_t submit_manual_compaction_task_for_table(int64_t table_id, int64_t rowset_size_threshold) { auto infos = get_tablet_infos(table_id, -1); for (auto& info : infos) { @@ -490,6 +501,7 @@ class StorageEngineRef { REG_STATIC_METHOD(StorageEngineRef, get_tablet_info); REG_STATIC_METHOD(StorageEngineRef, get_tablet_infos); REG_STATIC_METHOD(StorageEngineRef, get_tablet_meta_json); + REG_STATIC_METHOD(StorageEngineRef, reset_delvec); REG_STATIC_METHOD(StorageEngineRef, get_tablet); REG_STATIC_METHOD(StorageEngineRef, drop_tablet); REG_STATIC_METHOD(StorageEngineRef, get_data_dirs); diff --git a/be/src/service/backend_options.cpp b/be/src/service/backend_options.cpp index 298bc2aafcb29b..4551a266570d10 100644 --- a/be/src/service/backend_options.cpp +++ b/be/src/service/backend_options.cpp @@ -34,7 +34,14 @@ std::string BackendOptions::_s_localhost; std::vector BackendOptions::_s_priority_cidrs; TBackend BackendOptions::_backend; -bool BackendOptions::init() { +bool BackendOptions::_is_cn = false; + +bool BackendOptions::is_cn() { + return _is_cn; +} + +bool BackendOptions::init(bool is_cn) { + _is_cn = is_cn; if (!analyze_priority_cidrs()) { return false; } diff --git a/be/src/service/backend_options.h b/be/src/service/backend_options.h index 5f2c6483834471..45f0569a4718af 100644 --- a/be/src/service/backend_options.h +++ b/be/src/service/backend_options.h @@ -30,10 +30,11 @@ class CIDR; class BackendOptions { public: - static bool init(); + static bool init(bool is_cn); static std::string get_localhost(); static TBackend
get_localBackend(); static void set_localhost(const std::string& host); + static bool is_cn(); private: static bool analyze_priority_cidrs(); @@ -42,6 +43,7 @@ class BackendOptions { static std::string _s_localhost; static std::vector _s_priority_cidrs; static TBackend _backend; + static bool _is_cn; BackendOptions(const BackendOptions&) = delete; const BackendOptions& operator=(const BackendOptions&) = delete; diff --git a/be/src/service/service_be/starrocks_be.cpp b/be/src/service/service_be/starrocks_be.cpp index ee1a8d290a2896..41e3359f9be3a2 100644 --- a/be/src/service/service_be/starrocks_be.cpp +++ b/be/src/service/service_be/starrocks_be.cpp @@ -119,7 +119,7 @@ void start_be(const std::vector& paths, bool as_cn) { LOG(INFO) << "BE start step " << start_step++ << ": jdbc driver manager init successfully"; // init network option - if (!BackendOptions::init()) { + if (!BackendOptions::init(as_cn)) { exit(-1); } LOG(INFO) << "BE start step " << start_step++ << ": backend network options init successfully"; diff --git a/be/test/storage/tablet_updates_test.cpp b/be/test/storage/tablet_updates_test.cpp index cc3efbaec49159..54e68b4303b918 100644 --- a/be/test/storage/tablet_updates_test.cpp +++ b/be/test/storage/tablet_updates_test.cpp @@ -30,6 +30,7 @@ #include "storage/chunk_helper.h" #include "storage/empty_iterator.h" #include "storage/kv_store.h" +#include "script/script.h"
#include "storage/local_primary_key_recover.h" #include "storage/primary_key_dump.h" #include "storage/primary_key_encoder.h" @@ -1061,6 +1062,12 @@ void TabletUpdatesTest::test_writeread(bool enable_persistent_index) { auto rs0 = create_rowset(_tablet, keys); ASSERT_TRUE(_tablet->rowset_commit(2, rs0).ok()); ASSERT_EQ(2, _tablet->updates()->max_version()); + + string o; + ASSERT_TRUE(execute_script(fmt::format("StorageEngine.reset_delvec({}, {}, 2)", _tablet->tablet_id(), 0), o).ok()); + ASSERT_TRUE(execute_script("System.print(ExecEnv.grep_log_as_string(0,0,\"I\",\"tablet_manager\",1))", o).ok()); + LOG(INFO) << "grep log: " << o; + auto rs1 = create_rowset(_tablet, keys); ASSERT_TRUE(_tablet->rowset_commit(3, rs1).ok()); ASSERT_EQ(3, _tablet->updates()->max_version()); diff --git a/test/sql/test_array_fn/R/test_array_fn b/test/sql/test_array_fn/R/test_array_fn index 6b61d94447cca5..bc460a300d5ecd 100644 --- a/test/sql/test_array_fn/R/test_array_fn +++ b/test/sql/test_array_fn/R/test_array_fn @@ -4073,11 +4073,11 @@ None None -- !result select d_6, d_5, all_match(d_6,d_5, (x,y)->x >y) from array_test order by pk; -- result: -E: (1064, "Input array element's size is not equal in array_map().") +[REGEX].*Input array element's size is not equal in array_map().* -- !result select d_6, d_5, any_match(d_6,d_5, (x,y)->x >y) from array_test order by pk; -- result: -E: (1064, "Input array element's size is not equal in array_map().") +[REGEX].*Input array element's size is not equal in array_map().* -- !result select all_match((x,y) -> x < y, []); -- result: diff --git a/test/sql/test_array_fn/R/test_array_sortby b/test/sql/test_array_fn/R/test_array_sortby new file mode 100644 index 00000000000000..49ef8078924cc6 --- /dev/null +++ b/test/sql/test_array_fn/R/test_array_sortby @@ -0,0 +1,436 @@ +-- name: test_array_sortby_1 +CREATE TABLE t1 ( + id INT(11) not null, + array_col1
ARRAY, + array_col2 ARRAY, + array_col3 ARRAY, + array_col4 ARRAY +) ENGINE=OLAP +DUPLICATE KEY(id) +COMMENT "OLAP" +DISTRIBUTED BY HASH(id) +PROPERTIES ( + "replication_num" = "1" +); +-- result: +-- !result +INSERT INTO t1 VALUES +(1, [4, 3, 5], [1.1, 2.2, 2.2], ['a', 'b', 'c'], ['2023-01-01', '2023-01-02', '2023-01-03']), +(2, [6, 7, 8], [6.6, 5.5, 6.6], ['d', 'e', 'd'], ['2023-01-04', '2023-01-05', '2023-01-06']), +(3, NULL, [7.7, 8.8, 8.8], ['g', 'h', 'h'], ['2023-01-07', '2023-01-08', '2023-01-09']), +(4, [9, 10, 11], NULL, ['k', 'k', 'j'], ['2023-01-10', '2023-01-12', '2023-01-11']), +(5, [12, 13, 14], [10.10, 11.11, 11.11], NULL, ['2023-01-13', '2023-01-14', '2023-01-15']), +(6, [15, 16, 17], [14.14, 13.13, 14.14], ['m', 'o', 'o'], NULL), +(7, [18, 19, 20], [16.16, 16.16, 18.18], ['p', 'p', 'r'], ['2023-01-16', NULL, '2023-01-18']), +(8, [21, 22, 23], [19.19, 20.20, 19.19], ['a', 't', 'a'], ['2023-01-19', '2023-01-20', '2023-01-21']), +(9, [24, 25, 26], NULL, ['y', 'y', 'z'], ['2023-01-25', '2023-01-24', '2023-01-26']), +(10, [24, 25, 26], NULL, ['y', 'y', 'z'], ['2023-01-25', NULL, '2023-01-26']); +-- result: +-- !result +select id, array_col1, array_col2, array_sortby(array_col1, array_col2) from t1 order by id asc; +-- result: +1 [4,3,5] [1.1,2.2,2.2] [4,3,5] +2 [6,7,8] [6.6,5.5,6.6] [7,6,8] +3 None [7.7,8.8,8.8] None +4 [9,10,11] None [9,10,11] +5 [12,13,14] [10.1,11.11,11.11] [12,13,14] +6 [15,16,17] [14.14,13.13,14.14] [16,15,17] +7 [18,19,20] [16.16,16.16,18.18] [18,19,20] +8 [21,22,23] [19.19,20.2,19.19] [21,23,22] +9 [24,25,26] None [24,25,26] +10 [24,25,26] None [24,25,26] +-- !result +select id, array_col1, array_col2, array_col3, array_sortby(array_col1, array_col2, array_col3) from t1 order by id asc; +-- result: +1 [4,3,5] [1.1,2.2,2.2] ["a","b","c"] [4,3,5] +2 [6,7,8] [6.6,5.5,6.6] ["d","e","d"] [7,6,8] +3 None [7.7,8.8,8.8] ["g","h","h"] None +4 [9,10,11] None ["k","k","j"] [11,9,10] +5 [12,13,14] [10.1,11.11,11.11] None [12,13,14] +6 
[15,16,17] [14.14,13.13,14.14] ["m","o","o"] [16,15,17] +7 [18,19,20] [16.16,16.16,18.18] ["p","p","r"] [18,19,20] +8 [21,22,23] [19.19,20.2,19.19] ["a","t","a"] [21,23,22] +9 [24,25,26] None ["y","y","z"] [24,25,26] +10 [24,25,26] None ["y","y","z"] [24,25,26] +-- !result +select id, array_col1, array_col2, array_col3, array_col4, array_sortby(array_col1, array_col2, array_col3, array_col4) from t1 order by id asc; +-- result: +1 [4,3,5] [1.1,2.2,2.2] ["a","b","c"] ["2023-01-01","2023-01-02","2023-01-03"] [4,3,5] +2 [6,7,8] [6.6,5.5,6.6] ["d","e","d"] ["2023-01-04","2023-01-05","2023-01-06"] [7,6,8] +3 None [7.7,8.8,8.8] ["g","h","h"] ["2023-01-07","2023-01-08","2023-01-09"] None +4 [9,10,11] None ["k","k","j"] ["2023-01-10","2023-01-12","2023-01-11"] [11,9,10] +5 [12,13,14] [10.1,11.11,11.11] None ["2023-01-13","2023-01-14","2023-01-15"] [12,13,14] +6 [15,16,17] [14.14,13.13,14.14] ["m","o","o"] None [16,15,17] +7 [18,19,20] [16.16,16.16,18.18] ["p","p","r"] ["2023-01-16",null,"2023-01-18"] [19,18,20] +8 [21,22,23] [19.19,20.2,19.19] ["a","t","a"] ["2023-01-19","2023-01-20","2023-01-21"] [21,23,22] +9 [24,25,26] None ["y","y","z"] ["2023-01-25","2023-01-24","2023-01-26"] [25,24,26] +10 [24,25,26] None ["y","y","z"] ["2023-01-25",null,"2023-01-26"] [25,24,26] +-- !result +-- name: test_array_sortby_2 +CREATE TABLE __row_util_base ( + k1 bigint NULL +) ENGINE=OLAP +DUPLICATE KEY(`k1`) +DISTRIBUTED BY HASH(`k1`) BUCKETS 32 +PROPERTIES ( + "replication_num" = "1" +); +-- result: +-- !result +insert into __row_util_base select generate_series from TABLE(generate_series(0, 10000 - 1)); +-- result: +-- !result +insert into __row_util_base select * from __row_util_base; -- 20000 +insert into __row_util_base select * from __row_util_base; -- 40000 +insert into __row_util_base select * from __row_util_base; -- 80000 +insert into __row_util_base select * from __row_util_base; -- 160000 +insert into __row_util_base select * from __row_util_base; -- 320000 +insert into 
__row_util_base select * from __row_util_base; -- 640000 + +CREATE TABLE __row_util ( + idx bigint NULL, + array_c1 ARRAY +) ENGINE=OLAP +DUPLICATE KEY(`idx`) +DISTRIBUTED BY HASH(`idx`) BUCKETS 32 +PROPERTIES ( + "replication_num" = "1" +); +-- result: +-- !result +insert into __row_util +select + row_number() over() as idx, + array_generate(10) +from __row_util_base; +-- result: +-- !result +CREATE TABLE t1 ( + id INT(11) not null, + int_1 ARRAY, + int_2 ARRAY, + str_1 ARRAY, + date_1 ARRAY +) ENGINE=OLAP +DUPLICATE KEY(id) +COMMENT "OLAP" +DISTRIBUTED BY HASH(id) BUCKETS 32 +PROPERTIES ( + "replication_num" = "1" +); +-- result: +-- !result +insert into t1 +select + idx, + array_c1, + array_map(array_c1, x -> case when idx % 13 != 0 then x % 3 else null end), + array_map(array_c1, x -> case when idx % 13 != 0 then concat('abc-', x % 5) else null end), + array_map(array_c1, x -> case when idx % 13 != 0 then date_sub('2023-11-02', interval cast(x % 2 as int) day) else null end) +from __row_util; +-- result: +-- !result +with w1 as ( + select *, array_sortby(int_1, int_2) as x from t1 +) +select array_join(x, '-'), int_1, int_2 +from w1 +order by id limit 10; +-- result: +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +-- !result +with w1 as ( + select *, array_sortby(int_1, int_2, str_1, date_1) as x from t1 +) +select array_join(x, '-'), int_1, int_2, 
str_1, date_1 +from w1 +order by id limit 10; +-- result: +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] ["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] ["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] ["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] ["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] ["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] ["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] 
["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] ["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] ["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] ["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +-- !result +with w1 as ( + select id, array_sortby(int_1, int_2) as x from t1 +), w2 as ( + select array_join(x, '-') as x + from w1 +) +select ifnull(sum(murmur_hash3_32(x)), 0) +from w2; +-- result: +-1282820609342570 +-- !result +with w1 as ( + select id, array_sortby(int_1, int_2, str_1, date_1) as x from t1 +), w2 as ( + select array_join(x, '-') as x + from w1 +) +select ifnull(sum(murmur_hash3_32(x)), 0) +from w2; +-- result: +-1083325988181210 +-- !result +CREATE TABLE t2 ( + id INT(11) not null, + int_1 ARRAY, + int_2 ARRAY, + str_1 ARRAY, + date_1 ARRAY +) ENGINE=OLAP +DUPLICATE KEY(id) +COMMENT "OLAP" +DISTRIBUTED BY HASH(id) BUCKETS 32 +PROPERTIES ( + "replication_num" = "1" +); +-- result: +-- !result +insert into t2 +select + idx, + array_c1, + case when idx % 11 != 0 then array_map(array_c1, x -> case when idx % 13 != 0 then x % 3 else null end) else null end, + case when idx % 11 != 0 then array_map(array_c1, x -> case when idx % 13 != 0 then concat('abc-', x % 5) else null end) else null 
end, + case when idx % 11 != 0 then array_map(array_c1, x -> case when idx % 13 != 0 then date_sub('2023-11-02', interval cast(x % 2 as int) day) else null end) else null end +from __row_util; +-- result: +-- !result +with w1 as ( + select *, array_sortby(int_1, int_2) as x from t2 +) +select array_join(x, '-'), int_1, int_2 +from w1 +order by id limit 10; +-- result: +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +-- !result +with w1 as ( + select *, array_sortby(int_1, int_2, str_1, date_1) as x from t2 +) +select array_join(x, '-'), int_1, int_2, str_1, date_1 +from w1 +order by id limit 10; +-- result: +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] ["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] ["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] 
["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] ["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] ["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] ["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] ["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] ["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] ["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] 
["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +-- !result +with w1 as ( + select id, array_sortby(int_1, int_2) as x from t2 +), w2 as ( + select array_join(x, '-') as x + from w1 +) +select ifnull(sum(murmur_hash3_32(x)), 0) +from w2; +-- result: +-1201770879306824 +-- !result +with w1 as ( + select id, array_sortby(int_1, int_2, str_1, date_1) as x from t2 +), w2 as ( + select array_join(x, '-') as x + from w1 +) +select ifnull(sum(murmur_hash3_32(x)), 0) +from w2; +-- result: +-1020412010001672 +-- !result +CREATE TABLE t3 ( + id INT(11) not null, + int_1 ARRAY not null, + int_2 ARRAY not null, + str_1 ARRAY not null, + date_1 ARRAY not null +) ENGINE=OLAP +DUPLICATE KEY(id) +COMMENT "OLAP" +DISTRIBUTED BY HASH(id) BUCKETS 32 +PROPERTIES ( + "replication_num" = "1" +); +-- result: +-- !result +insert into t3 +select + idx, + array_c1, + array_map(array_c1, x -> x % 3), + array_map(array_c1, x -> concat('abc-', x % 5)), + array_map(array_c1, x -> date_sub('2023-11-02', interval cast(x % 2 as int) day)) +from __row_util; +-- result: +-- !result +with w1 as ( + select *, array_sortby(int_1, int_2) as x from t3 +) +select array_join(x, '-'), int_1, int_2 +from w1 +order by id limit 10; +-- result: +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +3-6-9-1-4-7-10-2-5-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] +-- !result +with w1 as ( + 
select *, array_sortby(int_1, int_2, str_1, date_1) as x from t3 +) +select array_join(x, '-'), int_1, int_2, str_1, date_1 +from w1 +order by id limit 10; +-- result: +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] ["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] ["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] ["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] ["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] ["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] ["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] 
["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] ["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] ["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] ["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +6-3-9-10-1-7-4-5-2-8 [1,2,3,4,5,6,7,8,9,10] [1,2,0,1,2,0,1,2,0,1] ["abc-1","abc-2","abc-3","abc-4","abc-0","abc-1","abc-2","abc-3","abc-4","abc-0"] ["2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02","2023-11-01","2023-11-02"] +-- !result +with w1 as ( + select id, array_sortby(int_1, int_2) as x from t3 +), w2 as ( + select array_join(x, '-') as x + from w1 +) +select ifnull(sum(murmur_hash3_32(x)), 0) +from w2; +-- result: +-1357115440640000 +-- !result +with w1 as ( + select id, array_sortby(int_1, int_2, str_1, date_1) as x from t3 +), w2 as ( + select array_join(x, '-') as x + from w1 +) +select ifnull(sum(murmur_hash3_32(x)), 0) +from w2; +-- result: +-1140996549120000 +-- !result +with w1 as ( + select id, array_sortby(int_1, int_2) as x from t3 where id < -1 +) +select count(x) +from w1; +-- result: +0 +-- !result +select array_sortby([1,2,3,4,5,6], cast(null as array), cast(null as array), cast(null as array)); +-- result: +[1,2,3,4,5,6] +-- !result +select array_sortby([1,2,3,4,5,6], ['a', 'b', 'c', 'c', 'b', 'a'], cast(null as array), cast(null as array), [11, 22, 32, 31, 21, 12], cast(null as array)); +-- result: +[1,6,5,2,4,3] +-- !result 
+select array_sortby(cast(null as array), cast(null as array), cast(null as array)); +-- result: +None +-- !result +select array_sortby([1,2,3,4,5,6], ['a', 'b', 'c', 'c', 'b', 'a'], [11, 22, 32, 31, 21, 12]); +-- result: +[1,6,5,2,4,3] +-- !result +select array_sortby([1,2,3,4,5,6], ['a', 'b', 'c', 'c', 'b', 'a'], cast(['2023-11-02', '2023-11-03', '2023-11-04', '2023-11-05', '2023-11-06', '2023-11-07'] as array)); +-- result: +[1,6,2,5,3,4] +-- !result +select array_sortby([1,2,null,4,5,6], ['a', 'b', 'c', 'c', 'b', 'a'], [11, 22, 32, 31, 21, 12]); +-- result: +[1,6,5,2,4,null] +-- !result +select array_sortby([1,2,null,4,5,6], ['a', 'b', 'c', 'c', 'b', 'a'], cast(['2023-11-02', '2023-11-03', '2023-11-04', '2023-11-05', '2023-11-06', '2023-11-07'] as array)); +-- result: +[1,6,2,5,null,4] +-- !result +select array_sortby([1,2,3,4,5,6], ['a', 'b', null, null, 'b', 'a'], [11, 22, 32, 31, 21, 12]); +-- result: +[4,3,1,6,5,2] +-- !result +select array_sortby([1,2,3,4,5,6], ['a', 'b', null, null, 'b', 'a'], [11, 22, 32, 31, null, null]); +-- result: +[4,3,6,1,5,2] +-- !result +select array_sortby([1,2,3,4,5,6], ['a', 'b', null, null, 'b', 'a'], cast(['2023-11-02', '2023-11-03', '2023-11-04', '2023-11-05', null ,null] as array)); +-- result: +[3,4,6,1,5,2] +-- !result +select array_sortby([1,2,3,4,5,6], ['a', 'b', 'c', 'c', 'b', 'a'], cast(['2023-11-02', '2023-11-03', '2023-11-04', '2023-11-05', '2023-11-06', '2023-11-07'] as array)); +-- result: +[1,6,2,5,3,4] +-- !result +select array_sortby([1,2,null,4,5,6], ['a', 'b', 'c', 'c', 'b', 'a', 1], [11, 22, 32, 31, 21, 12]); +-- result: +[REGEX].*Input arrays' size are not equal in array_sortby.* +-- !result +select array_sortby([1,2,null,4,5,6], ['a', 'b', 'c', 'c', 'b', 'a', 1], cast(null as array), [11, 22, 32, 31, 21, 12]); +-- result: +[REGEX].*Input arrays' size are not equal in array_sortby.* +-- !result +select array_sortby([1,2,3,4,5,6], ['a', 'b', null, null, 'b', 'a', 1], [11, 22, 32, 31, 21, 12]); +-- result: 
+[REGEX].*Input arrays' size are not equal in array_sortby.* +-- !result +select array_sortby([1,2,3,4,5,6], ['a', 'b', null, null, 'b', 'a'], [11, 22, 32, 31, null, null, 1]); +-- result: +[REGEX].*Input arrays' size are not equal in array_sortby.* +-- !result +-- name: test_array_sortby_3 +with w1 as (select column_0 as source, column_1 as key1, column_2 as key2 from (values + ([1, 2], null, [1, 1]), + ([3, 4], [40, 30], [1, 1]), + ([5, 6], null, [1, 1]) +) t) +select array_sortby(source, key1, key2), source, key1, key2 from w1; +-- result: +[1,2] [1,2] None [1,1] +[4,3] [3,4] [40,30] [1,1] +[5,6] [5,6] None [1,1] +-- !result \ No newline at end of file diff --git a/test/sql/test_decimal/R/test_decimal_overflow b/test/sql/test_decimal/R/test_decimal_overflow index 082a529b9248a2..3e6939837f674b 100644 --- a/test/sql/test_decimal/R/test_decimal_overflow +++ b/test/sql/test_decimal/R/test_decimal_overflow @@ -43,7 +43,7 @@ None -- !result select /*+ SET_VAR(sql_mode='ERROR_IF_OVERFLOW')*/ 274.97790000000000000000 * (round(1103.00000000000000000000 * 1.0000,16) /round(1103.00000000000000000000,16)); -- result: -E: (1064, "Expr evaluate meet error: The 'mul' operation involving decimal values overflows") +[REGEX].*Expr evaluate meet error: The 'mul' operation involving decimal values overflows.* -- !result select cast(c_d32 * c_d32 as decimal32) from t_decimal_overflow where c_id = 1; -- result: @@ -95,51 +95,51 @@ None -- !result select /*+ SET_VAR(sql_mode='ERROR_IF_OVERFLOW')*/ cast(c_d32 * c_d32 as decimal32) from t_decimal_overflow where c_id = 1; -- result: -E: (1064, 'Expr evaluate meet error: The type cast from decimal to decimal overflows') +[REGEX].*Expr evaluate meet error: The type cast from decimal to decimal overflows.* -- !result select /*+ SET_VAR(sql_mode='ERROR_IF_OVERFLOW')*/ cast(c_d32 * c_d32 as decimal32) from t_decimal_overflow where c_id = 2; -- result: -E: (1064, 'Expr evaluate meet error: The type cast from decimal to decimal overflows') 
+[REGEX].*Expr evaluate meet error: The type cast from decimal to decimal overflows.* -- !result select /*+ SET_VAR(sql_mode='ERROR_IF_OVERFLOW')*/ cast(c_d64 * c_d64 as decimal64) from t_decimal_overflow where c_id = 1; -- result: -E: (1064, 'Expr evaluate meet error: The type cast from decimal to decimal overflows') +[REGEX].*Expr evaluate meet error: The type cast from decimal to decimal overflows.* -- !result select /*+ SET_VAR(sql_mode='ERROR_IF_OVERFLOW')*/ cast(c_d64 * c_d64 as decimal64) from t_decimal_overflow where c_id = 2; -- result: -E: (1064, 'Expr evaluate meet error: The type cast from decimal to decimal overflows') +[REGEX].*Expr evaluate meet error: The type cast from decimal to decimal overflows.* -- !result select /*+ SET_VAR(sql_mode='ERROR_IF_OVERFLOW')*/ cast(c_d128 * c_d128 as decimal128) from t_decimal_overflow where c_id = 1; -- result: -E: (1064, "Expr evaluate meet error: The 'mul' operation involving decimal values overflows") +[REGEX].*Expr evaluate meet error: The 'mul' operation involving decimal values overflows.* -- !result select /*+ SET_VAR(sql_mode='ERROR_IF_OVERFLOW')*/ cast(c_d128 * c_d128 as decimal128) from t_decimal_overflow where c_id = 2; -- result: -E: (1064, "Expr evaluate meet error: The 'mul' operation involving decimal values overflows") +[REGEX].*Expr evaluate meet error: The 'mul' operation involving decimal values overflows.* -- !result select /*+ SET_VAR(sql_mode='ERROR_IF_OVERFLOW')*/ cast(c_d32 * 1.000 as decimal32) from t_decimal_overflow where c_id = 1; -- result: -E: (1064, 'Expr evaluate meet error: The type cast from decimal to decimal overflows') +[REGEX].*Expr evaluate meet error: The type cast from decimal to decimal overflows.* -- !result select /*+ SET_VAR(sql_mode='ERROR_IF_OVERFLOW')*/ cast(c_d32 * 1.000 as decimal32) from t_decimal_overflow where c_id = 2; -- result: -E: (1064, 'Expr evaluate meet error: The type cast from decimal to decimal overflows') +[REGEX].*Expr evaluate meet error: The type 
cast from decimal to decimal overflows.* -- !result select /*+ SET_VAR(sql_mode='ERROR_IF_OVERFLOW')*/ cast(c_d64 * 1.000000 as decimal64) from t_decimal_overflow where c_id = 1; -- result: -E: (1064, 'Expr evaluate meet error: The type cast from decimal to decimal overflows') +[REGEX].*Expr evaluate meet error: The type cast from decimal to decimal overflows.* -- !result select /*+ SET_VAR(sql_mode='ERROR_IF_OVERFLOW')*/ cast(c_d64 * 1.000000 as decimal64) from t_decimal_overflow where c_id = 2; -- result: -E: (1064, 'Expr evaluate meet error: The type cast from decimal to decimal overflows') +[REGEX].*Expr evaluate meet error: The type cast from decimal to decimal overflows.* -- !result select /*+ SET_VAR(sql_mode='ERROR_IF_OVERFLOW')*/ cast(c_d128 * 1.000000000 as decimal128) from t_decimal_overflow where c_id = 1; -- result: -E: (1064, "Expr evaluate meet error: The 'mul' operation involving decimal values overflows") +[REGEX].*Expr evaluate meet error: The 'mul' operation involving decimal values overflows.* -- !result select /*+ SET_VAR(sql_mode='ERROR_IF_OVERFLOW')*/ cast(c_d128 * 1.000000000 as decimal128) from t_decimal_overflow where c_id = 2; -- result: -E: (1064, "Expr evaluate meet error: The 'mul' operation involving decimal values overflows") +[REGEX].*Expr evaluate meet error: The 'mul' operation involving decimal values overflows.* -- !result select c_id - 1.12345678901234567890 from t_decimal_overflow where c_id = 1; -- result: @@ -155,9 +155,9 @@ select avg(c0- 2.8665963056616452*(lt - 3.062472673706541)) as adjust_lt from (s -- !result select /*+ SET_VAR(sql_mode='ERROR_IF_OVERFLOW')*/ max(lt- 2.8665963056616452*(c2 - 3.062472673706541)) as adjust_lt from (select c0, array_sum(c1) lt, c2 from avg_test) t group by c0; -- result: -E: (1064, "Expr evaluate meet error: The 'mul' operation involving decimal values overflows") +[REGEX].*Expr evaluate meet error: The 'mul' operation involving decimal values overflows.* -- !result select /*+ 
SET_VAR(sql_mode='ERROR_IF_OVERFLOW')*/ avg(lt- 2.8665963056616452*(c2 - 3.062472673706541)) as adjust_lt from (select c0, array_sum(c1) lt, c2 from avg_test) t group by c0; -- result: -E: (1064, "Expr evaluate meet error: The 'mul' operation involving decimal values overflows") +[REGEX].*Expr evaluate meet error: The 'mul' operation involving decimal values overflows.* -- !result \ No newline at end of file diff --git a/test/sql/test_dict_mapping_function/R/test_dict_mapping_function b/test/sql/test_dict_mapping_function/R/test_dict_mapping_function new file mode 100644 index 00000000000000..0236dc565d3213 --- /dev/null +++ b/test/sql/test_dict_mapping_function/R/test_dict_mapping_function @@ -0,0 +1,309 @@ +-- name: test_dict_mapping_streamload +CREATE DATABASE test_dict_mapping_streamload; +-- result: +-- !result +USE test_dict_mapping_streamload; +-- result: +-- !result +CREATE TABLE `t_dict_mapping_streamload` ( + `id1` bigint(20) NOT NULL COMMENT "", + `id2` bigint(20) NOT NULL AUTO_INCREMENT COMMENT "" +) ENGINE=OLAP +PRIMARY KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "true", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_dict_mapping_streamload VALUES (1, DEFAULT),(2, DEFAULT),(3, DEFAULT); +-- result: +-- !result +CREATE TABLE `test_table` ( + `id1` bigint(20) NOT NULL COMMENT "", + `id2` bigint(20) NOT NULL COMMENT "" +) ENGINE=OLAP +PRIMARY KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "true", +"compression" = "LZ4" +); +-- result: +-- !result +shell: curl --location-trusted -u root: -T ${root_path}/lib/../common/data/stream_load/sr_dict_mapping_case_1.csv -XPUT -H partial_update:false -H label:stream_load_dict_mapping_case_1 -H column_separator:, -H columns:"id1, id2=dict_mapping('t_dict_mapping_streamload', id1)" 
${url}/api/test_dict_mapping_streamload/test_table/_stream_load +-- result: +0 +{ + "Status": "Success", + "Message": "OK" +} +-- !result +sync; +-- result: +-- !result +SELECT * FROM test_table; +-- result: +1 1 +2 2 +3 3 +-- !result +DROP TABLE t_dict_mapping_streamload; +-- result: +-- !result +DROP DATABASE test_dict_mapping_streamload; +-- result: +-- !result +-- name: test_dictmapping_multiple_column +CREATE DATABASE test_dictmapping_multiple_column; +-- result: +-- !result +use test_dictmapping_multiple_column; +-- result: +-- !result +create table dict(col_1 int, col_2 string, col_3 bigint not null auto_increment, col_4 int) +primary key(col_1, col_2) +distributed by hash(col_1, col_2) +PROPERTIES ( +"replication_num" = "1" +); +-- result: +-- !result +INSERT INTO dict VALUES (1, "abc", default, 0); +-- result: +-- !result +create table t(col_1 int, col_2 string) +primary key(col_1) +distributed by hash(col_1) +PROPERTIES ( +"replication_num" = "1" +); +-- result: +-- !result +INSERT INTO t VALUES (1, "abc"); +-- result: +-- !result +SELECT dict_mapping("dict", col_1, col_2) FROM t; +-- result: +1 +-- !result +DROP DATABASE test_dictmapping_multiple_column; +-- result: +-- !result +-- name: test_dictmapping_null_column +CREATE DATABASE test_dictmapping_null_column; +-- result: +-- !result +use test_dictmapping_null_column; +-- result: +-- !result +create table dict(col_1 int, col_2 string, col_3 bigint not null auto_increment, col_4 int) +primary key(col_1, col_2) +distributed by hash(col_1, col_2) +PROPERTIES ( +"replication_num" = "1" +); +-- result: +-- !result +INSERT INTO dict VALUES (1, "abc", default, 0); +-- result: +-- !result +create table t(col_1 int, col_2 string) +primary key(col_1) +distributed by hash(col_1) +PROPERTIES ( +"replication_num" = "1" +); +-- result: +-- !result +INSERT INTO t VALUES (1, NULL); +-- result: +-- !result +SELECT dict_mapping("dict", col_1, col_2) FROM t; +-- result: +[REGEX].*invalid parameter : get NULL paramenter.* 
+-- !result +DROP DATABASE test_dictmapping_null_column; +-- result: +-- !result +-- name: test_dictmapping_DictQueryOperator_bug +CREATE DATABASE test_dictmapping_DictQueryOperator_bug; +-- result: +-- !result +USE test_dictmapping_DictQueryOperator_bug; +-- result: +-- !result +create table dict(col_1 int, col_2 string, col_3 bigint not null auto_increment, col_4 int) + primary key(col_1) + distributed by hash(col_1) +PROPERTIES ( +"replication_num" = "1" +); +-- result: +-- !result +insert into dict values(1, 'hello world 1', default, 1 * 10); +-- result: +-- !result +insert into dict values(2, 'hello world 2', default, 2 * 10); +-- result: +-- !result +insert into dict values(3, 'hello world 3', default, 3 * 10); +-- result: +-- !result +insert into dict values(4, 'hello world 4', default, 4 * 10); +-- result: +-- !result +insert into dict values(5, 'hello world 5', default, 5 * 10); +-- result: +-- !result +insert into dict values(6, 'hello world 6', default, 6 * 10); +-- result: +-- !result +insert into dict values(7, 'hello world 7', default, 7 * 10); +-- result: +-- !result +insert into dict values(8, 'hello world 8', default, 8 * 10); +-- result: +-- !result +insert into dict values(9, 'hello world 9', default, 9 * 10); +-- result: +-- !result +insert into dict values(10, 'hello world 10', default, 10 * 10); +-- result: +-- !result +create table fact_tbl_2(col_i int, col_2 varchar(20), col_mapvalue bigint) +PROPERTIES ( +"replication_num" = "1" +); +-- result: +-- !result +insert into fact_tbl_2 + values + (1, 'Beijing', DICT_mapping("dict", 1)), + (2, 'Shenzhen', DICT_MAPping("dict", 2, "col_3")), + (3, 'Shanghai', Dict_Mapping("dict", 3, "col_4")); +-- result: +-- !result +DROP DATABASE test_dictmapping_DictQueryOperator_bug; +-- result: +-- !result +-- name: test_dictmapping_add_generated_column_with_dict_mapping +CREATE DATABASE test_dictmapping_add_generated_column_with_dict_mapping; +-- result: +-- !result +USE 
test_dictmapping_add_generated_column_with_dict_mapping; +-- result: +-- !result +create table dict(col_1 int, col_2 string, col_3 bigint not null auto_increment, col_4 int) +primary key(col_1, col_2) +distributed by hash(col_1, col_2) +PROPERTIES ( +"replication_num" = "1" +); +-- result: +-- !result +INSERT INTO dict VALUES (1, "abc", default, 0); +-- result: +-- !result +create table t_dictmapping_add_generated_column_with_dict_mapping(col_1 int, col_2 string) +primary key(col_1) +distributed by hash(col_1) +PROPERTIES ( +"replication_num" = "1" +); +-- result: +-- !result +INSERT INTO t_dictmapping_add_generated_column_with_dict_mapping VALUES (1, "abc"); +-- result: +-- !result +ALTER TABLE t_dictmapping_add_generated_column_with_dict_mapping ADD COLUMN newcol BIGINT AS +dict_mapping("dict", col_1, col_2); +-- result: +-- !result +function: wait_alter_table_finish() +-- result: +None +-- !result +SELECT * FROM t_dictmapping_add_generated_column_with_dict_mapping; +-- result: +1 abc 1 +-- !result +DROP TABLE t_dictmapping_add_generated_column_with_dict_mapping; +-- result: +-- !result +DROP DATABASE test_dictmapping_add_generated_column_with_dict_mapping; +-- result: +-- !result +-- name: test_dictmapping_generated_column_in_create_table +CREATE DATABASE test_dictmapping_generated_column_in_create_table; +-- result: +-- !result +USE test_dictmapping_generated_column_in_create_table; +-- result: +-- !result +create table dict(col_1 int, col_2 string, col_3 bigint not null auto_increment, col_4 int) +primary key(col_1) +distributed by hash(col_1) +PROPERTIES ( +"replication_num" = "1" +); +-- result: +-- !result +create table fact_tbl_1(col_1 int, col_2 varchar(20), col_mapvalue bigint as Dict_Mapping("dict", COL_1, false)) +PROPERTIES ( +"replication_num" = "1" +); +-- result: +E: (1064, 'Getting analyzing error. 
Detail message: column:COL_1 does not exist.') +-- !result +DROP DATABASE test_dictmapping_generated_column_in_create_table; +-- result: +-- !result +-- name: test_dictmapping_null_if_not_found +CREATE DATABASE test_dictmapping_null_if_not_found; +-- result: +-- !result +USE test_dictmapping_null_if_not_found; +-- result: +-- !result +CREATE TABLE `t_dictmapping_null_if_not_found` ( + `k` BIGINT NOT NULL COMMENT "", + `v` BIGINT AUTO_INCREMENT +) ENGINE=OLAP +PRIMARY KEY(`k`) +DISTRIBUTED BY HASH(`k`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"in_memory" = "false", +"enable_persistent_index" = "false", +"replicated_storage" = "true", +"compression" = "LZ4" +); +-- result: +-- !result +insert into t_dictmapping_null_if_not_found values (1,default); +-- result: +-- !result +select dict_mapping("t_dictmapping_null_if_not_found", 2); +-- result: +[REGEX].*query failed if record not exist in dict table.* +-- !result +select dict_mapping("t_dictmapping_null_if_not_found", 2, false); +-- result: +[REGEX].*query failed if record not exist in dict table.* +-- !result +select dict_mapping("t_dictmapping_null_if_not_found", 2, true); +-- result: +None +-- !result +drop table t_dictmapping_null_if_not_found; +-- result: +-- !result +drop database test_dictmapping_null_if_not_found; +-- result: +-- !result diff --git a/test/sql/test_dictionary/R/test_dictionary b/test/sql/test_dictionary/R/test_dictionary new file mode 100644 index 00000000000000..5d649dbd7a67af --- /dev/null +++ b/test/sql/test_dictionary/R/test_dictionary @@ -0,0 +1,1639 @@ +-- name: test_dictionary_empty +CREATE TABLE `t_empty` ( + `id1` BIGINT NOT NULL COMMENT "", + `id2` BIGINT NOT NULL COMMENT "", + `id3` BIGINT NOT NULL COMMENT "" +) ENGINE=OLAP +PRIMARY KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "true", +"compression" = "LZ4" +); +-- result: +-- !result +[UC]DROP DICTIONARY 
test_dictionary_empty; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_empty does not exist.') +-- !result +CREATE DICTIONARY test_dictionary_empty USING t_empty (id1 KEY, id2 VALUE); +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_empty", "FINISHED") +-- result: +None +-- !result +DROP DICTIONARY test_dictionary_empty; +-- result: +-- !result +DROP TABLE t_empty; +-- result: +-- !result +-- name: test_dictionary_basic_operation +CREATE TABLE `t_basic_operation` ( + `id1` BIGINT NOT NULL COMMENT "", + `id2` BIGINT NOT NULL COMMENT "", + `id3` BIGINT NOT NULL COMMENT "" +) ENGINE=OLAP +PRIMARY KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "true", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_basic_operation VALUES (1, 2, 3); +-- result: +-- !result +[UC]DROP DICTIONARY test_dictionary_basic_operation; +-- result: +E: (1064, 'Getting analyzing error. 
Detail message: dictionary: test_dictionary_basic_operation does not exist.') +-- !result +CREATE DICTIONARY test_dictionary_basic_operation USING t_basic_operation (id1 KEY, id2 VALUE); +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_basic_operation", "FINISHED") +-- result: +None +-- !result +REFRESH DICTIONARY test_dictionary_basic_operation; +-- result: +-- !result +CANCEL REFRESH DICTIONARY test_dictionary_basic_operation; +-- result: +-- !result +REFRESH DICTIONARY test_dictionary_basic_operation; +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_basic_operation", "FINISHED") +-- result: +None +-- !result +SELECT dictionary_get("test_dictionary_basic_operation", id1) FROM t_basic_operation; +-- result: +{"id2":2} +-- !result +SELECT dictionary_get("test_dictionary_basic_operation", 1); +-- result: +{"id2":2} +-- !result +SELECT dictionary_get("test_dictionary_basic_operation", 2); +-- result: +[REGEX].*key not found in dictionary cache.* +-- !result +DROP DICTIONARY test_dictionary_basic_operation CACHE; +-- result: +-- !result +SELECT dictionary_get("test_dictionary_basic_operation", id1) FROM t_basic_operation; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_basic_operation is in UNINITIALIZED state.') +-- !result +SELECT dictionary_get("test_dictionary_basic_operation", 1); +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_basic_operation is in UNINITIALIZED state.') +-- !result +DROP DICTIONARY test_dictionary_basic_operation; +-- result: +-- !result +DROP TABLE test_dictionary_basic_operation; +-- result: +E: (5502, "Getting analyzing error. 
Detail message: Unknown table 'test_dictionary_basic_operation'.") +-- !result +-- name: test_dictionary_type_combination +CREATE TABLE `t_type_combination` ( + `id1` BIGINT NOT NULL COMMENT "", + `id2` TINYINT NOT NULL COMMENT "", + `id3` INT NOT NULL COMMENT "", + `id4` BOOLEAN NOT NULL COMMENT "", + `id5` LARGEINT NOT NULL COMMENT "", + `id6` VARCHAR(2000) NOT NULL COMMENT "", + `id7` DATE NOT NULL COMMENT "", + `id8` DATETIME NOT NULL COMMENT "", + `id9` SMALLINT NOT NULL COMMENT "" +) ENGINE=OLAP +PRIMARY KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "true", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_type_combination VALUES (1, 2, 3, "true", 5, "a", "2020-01-01", "2020-01-01 00:00:00", 6); +-- result: +-- !result +[UC]DROP DICTIONARY test_dictionary_type_combination_1; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_type_combination_1 does not exist.') +-- !result +[UC]DROP DICTIONARY test_dictionary_type_combination_2; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_type_combination_2 does not exist.') +-- !result +[UC]DROP DICTIONARY test_dictionary_type_combination_3; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_type_combination_3 does not exist.') +-- !result +[UC]DROP DICTIONARY test_dictionary_type_combination_4; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_type_combination_4 does not exist.') +-- !result +[UC]DROP DICTIONARY test_dictionary_type_combination_5; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_type_combination_5 does not exist.') +-- !result +[UC]DROP DICTIONARY test_dictionary_type_combination_6; +-- result: +E: (1064, 'Getting analyzing error. 
Detail message: dictionary: test_dictionary_type_combination_6 does not exist.') +-- !result +[UC]DROP DICTIONARY test_dictionary_type_combination_7; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_type_combination_7 does not exist.') +-- !result +[UC]DROP DICTIONARY test_dictionary_type_combination_8; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_type_combination_8 does not exist.') +-- !result +[UC]DROP DICTIONARY test_dictionary_type_combination_9; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_type_combination_9 does not exist.') +-- !result +CREATE DICTIONARY test_dictionary_type_combination_1 USING t_type_combination (id1 KEY, id2 VALUE); +-- result: +-- !result +CREATE DICTIONARY test_dictionary_type_combination_2 USING t_type_combination (id2 KEY, id3 VALUE); +-- result: +-- !result +CREATE DICTIONARY test_dictionary_type_combination_3 USING t_type_combination (id3 KEY, id4 VALUE); +-- result: +-- !result +CREATE DICTIONARY test_dictionary_type_combination_4 USING t_type_combination (id4 KEY, id5 VALUE); +-- result: +-- !result +CREATE DICTIONARY test_dictionary_type_combination_5 USING t_type_combination (id5 KEY, id6 VALUE); +-- result: +-- !result +CREATE DICTIONARY test_dictionary_type_combination_6 USING t_type_combination (id6 KEY, id7 VALUE); +-- result: +-- !result +CREATE DICTIONARY test_dictionary_type_combination_7 USING t_type_combination (id7 KEY, id8 VALUE); +-- result: +-- !result +CREATE DICTIONARY test_dictionary_type_combination_8 USING t_type_combination (id8 KEY, id9 VALUE); +-- result: +-- !result +CREATE DICTIONARY test_dictionary_type_combination_9 USING t_type_combination (id9 KEY, id1 VALUE); +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_type_combination_1", "FINISHED") +-- result: +None +-- !result +function: 
wait_refresh_dictionary_finish("test_dictionary_type_combination_2", "FINISHED") +-- result: +None +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_type_combination_3", "FINISHED") +-- result: +None +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_type_combination_4", "FINISHED") +-- result: +None +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_type_combination_5", "FINISHED") +-- result: +None +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_type_combination_6", "FINISHED") +-- result: +None +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_type_combination_7", "FINISHED") +-- result: +None +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_type_combination_8", "FINISHED") +-- result: +None +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_type_combination_9", "FINISHED") +-- result: +None +-- !result +SELECT dictionary_get("test_dictionary_type_combination_1", id1) FROM t_type_combination; +-- result: +{"id2":2} +-- !result +SELECT dictionary_get("test_dictionary_type_combination_2", id2) FROM t_type_combination; +-- result: +{"id3":3} +-- !result +SELECT dictionary_get("test_dictionary_type_combination_3", id3) FROM t_type_combination; +-- result: +{"id4":1} +-- !result +SELECT dictionary_get("test_dictionary_type_combination_4", id4) FROM t_type_combination; +-- result: +{"id5":5} +-- !result +SELECT dictionary_get("test_dictionary_type_combination_5", id5) FROM t_type_combination; +-- result: +{"id6":"a"} +-- !result +SELECT dictionary_get("test_dictionary_type_combination_6", id6) FROM t_type_combination; +-- result: +{"id7":"2020-01-01"} +-- !result +SELECT dictionary_get("test_dictionary_type_combination_7", id7) FROM t_type_combination; +-- result: +{"id8":"2020-01-01 00:00:00"} +-- !result +SELECT dictionary_get("test_dictionary_type_combination_8", id8) FROM t_type_combination; +-- result: +{"id9":6} +-- 
!result +SELECT dictionary_get("test_dictionary_type_combination_9", id9) FROM t_type_combination; +-- result: +{"id1":1} +-- !result +DROP DICTIONARY test_dictionary_type_combination_1; +-- result: +-- !result +DROP DICTIONARY test_dictionary_type_combination_2; +-- result: +-- !result +DROP DICTIONARY test_dictionary_type_combination_3; +-- result: +-- !result +DROP DICTIONARY test_dictionary_type_combination_4; +-- result: +-- !result +DROP DICTIONARY test_dictionary_type_combination_5; +-- result: +-- !result +DROP DICTIONARY test_dictionary_type_combination_6; +-- result: +-- !result +DROP DICTIONARY test_dictionary_type_combination_7; +-- result: +-- !result +DROP DICTIONARY test_dictionary_type_combination_8; +-- result: +-- !result +DROP DICTIONARY test_dictionary_type_combination_9; +-- result: +-- !result +[UC]DROP DICTIONARY test_dictionary_type_combination_10; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_type_combination_10 does not exist.') +-- !result +[UC]DROP DICTIONARY test_dictionary_type_combination_11; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_type_combination_11 does not exist.') +-- !result +[UC]DROP DICTIONARY test_dictionary_type_combination_12; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_type_combination_12 does not exist.') +-- !result +[UC]DROP DICTIONARY test_dictionary_type_combination_13; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_type_combination_13 does not exist.') +-- !result +[UC]DROP DICTIONARY test_dictionary_type_combination_14; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_type_combination_14 does not exist.') +-- !result +[UC]DROP DICTIONARY test_dictionary_type_combination_15; +-- result: +E: (1064, 'Getting analyzing error. 
Detail message: dictionary: test_dictionary_type_combination_15 does not exist.') +-- !result +[UC]DROP DICTIONARY test_dictionary_type_combination_16; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_type_combination_16 does not exist.') +-- !result +[UC]DROP DICTIONARY test_dictionary_type_combination_17; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_type_combination_17 does not exist.') +-- !result +[UC]DROP DICTIONARY test_dictionary_type_combination_18; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_type_combination_18 does not exist.') +-- !result +CREATE DICTIONARY test_dictionary_type_combination_10 USING t_type_combination (id1 KEY, id2 KEY, id3 VALUE, id4 VALUE); +-- result: +-- !result +CREATE DICTIONARY test_dictionary_type_combination_11 USING t_type_combination (id2 KEY, id3 KEY, id4 VALUE, id5 VALUE); +-- result: +-- !result +CREATE DICTIONARY test_dictionary_type_combination_12 USING t_type_combination (id3 KEY, id4 KEY, id5 VALUE, id6 VALUE); +-- result: +-- !result +CREATE DICTIONARY test_dictionary_type_combination_13 USING t_type_combination (id4 KEY, id5 KEY, id6 VALUE, id7 VALUE); +-- result: +-- !result +CREATE DICTIONARY test_dictionary_type_combination_14 USING t_type_combination (id5 KEY, id6 KEY, id7 VALUE, id8 VALUE); +-- result: +-- !result +CREATE DICTIONARY test_dictionary_type_combination_15 USING t_type_combination (id6 KEY, id7 KEY, id8 VALUE, id9 VALUE); +-- result: +-- !result +CREATE DICTIONARY test_dictionary_type_combination_16 USING t_type_combination (id7 KEY, id8 KEY, id9 VALUE, id1 VALUE); +-- result: +-- !result +CREATE DICTIONARY test_dictionary_type_combination_17 USING t_type_combination (id8 KEY, id9 KEY, id1 VALUE, id2 VALUE); +-- result: +-- !result +CREATE DICTIONARY test_dictionary_type_combination_18 USING t_type_combination (id9 KEY, id1 KEY, id2 VALUE, id3 VALUE); +-- result: 
+-- !result +function: wait_refresh_dictionary_finish("test_dictionary_type_combination_10", "FINISHED") +-- result: +None +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_type_combination_11", "FINISHED") +-- result: +None +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_type_combination_12", "FINISHED") +-- result: +None +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_type_combination_13", "FINISHED") +-- result: +None +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_type_combination_14", "FINISHED") +-- result: +None +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_type_combination_15", "FINISHED") +-- result: +None +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_type_combination_16", "FINISHED") +-- result: +None +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_type_combination_17", "FINISHED") +-- result: +None +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_type_combination_18", "FINISHED") +-- result: +None +-- !result +SELECT dictionary_get("test_dictionary_type_combination_10", id1, id2) FROM t_type_combination; +-- result: +{"id3":3,"id4":1} +-- !result +SELECT dictionary_get("test_dictionary_type_combination_11", id2, id3) FROM t_type_combination; +-- result: +{"id4":1,"id5":5} +-- !result +SELECT dictionary_get("test_dictionary_type_combination_12", id3, id4) FROM t_type_combination; +-- result: +{"id5":5,"id6":"a"} +-- !result +SELECT dictionary_get("test_dictionary_type_combination_13", id4, id5) FROM t_type_combination; +-- result: +{"id6":"a","id7":"2020-01-01"} +-- !result +SELECT dictionary_get("test_dictionary_type_combination_14", id5, id6) FROM t_type_combination; +-- result: +{"id7":"2020-01-01","id8":"2020-01-01 00:00:00"} +-- !result +SELECT dictionary_get("test_dictionary_type_combination_15", id6, id7) FROM t_type_combination; +-- result: +{"id8":"2020-01-01 
00:00:00","id9":6} +-- !result +SELECT dictionary_get("test_dictionary_type_combination_16", id7, id8) FROM t_type_combination; +-- result: +{"id9":6,"id1":1} +-- !result +SELECT dictionary_get("test_dictionary_type_combination_17", id8, id9) FROM t_type_combination; +-- result: +{"id1":1,"id2":2} +-- !result +SELECT dictionary_get("test_dictionary_type_combination_18", id9, id1) FROM t_type_combination; +-- result: +{"id2":2,"id3":3} +-- !result +DROP DICTIONARY test_dictionary_type_combination_10; +-- result: +-- !result +DROP DICTIONARY test_dictionary_type_combination_11; +-- result: +-- !result +DROP DICTIONARY test_dictionary_type_combination_12; +-- result: +-- !result +DROP DICTIONARY test_dictionary_type_combination_13; +-- result: +-- !result +DROP DICTIONARY test_dictionary_type_combination_14; +-- result: +-- !result +DROP DICTIONARY test_dictionary_type_combination_15; +-- result: +-- !result +DROP DICTIONARY test_dictionary_type_combination_16; +-- result: +-- !result +DROP DICTIONARY test_dictionary_type_combination_17; +-- result: +-- !result +DROP DICTIONARY test_dictionary_type_combination_18; +-- result: +-- !result +DROP TABLE t_type_combination; +-- result: +-- !result +-- name: test_dictionary_definition +CREATE TABLE `t_dictionary_definition` ( + `id1` BIGINT NOT NULL COMMENT "", + `id2` STRING NOT NULL COMMENT "", + `id3` DATE NULL COMMENT "", + `id4` DATETIME NOT NULL COMMENT "", + `id5` STRING NULL COMMENT "", + `id6` DATE NULL COMMENT "", + `id7` DATETIME NOT NULL COMMENT "" +) ENGINE=OLAP +PRIMARY KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "true", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_dictionary_definition VALUES (1, "a", "2020-01-01", "2020-01-01 00:00:00", "a", "2020-01-01", "2020-01-01 00:00:00"); +-- result: +-- !result +[UC]DROP DICTIONARY test_dictionary_definition_1; +-- result: +E: (1064, 
'Getting analyzing error. Detail message: dictionary: test_dictionary_definition_1 does not exist.') +-- !result +[UC]DROP DICTIONARY test_dictionary_definition_2; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_definition_2 does not exist.') +-- !result +[UC]DROP DICTIONARY test_dictionary_definition_3; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_definition_3 does not exist.') +-- !result +CREATE DICTIONARY test_dictionary_definition_1 USING t_dictionary_definition (id1 KEY, id2 KEY, id3 KEY, id4 KEY, id5 VALUE, id6 VALUE, id7 VALUE); +-- result: +-- !result +CREATE DICTIONARY test_dictionary_definition_2 USING t_dictionary_definition (id1 KEY, id6 KEY, id3 KEY, id4 KEY, id5 VALUE, id2 VALUE, id7 VALUE); +-- result: +-- !result +CREATE DICTIONARY test_dictionary_definition_3 USING t_dictionary_definition (id7 KEY, id6 KEY, id3 KEY, id4 KEY, id5 VALUE, id2 VALUE, id1 VALUE); +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_definition_1", "FINISHED") +-- result: +None +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_definition_2", "FINISHED") +-- result: +None +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_definition_3", "FINISHED") +-- result: +None +-- !result +SELECT dictionary_get("test_dictionary_definition_1", id1, id2, id3, id4) FROM t_dictionary_definition; +-- result: +{"id5":"a","id6":"2020-01-01","id7":"2020-01-01 00:00:00"} +-- !result +SELECT dictionary_get("test_dictionary_definition_2", id1, id6, id3, id4) FROM t_dictionary_definition; +-- result: +{"id5":"a","id2":"a","id7":"2020-01-01 00:00:00"} +-- !result +SELECT dictionary_get("test_dictionary_definition_3", id7, id6, id3, id4) FROM t_dictionary_definition; +-- result: +{"id5":"a","id2":"a","id1":1} +-- !result +DROP DICTIONARY test_dictionary_definition_1; +-- result: +-- !result +DROP DICTIONARY 
test_dictionary_definition_2; +-- result: +-- !result +DROP DICTIONARY test_dictionary_definition_3; +-- result: +-- !result +INSERT INTO t_dictionary_definition VALUES (1, "a", NULL, "2020-01-01 00:00:00", NULL, NULL, "2020-01-01 00:00:00"); +-- result: +-- !result +[UC]DROP DICTIONARY test_dictionary_definition_4; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_definition_4 does not exist.') +-- !result +[UC]DROP DICTIONARY test_dictionary_definition_5; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_definition_5 does not exist.') +-- !result +[UC]DROP DICTIONARY test_dictionary_definition_6; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_definition_6 does not exist.') +-- !result +CREATE DICTIONARY test_dictionary_definition_4 USING t_dictionary_definition (id1 KEY, id2 KEY, id3 KEY, id4 KEY, id5 VALUE, id6 VALUE, id7 VALUE); +-- result: +-- !result +CREATE DICTIONARY test_dictionary_definition_5 USING t_dictionary_definition (id1 KEY, id6 KEY, id3 KEY, id4 KEY, id5 VALUE, id2 VALUE, id7 VALUE); +-- result: +-- !result +CREATE DICTIONARY test_dictionary_definition_6 USING t_dictionary_definition (id7 KEY, id6 KEY, id3 KEY, id4 KEY, id5 VALUE, id2 VALUE, id1 VALUE); +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_definition_4", "CANCELLED") +-- result: +None +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_definition_5", "CANCELLED") +-- result: +None +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_definition_6", "CANCELLED") +-- result: +None +-- !result +SELECT dictionary_get("test_dictionary_definition_4", id1, id2, id3, id4) FROM t_dictionary_definition; +-- result: +E: (1064, 'Getting analyzing error. 
Detail message: dictionary: test_dictionary_definition_4 is in CANCELLED state.') +-- !result +SELECT dictionary_get("test_dictionary_definition_5", id1, id6, id3, id4) FROM t_dictionary_definition; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_definition_5 is in CANCELLED state.') +-- !result +SELECT dictionary_get("test_dictionary_definition_6", id7, id6, id3, id4) FROM t_dictionary_definition; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_definition_6 is in CANCELLED state.') +-- !result +DROP DICTIONARY test_dictionary_definition_4; +-- result: +-- !result +DROP DICTIONARY test_dictionary_definition_5; +-- result: +-- !result +DROP DICTIONARY test_dictionary_definition_6; +-- result: +-- !result +[UC]DROP DICTIONARY test_dictionary_definition_7; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_definition_7 does not exist.') +-- !result +TRUNCATE TABLE t_dictionary_definition; +-- result: +-- !result +INSERT INTO t_dictionary_definition VALUES (1, "a", "2020-01-01", "2020-01-01 00:00:00", "a", "2020-01-01", "2020-01-01 00:00:00"); +-- result: +-- !result +CREATE DICTIONARY test_dictionary_definition_7 USING t_dictionary_definition (id1 KEY, id4 KEY, id5 VALUE, id2 KEY, id3 KEY, id6 VALUE, id7 VALUE); +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_definition_7", "FINISHED") +-- result: +None +-- !result +SELECT dictionary_get("test_dictionary_definition_7", id1, id4, id2, id3) FROM t_dictionary_definition; +-- result: +{"id5":"a","id6":"2020-01-01","id7":"2020-01-01 00:00:00"} +-- !result +DROP DICTIONARY test_dictionary_definition_7; +-- result: +-- !result +DROP TABLE t_dictionary_definition; +-- result: +-- !result +-- name: test_dictionary_error @slow +CREATE TABLE `t_dictionary_error` ( + `id1` BIGINT NOT NULL COMMENT "", + `id2` BIGINT NOT NULL COMMENT "", + `id3` BIGINT NOT NULL 
COMMENT "" +) ENGINE=OLAP +PRIMARY KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "true", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_dictionary_error VALUES (1, 2, 3); +-- result: +-- !result +[UC]DROP DICTIONARY test_dictionary_error_1; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_error_1 does not exist.') +-- !result +CREATE DICTIONARY test_dictionary_error_1 USING t_dictionary_error (id1 KEY, id2 VALUE); +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_error_1", "FINISHED") +-- result: +None +-- !result +CREATE DICTIONARY test_dictionary_error_1 USING t_dictionary_error (id1 KEY, id2 VALUE); +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_error_1 is exist.') +-- !result +DROP DICTIONARY test_dictionary_error_1; +-- result: +-- !result +[UC]DROP DICTIONARY test_dictionary_error_2; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_error_2 does not exist.') +-- !result +CREATE DICTIONARY test_dictionary_error_2 USING t_dictionary_error (id1 KEY, id1 VALUE); +-- result: +E: (1064, 'Getting analyzing error. Detail message: column: id1 is both in keys and values.') +-- !result +CREATE DICTIONARY test_dictionary_error_2 USING t_dictionary_error (id1 KEY, id2 KEY); +-- result: +E: (1064, 'Getting analyzing error. Detail message: empty value list for dictionary: test_dictionary_error_2.') +-- !result +CREATE DICTIONARY test_dictionary_error_2 USING t_dictionary_error (id1 VALUE, id2 VALUE); +-- result: +E: (1064, 'Getting analyzing error. 
Detail message: empty key list for dictionary: test_dictionary_error_2.') +-- !result +CREATE DICTIONARY test_dictionary_error_2 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("dictionary_warm_up" = "abc", "dictionary_memory_limit" = "1024", "dictionary_refresh_interval" = "3600"); +-- result: +E: (1064, 'parse dictionary_warm_up failed, given parameter: abc') +-- !result +CREATE DICTIONARY test_dictionary_error_2 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("dictionary_warm_up" = "TRUE", "dictionary_memory_limit" = "abc", "dictionary_refresh_interval" = "3600"); +-- result: +E: (1064, 'parse dictionary_memory_limit failed, given parameter: abc') +-- !result +CREATE DICTIONARY test_dictionary_error_2 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("dictionary_warm_up" = "TRUE", "dictionary_memory_limit" = "1024", "dictionary_refresh_interval" = "abc"); +-- result: +E: (1064, 'parse dictionary_refresh_interval failed, given parameter: abc') +-- !result +CREATE DICTIONARY test_dictionary_error_2 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("abc" = "bcd", "dictionary_memory_limit" = "1024", "dictionary_refresh_interval" = "abc"); +-- result: +E: (1064, 'unknown property for dictionary: abc') +-- !result +CREATE DICTIONARY test_dictionary_error_2 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("dictionary_warm_up" = "abc", "abc" = "bcd", "dictionary_refresh_interval" = "3600"); +-- result: +E: (1064, 'unknown property for dictionary: abc') +-- !result +CREATE DICTIONARY test_dictionary_error_2 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("dictionary_warm_up" = "abc", "dictionary_memory_limit" = "1024", "abc" = "bcd"); +-- result: +E: (1064, 'unknown property for dictionary: abc') +-- !result +CREATE DICTIONARY test_dictionary_error_2 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("dicTionary_warM_up" = "tRue", "dictionaRy_memory_liMit" = "1024", "dictiOnary_reFResh_inTerval" = "3600"); +-- 
result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_error_2", "FINISHED") +-- result: +None +-- !result +DROP DICTIONARY test_dictionary_error_2; +-- result: +-- !result +[UC]DROP DICTIONARY test_dictionary_error_3; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_error_3 does not exist.') +-- !result +CREATE DICTIONARY test_dictionary_error_3 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("dictionaRy_memory_liMit" = "1024"); +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_error_3", "FINISHED") +-- result: +None +-- !result +DROP DICTIONARY test_dictionary_error_3; +-- result: +-- !result +CREATE DICTIONARY test_dictionary_error_3 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("dictionaRy_memory_liMit" = "1024Mb"); +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_error_3", "FINISHED") +-- result: +None +-- !result +DROP DICTIONARY test_dictionary_error_3; +-- result: +-- !result +CREATE DICTIONARY test_dictionary_error_3 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("dictionaRy_memory_liMit" = "1024mB"); +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_error_3", "FINISHED") +-- result: +None +-- !result +DROP DICTIONARY test_dictionary_error_3; +-- result: +-- !result +CREATE DICTIONARY test_dictionary_error_3 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("dictionaRy_memory_liMit" = "1024Gb"); +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_error_3", "FINISHED") +-- result: +None +-- !result +DROP DICTIONARY test_dictionary_error_3; +-- result: +-- !result +CREATE DICTIONARY test_dictionary_error_3 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("dictionaRy_memory_liMit" = "1024GB"); +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_error_3", "FINISHED") +-- result: +None 
+-- !result +DROP DICTIONARY test_dictionary_error_3; +-- result: +-- !result +CREATE DICTIONARY test_dictionary_error_3 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("dictionaRy_memory_liMit" = "1024b"); +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_error_3", "FINISHED") +-- result: +None +-- !result +DROP DICTIONARY test_dictionary_error_3; +-- result: +-- !result +CREATE DICTIONARY test_dictionary_error_3 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("dictionaRy_memory_liMit" = "1024B"); +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_error_3", "FINISHED") +-- result: +None +-- !result +DROP DICTIONARY test_dictionary_error_3; +-- result: +-- !result +CREATE DICTIONARY test_dictionary_error_3 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("dictionaRy_memory_liMit" = "1024kB"); +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_error_3", "FINISHED") +-- result: +None +-- !result +DROP DICTIONARY test_dictionary_error_3; +-- result: +-- !result +CREATE DICTIONARY test_dictionary_error_3 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("dictionaRy_memory_liMit" = "1024KB"); +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_error_3", "FINISHED") +-- result: +None +-- !result +DROP DICTIONARY test_dictionary_error_3; +-- result: +-- !result +CREATE DICTIONARY test_dictionary_error_3 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("dictionaRy_memory_liMit" = "1024G"); +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_error_3", "FINISHED") +-- result: +None +-- !result +DROP DICTIONARY test_dictionary_error_3; +-- result: +-- !result +CREATE DICTIONARY test_dictionary_error_3 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("dictionaRy_memory_liMit" = "1024M"); +-- result: +-- !result +function: 
wait_refresh_dictionary_finish("test_dictionary_error_3", "FINISHED") +-- result: +None +-- !result +DROP DICTIONARY test_dictionary_error_3; +-- result: +-- !result +CREATE DICTIONARY test_dictionary_error_3 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("dictionaRy_memory_liMit" = "1024g"); +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_error_3", "FINISHED") +-- result: +None +-- !result +DROP DICTIONARY test_dictionary_error_3; +-- result: +-- !result +CREATE DICTIONARY test_dictionary_error_3 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("dictionaRy_memory_liMit" = "KB"); +-- result: +E: (1064, 'parse dictionary_memory_limit failed, given parameter: KB') +-- !result +CREATE DICTIONARY test_dictionary_error_3 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("dictionaRy_memory_liMit" = "MB"); +-- result: +E: (1064, 'parse dictionary_memory_limit failed, given parameter: MB') +-- !result +CREATE DICTIONARY test_dictionary_error_3 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("dictionaRy_memory_liMit" = "GB"); +-- result: +E: (1064, 'parse dictionary_memory_limit failed, given parameter: GB') +-- !result +CREATE DICTIONARY test_dictionary_error_3 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("dictionaRy_memory_liMit" = "B"); +-- result: +E: (1064, 'parse dictionary_memory_limit failed, given parameter: B') +-- !result +CREATE DICTIONARY test_dictionary_error_3 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("dictionaRy_memory_liMit" = "G"); +-- result: +E: (1064, 'parse dictionary_memory_limit failed, given parameter: G') +-- !result +CREATE DICTIONARY test_dictionary_error_3 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("dictionaRy_memory_liMit" = "m"); +-- result: +E: (1064, 'parse dictionary_memory_limit failed, given parameter: m') +-- !result +CREATE DICTIONARY test_dictionary_error_3 USING t_dictionary_error (id1 KEY, id2 VALUE) 
+PROPERTIES("dictionaRy_memory_liMit" = "K"); +-- result: +E: (1064, 'parse dictionary_memory_limit failed, given parameter: K') +-- !result +CREATE DICTIONARY test_dictionary_error_3 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("dictionaRy_memory_liMit" = "asdK"); +-- result: +E: (1064, 'parse dictionary_memory_limit failed, given parameter: asdK') +-- !result +CREATE DICTIONARY test_dictionary_error_3 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("dictionaRy_memory_liMit" = "GBKB"); +-- result: +E: (1064, 'parse dictionary_memory_limit failed, given parameter: GBKB') +-- !result +CREATE DICTIONARY test_dictionary_error_3 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("dictionaRy_memory_liMit" = "123GBKB"); +-- result: +E: (1064, 'parse dictionary_memory_limit failed, given parameter: 123GBKB') +-- !result +CREATE DICTIONARY test_dictionary_error_3 USING t_dictionary_error (id1 KEY, id2 VALUE) +PROPERTIES("dictionaRy_memory_liMit" = ""); +-- result: +E: (1064, 'parse dictionary_memory_limit failed, given parameter: ') +-- !result +DROP TABLE t_dictionary_error; +-- result: +-- !result +-- name: test_dictionary_source +CREATE TABLE `t_dictionary_source_1` ( + `id1` BIGINT NOT NULL COMMENT "", + `id2` BIGINT NOT NULL COMMENT "", + `id3` BIGINT NOT NULL COMMENT "" +) ENGINE=OLAP +PRIMARY KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "true", +"compression" = "LZ4" +); +-- result: +-- !result +CREATE TABLE `t_dictionary_source_2` ( + `id4` BIGINT NOT NULL COMMENT "", + `id5` BIGINT NOT NULL COMMENT "", + `id6` BIGINT NOT NULL COMMENT "" +) ENGINE=OLAP +PRIMARY KEY(`id4`) +DISTRIBUTED BY HASH(`id4`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "true", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_dictionary_source_1 VALUES (1, 2, 3); +-- result: 
+-- !result +INSERT INTO t_dictionary_source_2 VALUES (4, 5, 6); +-- result: +-- !result +DROP VIEW IF EXISTS test_dictionary_source_view; +-- result: +-- !result +CREATE VIEW test_dictionary_source_view AS +SELECT t_dictionary_source_1.id1, t_dictionary_source_1.id2, t_dictionary_source_1.id3, +t_dictionary_source_2.id4, t_dictionary_source_2.id5, t_dictionary_source_2.id6 FROM t_dictionary_source_1, t_dictionary_source_2; +-- result: +-- !result +SELECT * FROM test_dictionary_source_view; +-- result: +1 2 3 4 5 6 +-- !result +[UC]DROP DICTIONARY test_dictionary_source_1; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_source_1 does not exist.') +-- !result +CREATE DICTIONARY test_dictionary_source_1 USING test_dictionary_source_view (id1 KEY, id2 KEY, id3 KEY, id4 VALUE, id5 VALUE, id6 VALUE); +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_source_1", "FINISHED") +-- result: +None +-- !result +SELECT dictionary_get("test_dictionary_source_1", id1, id2, id3) FROM t_dictionary_source_1; +-- result: +{"id4":4,"id5":5,"id6":6} +-- !result +DROP DICTIONARY test_dictionary_source_1; +-- result: +-- !result +DROP VIEW test_dictionary_source_view; +-- result: +-- !result +DROP MATERIALIZED VIEW IF EXISTS test_dictionary_source_mv; +-- result: +-- !result +CREATE MATERIALIZED VIEW test_dictionary_source_mv +DISTRIBUTED BY HASH(`id1`) +REFRESH ASYNC +AS SELECT + t_dictionary_source_1.id1, t_dictionary_source_1.id2, t_dictionary_source_1.id3, + t_dictionary_source_2.id4, t_dictionary_source_2.id5, t_dictionary_source_2.id6 +FROM t_dictionary_source_1, t_dictionary_source_2; +-- result: +-- !result +[UC]REFRESH MATERIALIZED VIEW test_dictionary_source_mv WITH SYNC MODE; +SELECT * FROM test_dictionary_source_mv; +-- result: +1 2 3 4 5 6 +-- !result +[UC]DROP DICTIONARY test_dictionary_source_2; +-- result: +E: (1064, 'Getting analyzing error. 
Detail message: dictionary: test_dictionary_source_2 does not exist.') +-- !result +CREATE DICTIONARY test_dictionary_source_2 USING test_dictionary_source_mv (id1 KEY, id2 KEY, id3 KEY, id4 VALUE, id5 VALUE, id6 VALUE); +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_source_2", "FINISHED") +-- result: +None +-- !result +SELECT dictionary_get("test_dictionary_source_2", id1, id2, id3) FROM t_dictionary_source_1; +-- result: +{"id4":4,"id5":5,"id6":6} +-- !result +DROP DICTIONARY test_dictionary_source_2; +-- result: +-- !result +DROP MATERIALIZED VIEW test_dictionary_source_mv; +-- result: +-- !result +DROP TABLE t_dictionary_source_1; +-- result: +-- !result +DROP TABLE t_dictionary_source_2; +-- result: +-- !result +-- name: test_dictionary_insert +CREATE TABLE `t_dictionary_insert_1` ( + `id1` BIGINT NOT NULL COMMENT "", + `id2` BIGINT NOT NULL COMMENT "" +) ENGINE=OLAP +PRIMARY KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "true", +"compression" = "LZ4" +); +-- result: +-- !result +CREATE TABLE `t_dictionary_insert_2` ( + `id3` BIGINT NOT NULL COMMENT "", + `id4` BIGINT NOT NULL COMMENT "" +) ENGINE=OLAP +PRIMARY KEY(`id3`) +DISTRIBUTED BY HASH(`id3`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "true", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_dictionary_insert_1 VALUES (1, 2); +-- result: +-- !result +[UC]DROP DICTIONARY test_dictionary_insert; +-- result: +E: (1064, 'Getting analyzing error. 
Detail message: dictionary: test_dictionary_insert does not exist.') +-- !result +CREATE DICTIONARY test_dictionary_insert USING t_dictionary_insert_1 (id1 KEY, id2 VALUE); +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_insert", "FINISHED") +-- result: +None +-- !result +INSERT INTO t_dictionary_insert_2 VALUES (1, dictionary_get("test_dictionary_insert", 1)[1]); +-- result: +-- !result +SELECT * FROM t_dictionary_insert_2; +-- result: +1 2 +-- !result +DROP DICTIONARY test_dictionary_insert; +-- result: +-- !result +DROP TABLE t_dictionary_insert_1; +-- result: +-- !result +DROP TABLE t_dictionary_insert_2; +-- result: +-- !result +-- name: test_dictionary_generated_column_create_table +CREATE TABLE `t_dictionary_generated_column_create_table_1` ( + `id1` BIGINT NOT NULL COMMENT "", + `id2` BIGINT NOT NULL COMMENT "" +) ENGINE=OLAP +PRIMARY KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "true", +"compression" = "LZ4" +); +-- result: +-- !result +[UC]DROP DICTIONARY test_dictionary_generated_column_create_table; +-- result: +E: (1064, 'Getting analyzing error. 
Detail message: dictionary: test_dictionary_generated_column_create_table does not exist.') +-- !result +CREATE DICTIONARY test_dictionary_generated_column_create_table USING t_dictionary_generated_column_create_table_1 (id1 KEY, id2 VALUE) PROPERTIES("dictionary_warm_up" = "False"); +-- result: +-- !result +CREATE TABLE `t_dictionary_generated_column_create_table_2` ( + `id1` BIGINT NOT NULL COMMENT "", + `id2` BIGINT NOT NULL COMMENT "", + `id3` BIGINT AS dictionary_get("test_dictionary_generated_column_create_table", id1)[1] +) ENGINE=OLAP +PRIMARY KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "true", +"compression" = "LZ4" +); +-- result: +-- !result +DROP DICTIONARY test_dictionary_generated_column_create_table; +-- result: +-- !result +DROP TABLE t_dictionary_generated_column_create_table_1; +-- result: +-- !result +DROP TABLE t_dictionary_generated_column_create_table_2; +-- result: +-- !result +-- name: test_dictionary_common_expression +CREATE TABLE `t_dictionary_common_expression` ( + `id1` BIGINT NOT NULL COMMENT "", + `id2` BIGINT NOT NULL COMMENT "", + `id3` BIGINT NOT NULL COMMENT "", + `id4` BIGINT NOT NULL COMMENT "" +) ENGINE=OLAP +PRIMARY KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "true", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_dictionary_common_expression VALUES (1,2,3,4); +-- result: +-- !result +[UC]DROP DICTIONARY test_dictionary_common_expression; +-- result: +-- !result +CREATE DICTIONARY test_dictionary_common_expression USING t_dictionary_common_expression (id1 KEY, id2 VALUE, id3 VALUE, id4 VALUE); +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_common_expression", "FINISHED") +-- result: +None +-- !result +[UC]explain select 
dictionary_get("test_dictionary_common_expression", id1)[1],dictionary_get("test_dictionary_common_expression", id1)[2],dictionary_get("test_dictionary_common_expression", id1)[3] FROM t_dictionary_common_expression; +-- result: +[REGEX]*common* +-- !result +SELECT dictionary_get("test_dictionary_common_expression", id1)[1],dictionary_get("test_dictionary_common_expression", id1)[2],dictionary_get("test_dictionary_common_expression", id1)[3] FROM t_dictionary_common_expression; +-- result: +2 3 4 +-- !result +DROP DICTIONARY test_dictionary_common_expression; +-- result: +-- !result +DROP TABLE t_dictionary_common_expression; +-- result: +-- !result +-- name: test_dictionary_multiple_row +CREATE TABLE `t_dictionary_multiple_row` ( + `id1` BIGINT NOT NULL COMMENT "", + `id2` BIGINT NOT NULL COMMENT "", + `id3` BIGINT NOT NULL COMMENT "", + `id4` BIGINT NOT NULL COMMENT "" +) ENGINE=OLAP +PRIMARY KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "true", +"compression" = "LZ4" +); +-- result: +-- !result +CREATE TABLE `t_dictionary_multiple_row_fact` ( + `id1` BIGINT NOT NULL COMMENT "", + `id2` BIGINT NOT NULL COMMENT "", + `id3` BIGINT NOT NULL COMMENT "", + `id4` BIGINT NOT NULL COMMENT "" +) ENGINE=OLAP +PRIMARY KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "true", +"compression" = "LZ4" +); +-- result: +-- !result +insert into t_dictionary_multiple_row select generate_series, generate_series, generate_series, generate_series from Table(generate_series(1, 20001)); +-- result: +-- !result +[UC]DROP DICTIONARY test_dictionary_multiple_row_multi_value; +-- result: +E: (1064, 'Getting analyzing error. 
Detail message: dictionary: test_dictionary_multiple_row_multi_value does not exist.') +-- !result +CREATE DICTIONARY test_dictionary_multiple_row_multi_value USING t_dictionary_multiple_row (id1 KEY, id2 VALUE, id3 VALUE, id4 VALUE); +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_multiple_row_multi_value", "FINISHED") +-- result: +None +-- !result +[UC]DROP DICTIONARY test_dictionary_multiple_row_single_value; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_multiple_row_single_value does not exist.') +-- !result +CREATE DICTIONARY test_dictionary_multiple_row_single_value USING t_dictionary_multiple_row (id1 KEY, id2 VALUE); +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_multiple_row_single_value", "FINISHED") +-- result: +None +-- !result +insert into t_dictionary_multiple_row_fact SELECT id1, dictionary_get("test_dictionary_multiple_row_multi_value", id1)[1], dictionary_get("test_dictionary_multiple_row_multi_value", id1)[2], dictionary_get("test_dictionary_multiple_row_multi_value", id1)[3] FROM t_dictionary_multiple_row; +-- result: +-- !result +insert into t_dictionary_multiple_row_fact SELECT id1, dictionary_get("test_dictionary_multiple_row_single_value", id1)[1], 1, 1 FROM t_dictionary_multiple_row; +-- result: +-- !result +DROP TABLE t_dictionary_multiple_row; +-- result: +-- !result +DROP TABLE t_dictionary_multiple_row_fact; +-- result: +-- !result +DROP DICTIONARY test_dictionary_multiple_row_multi_value; +-- result: +-- !result +DROP DICTIONARY test_dictionary_multiple_row_single_value; +-- result: +-- !result +-- name: test_dictionary_show_create_table_gen_col +CREATE TABLE `t_dictionary_show_create_table_gen_col_1` ( + `k` BIGINT NOT NULL COMMENT "", + `v` BIGINT NOT NULL COMMENT "" +) ENGINE=OLAP +PRIMARY KEY(`k`) +DISTRIBUTED BY HASH(`k`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", 
+"replicated_storage" = "true", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_dictionary_show_create_table_gen_col_1 VALUES (1,1); +-- result: +-- !result +[UC]DROP DICTIONARY test_dictionary_show_create_table_gen_col; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_show_create_table_gen_col does not exist.') +-- !result +CREATE DICTIONARY test_dictionary_show_create_table_gen_col USING t_dictionary_show_create_table_gen_col_1 (k KEY, v VALUE); +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_show_create_table_gen_col", "FINISHED") +-- result: +None +-- !result +CREATE TABLE `t_dictionary_show_create_table_gen_col_2` ( + `k` BIGINT NOT NULL COMMENT "", + `v` BIGINT AS dictionary_get("test_dictionary_show_create_table_gen_col", k)[1] COMMENT "" +) ENGINE=OLAP +PRIMARY KEY(`k`) +DISTRIBUTED BY HASH(`k`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "true", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_dictionary_show_create_table_gen_col_2 VALUES (1); +-- result: +-- !result +SELECT * FROM t_dictionary_show_create_table_gen_col_2; +-- result: +1 1 +-- !result +SHOW CREATE TABLE t_dictionary_show_create_table_gen_col_2; +-- result: +t_dictionary_show_create_table_gen_col_2 CREATE TABLE `t_dictionary_show_create_table_gen_col_2` ( + `k` bigint(20) NOT NULL COMMENT "", + `v` bigint(20) NULL AS DICTIONARY_GET('test_dictionary_show_create_table_gen_col', k, false)[1] COMMENT "" +) ENGINE=OLAP +PRIMARY KEY(`k`) +DISTRIBUTED BY HASH(`k`) BUCKETS 1 +PROPERTIES ( +"compression" = "LZ4", +"enable_persistent_index" = "false", +"fast_schema_evolution" = "true", +"replicated_storage" = "true", +"replication_num" = "1" +); +-- !result +DROP TABLE t_dictionary_show_create_table_gen_col_1; +-- result: +-- !result +DROP TABLE t_dictionary_show_create_table_gen_col_2; +-- result: +-- !result +DROP 
DICTIONARY test_dictionary_show_create_table_gen_col; +-- result: +-- !result +-- name: test_dictionary_null_if_not_exist +CREATE TABLE `t_dictionary_null_if_not_exist` ( + `k` BIGINT NOT NULL COMMENT "", + `v` BIGINT NOT NULL COMMENT "" +) ENGINE=OLAP +PRIMARY KEY(`k`) +DISTRIBUTED BY HASH(`k`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "true", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_dictionary_null_if_not_exist VALUES (1,1),(3,3),(5,5); +-- result: +-- !result +[UC]DROP DICTIONARY test_dictionary_null_if_not_exist; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_null_if_not_exist does not exist.') +-- !result +CREATE DICTIONARY test_dictionary_null_if_not_exist USING t_dictionary_null_if_not_exist (k KEY, v VALUE); +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_null_if_not_exist", "FINISHED") +-- result: +None +-- !result +INSERT INTO t_dictionary_null_if_not_exist VALUES (2,2),(4,4); +-- result: +-- !result +SELECT k, dictionary_get("test_dictionary_null_if_not_exist", k) FROM t_dictionary_null_if_not_exist ORDER BY k; +-- result: +[REGEX].*key not found in dictionary cache.* +-- !result +SELECT k, dictionary_get("test_dictionary_null_if_not_exist", k, false) FROM t_dictionary_null_if_not_exist ORDER BY k; +-- result: +[REGEX].*key not found in dictionary cache.* +-- !result +SELECT k, dictionary_get("test_dictionary_null_if_not_exist", k, true) FROM t_dictionary_null_if_not_exist ORDER BY k; +-- result: +1 {"v":1} +2 None +3 {"v":3} +4 None +5 {"v":5} +-- !result +SELECT k, dictionary_get("test_dictionary_null_if_not_exist", k, tRuE) FROM t_dictionary_null_if_not_exist ORDER BY k; +-- result: +1 {"v":1} +2 None +3 {"v":3} +4 None +5 {"v":5} +-- !result +SELECT k, dictionary_get("test_dictionary_null_if_not_exist", k, FaLsE) FROM t_dictionary_null_if_not_exist ORDER BY k; +-- result: 
+[REGEX].*key not found in dictionary cache.* +-- !result +SELECT k, dictionary_get("test_dictionary_null_if_not_exist", k, xxxx) FROM t_dictionary_null_if_not_exist ORDER BY k; +-- result: +E: (1064, "Getting analyzing error. Detail message: Column 'xxxx' cannot be resolved.") +-- !result +SELECT k, dictionary_get("test_dictionary_null_if_not_exist", k, "true") FROM t_dictionary_null_if_not_exist ORDER BY k; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_null_if_not_exist has invalid parameter for `null_if_not_exist` invalid parameter: StringLiteral{id=null, type=VARCHAR, sel=-1.0, #distinct=-1, scale=-1}.') +-- !result +SELECT k, dictionary_get("test_dictionary_null_if_not_exist", k, "false") FROM t_dictionary_null_if_not_exist ORDER BY k; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_null_if_not_exist has invalid parameter for `null_if_not_exist` invalid parameter: StringLiteral{id=null, type=VARCHAR, sel=-1.0, #distinct=-1, scale=-1}.') +-- !result +SELECT k, dictionary_get("test_dictionary_null_if_not_exist", k, 1) FROM t_dictionary_null_if_not_exist ORDER BY k; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_null_if_not_exist has invalid parameter for `null_if_not_exist` invalid parameter: IntLiteral{id=null, type=TINYINT, sel=-1.0, #distinct=-1, scale=-1}.') +-- !result +SELECT k, dictionary_get("test_dictionary_null_if_not_exist", k, 0) FROM t_dictionary_null_if_not_exist ORDER BY k; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_null_if_not_exist has invalid parameter for `null_if_not_exist` invalid parameter: IntLiteral{id=null, type=TINYINT, sel=-1.0, #distinct=-1, scale=-1}.') +-- !result +SELECT k, dictionary_get("test_dictionary_null_if_not_exist", k, -1) FROM t_dictionary_null_if_not_exist ORDER BY k; +-- result: +E: (1064, 'Getting analyzing error. 
Detail message: dictionary: test_dictionary_null_if_not_exist has invalid parameter for `null_if_not_exist` invalid parameter: IntLiteral{id=null, type=TINYINT, sel=-1.0, #distinct=-1, scale=-1}.') +-- !result +SELECT k, dictionary_get("test_dictionary_null_if_not_exist", k, k) FROM t_dictionary_null_if_not_exist ORDER BY k; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_null_if_not_exist has invalid parameter for `null_if_not_exist` invalid parameter: SlotRef{id=null, type=BIGINT, sel=-1.0, #distinct=-1, scale=-1}.') +-- !result +CREATE TABLE `t_dictionary_null_if_not_exist_gen_column` ( + `k` BIGINT NOT NULL COMMENT "", + `v` BIGINT AS dictionary_get("test_dictionary_null_if_not_exist", k)[1] +) ENGINE=OLAP +PRIMARY KEY(`k`) +DISTRIBUTED BY HASH(`k`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "true", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_dictionary_null_if_not_exist_gen_column VALUES (1),(2),(3),(4),(5); +-- result: +[REGEX].*key not found in dictionary cache.* +-- !result +SELECT * FROM t_dictionary_null_if_not_exist_gen_column; +-- result: +-- !result +SHOW CREATE TABLE t_dictionary_null_if_not_exist_gen_column; +-- result: +t_dictionary_null_if_not_exist_gen_column CREATE TABLE `t_dictionary_null_if_not_exist_gen_column` ( + `k` bigint(20) NOT NULL COMMENT "", + `v` bigint(20) NULL AS DICTIONARY_GET('test_dictionary_null_if_not_exist', k, false)[1] COMMENT "" +) ENGINE=OLAP +PRIMARY KEY(`k`) +DISTRIBUTED BY HASH(`k`) BUCKETS 1 +PROPERTIES ( +"compression" = "LZ4", +"enable_persistent_index" = "false", +"fast_schema_evolution" = "true", +"replicated_storage" = "true", +"replication_num" = "1" +); +-- !result +-- name: test_dictionary_null_if_not_exist +CREATE TABLE `t_dictionary_null_if_not_exist` ( + `k` BIGINT NOT NULL COMMENT "", + `v` BIGINT NOT NULL COMMENT "" +) ENGINE=OLAP +PRIMARY KEY(`k`) +DISTRIBUTED BY 
HASH(`k`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "true", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_dictionary_null_if_not_exist VALUES (1,1),(3,3),(5,5); +-- result: +-- !result +[UC]DROP DICTIONARY test_dictionary_null_if_not_exist; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_null_if_not_exist does not exist.') +-- !result +CREATE DICTIONARY test_dictionary_null_if_not_exist USING t_dictionary_null_if_not_exist (k KEY, v VALUE); +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_null_if_not_exist", "FINISHED") +-- result: +None +-- !result +INSERT INTO t_dictionary_null_if_not_exist VALUES (2,2),(4,4); +-- result: +-- !result +SELECT k, dictionary_get("test_dictionary_null_if_not_exist", k) FROM t_dictionary_null_if_not_exist ORDER BY k; +-- result: +[REGEX].*key not found in dictionary cache.* +-- !result +SELECT k, dictionary_get("test_dictionary_null_if_not_exist", k, false) FROM t_dictionary_null_if_not_exist ORDER BY k; +-- result: +[REGEX].*key not found in dictionary cache.* +-- !result +SELECT k, dictionary_get("test_dictionary_null_if_not_exist", k, true) FROM t_dictionary_null_if_not_exist ORDER BY k; +-- result: +1 {"v":1} +2 None +3 {"v":3} +4 None +5 {"v":5} +-- !result +SELECT k, dictionary_get("test_dictionary_null_if_not_exist", k, tRuE) FROM t_dictionary_null_if_not_exist ORDER BY k; +-- result: +1 {"v":1} +2 None +3 {"v":3} +4 None +5 {"v":5} +-- !result +SELECT k, dictionary_get("test_dictionary_null_if_not_exist", k, FaLsE) FROM t_dictionary_null_if_not_exist ORDER BY k; +-- result: +[REGEX].*key not found in dictionary cache.* +-- !result +SELECT k, dictionary_get("test_dictionary_null_if_not_exist", k, xxxx) FROM t_dictionary_null_if_not_exist ORDER BY k; +-- result: +E: (1064, "Getting analyzing error. 
Detail message: Column 'xxxx' cannot be resolved.") +-- !result +SELECT k, dictionary_get("test_dictionary_null_if_not_exist", k, "true") FROM t_dictionary_null_if_not_exist ORDER BY k; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_null_if_not_exist has invalid parameter for `null_if_not_exist` invalid parameter: StringLiteral{id=null, type=VARCHAR, sel=-1.0, #distinct=-1, scale=-1}.') +-- !result +SELECT k, dictionary_get("test_dictionary_null_if_not_exist", k, "false") FROM t_dictionary_null_if_not_exist ORDER BY k; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_null_if_not_exist has invalid parameter for `null_if_not_exist` invalid parameter: StringLiteral{id=null, type=VARCHAR, sel=-1.0, #distinct=-1, scale=-1}.') +-- !result +SELECT k, dictionary_get("test_dictionary_null_if_not_exist", k, 1) FROM t_dictionary_null_if_not_exist ORDER BY k; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_null_if_not_exist has invalid parameter for `null_if_not_exist` invalid parameter: IntLiteral{id=null, type=TINYINT, sel=-1.0, #distinct=-1, scale=-1}.') +-- !result +SELECT k, dictionary_get("test_dictionary_null_if_not_exist", k, 0) FROM t_dictionary_null_if_not_exist ORDER BY k; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_null_if_not_exist has invalid parameter for `null_if_not_exist` invalid parameter: IntLiteral{id=null, type=TINYINT, sel=-1.0, #distinct=-1, scale=-1}.') +-- !result +SELECT k, dictionary_get("test_dictionary_null_if_not_exist", k, -1) FROM t_dictionary_null_if_not_exist ORDER BY k; +-- result: +E: (1064, 'Getting analyzing error. 
Detail message: dictionary: test_dictionary_null_if_not_exist has invalid parameter for `null_if_not_exist` invalid parameter: IntLiteral{id=null, type=TINYINT, sel=-1.0, #distinct=-1, scale=-1}.') +-- !result +SELECT k, dictionary_get("test_dictionary_null_if_not_exist", k, k) FROM t_dictionary_null_if_not_exist ORDER BY k; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_null_if_not_exist has invalid parameter for `null_if_not_exist` invalid parameter: SlotRef{id=null, type=BIGINT, sel=-1.0, #distinct=-1, scale=-1}.') +-- !result +SELECT k, dictionary_get("test_dictionary_null_if_not_exist", k, k, k) FROM t_dictionary_null_if_not_exist ORDER BY k; +-- result: +E: (1064, 'Getting analyzing error. Detail message: dictionary: test_dictionary_null_if_not_exist has expected keys size: 1 keys: [k] plus null_if_not_exist flag(optional) but param given: 3.') +-- !result +CREATE TABLE `t_dictionary_null_if_not_exist_gen_column` ( + `k` BIGINT NOT NULL COMMENT "", + `v` BIGINT AS dictionary_get("test_dictionary_null_if_not_exist", k)[1] +) ENGINE=OLAP +PRIMARY KEY(`k`) +DISTRIBUTED BY HASH(`k`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "true", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_dictionary_null_if_not_exist_gen_column VALUES (1),(2),(3),(4),(5); +-- result: +[REGEX].*key not found in dictionary cache.* +-- !result +SELECT * FROM t_dictionary_null_if_not_exist_gen_column; +-- result: +-- !result +SHOW CREATE TABLE t_dictionary_null_if_not_exist_gen_column; +-- result: +t_dictionary_null_if_not_exist_gen_column CREATE TABLE `t_dictionary_null_if_not_exist_gen_column` ( + `k` bigint(20) NOT NULL COMMENT "", + `v` bigint(20) NULL AS DICTIONARY_GET('test_dictionary_null_if_not_exist', k, false)[1] COMMENT "" +) ENGINE=OLAP +PRIMARY KEY(`k`) +DISTRIBUTED BY HASH(`k`) BUCKETS 1 +PROPERTIES ( +"compression" = "LZ4", 
+"enable_persistent_index" = "false", +"fast_schema_evolution" = "true", +"replicated_storage" = "true", +"replication_num" = "1" +); +-- !result +TRUNCATE TABLE t_dictionary_null_if_not_exist; +-- result: +-- !result +INSERT into t_dictionary_null_if_not_exist select generate_series, generate_series from Table(generate_series(1, 1000)); +-- result: +-- !result +REFRESH DICTIONARY test_dictionary_null_if_not_exist; +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_null_if_not_exist", "FINISHED") +-- result: +None +-- !result +INSERT into t_dictionary_null_if_not_exist select generate_series, generate_series from Table(generate_series(1001, 2000)); +-- result: +-- !result +SELECT SUM(dictionary_get("test_dictionary_null_if_not_exist", k, true) IS NOT NULL) from t_dictionary_null_if_not_exist; +-- result: +1000 +-- !result +[UC]DROP DICTIONARY test_dictionary_null_if_not_exist; +-- result: +-- !result +DROP TABLE t_dictionary_null_if_not_exist; +-- result: +-- !result +CREATE TABLE `t_dictionary_null_if_not_exist` ( + `k` BIGINT NOT NULL COMMENT "", + `v1` BIGINT NOT NULL COMMENT "", + `v2` BIGINT NOT NULL COMMENT "" +) ENGINE=OLAP +PRIMARY KEY(`k`) +DISTRIBUTED BY HASH(`k`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "true", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT into t_dictionary_null_if_not_exist select generate_series, generate_series, generate_series from Table(generate_series(1, 1000)); +-- result: +-- !result +CREATE DICTIONARY test_dictionary_null_if_not_exist USING t_dictionary_null_if_not_exist (k KEY, v1 VALUE, v2 VALUE); +-- result: +-- !result +function: wait_refresh_dictionary_finish("test_dictionary_null_if_not_exist", "FINISHED") +-- result: +None +-- !result +INSERT into t_dictionary_null_if_not_exist select generate_series, generate_series, generate_series from Table(generate_series(1001, 2000)); +-- result: +-- !result +SELECT 
SUM(dictionary_get("test_dictionary_null_if_not_exist", k, true) IS NOT NULL) from t_dictionary_null_if_not_exist; +-- result: +1000 +-- !result +[UC]DROP DICTIONARY test_dictionary_null_if_not_exist; +-- result: +-- !result +DROP TABLE t_dictionary_null_if_not_exist; +-- result: +-- !result \ No newline at end of file diff --git a/test/sql/test_external_file/R/test_orc_predicates b/test/sql/test_external_file/R/test_orc_predicates new file mode 100644 index 00000000000000..6f48264f355e09 --- /dev/null +++ b/test/sql/test_external_file/R/test_orc_predicates @@ -0,0 +1,49 @@ +-- name: testOrcPredicates +shell: ossutil64 mkdir oss://${oss_bucket}/test_orc_predicates/${uuid0}/ >/dev/null || echo "exit 0" >/dev/null +-- result: +0 + +-- !result +shell: ossutil64 cp --force ./sql/test_external_file/files/string-dict-column.orc oss://${oss_bucket}/test_orc_predicates/${uuid0}/string-dict-column.orc | grep -Pv "(average|elapsed)" +-- result: +0 + +Succeed: Total num: 1, size: 65,977. OK num: 1(upload 1 files). 
+-- !result +CREATE EXTERNAL TABLE split_orc_error +( + col1 string +) +ENGINE=file +PROPERTIES +( + "path" = "oss://${oss_bucket}/test_orc_predicates/${uuid0}/", + "format" = "orc", + "aws.s3.access_key" = "${oss_ak}", + "aws.s3.secret_key" = "${oss_sk}", + "aws.s3.endpoint" = "${oss_endpoint}" +); +-- result: +-- !result +set sql_dialect='StarRocks'; +-- result: +-- !result +select count(*) from split_orc_error where split(col1, 'a')[2]='x'; +-- result: +0 +-- !result +set sql_dialect='trino'; +-- result: +-- !result +select count(*) from split_orc_error where split(col1, 'a')[2]='x'; +-- result: +[REGEX].*Array subscript must be less than or equal to array length: 2 > 1.* +-- !result +drop table split_orc_error; +-- result: +-- !result +shell: ossutil64 rm -rf oss://${oss_bucket}/test_orc_predicates/${uuid0}/ >/dev/null || echo "exit 0" >/dev/null +-- result: +0 + +-- !result \ No newline at end of file diff --git a/test/sql/test_function/R/test_time_slice b/test/sql/test_function/R/test_time_slice index 74e8c9f7221925..0810d968d6742b 100644 --- a/test/sql/test_function/R/test_time_slice +++ b/test/sql/test_function/R/test_time_slice @@ -68,23 +68,23 @@ select time_slice('2023-12-31 03:12:04',interval 2147483647 second); -- !result select time_slice('0000-01-01',interval 5 year); -- result: -E: (1064, "time used with time_slice can't before 0001-01-01 00:00:00") +[REGEX].*time used with time_slice can't before 0001-01-01 00:00:00.* -- !result select time_slice('0000-01-01',interval 5 month); -- result: -E: (1064, "time used with time_slice can't before 0001-01-01 00:00:00") +[REGEX].*time used with time_slice can't before 0001-01-01 00:00:00.* -- !result select time_slice('0000-01-01',interval 5 day); -- result: -E: (1064, "time used with time_slice can't before 0001-01-01 00:00:00") +[REGEX].*time used with time_slice can't before 0001-01-01 00:00:00.* -- !result select time_slice('0000-01-01',interval 5 quarter); -- result: -E: (1064, "time used with 
time_slice can't before 0001-01-01 00:00:00") +[REGEX].*time used with time_slice can't before 0001-01-01 00:00:00.* -- !result select time_slice('0000-01-01',interval 5 week); -- result: -E: (1064, "time used with time_slice can't before 0001-01-01 00:00:00") +[REGEX].*time used with time_slice can't before 0001-01-01 00:00:00.* -- !result select time_slice('9999-12-31',interval 5 year, ceil); -- result: @@ -219,23 +219,23 @@ select time_slice('2023-12-31 03:12:04',interval 2147483647 second); -- !result select time_slice('0000-01-01',interval 5 year); -- result: -E: (1064, "time used with time_slice can't before 0001-01-01 00:00:00") +[REGEX].*time used with time_slice can't before 0001-01-01 00:00:00.* -- !result select time_slice('0000-01-01',interval 5 month); -- result: -E: (1064, "time used with time_slice can't before 0001-01-01 00:00:00") +[REGEX].*time used with time_slice can't before 0001-01-01 00:00:00.* -- !result select time_slice('0000-01-01',interval 5 day); -- result: -E: (1064, "time used with time_slice can't before 0001-01-01 00:00:00") +[REGEX].*time used with time_slice can't before 0001-01-01 00:00:00.* -- !result select time_slice('0000-01-01',interval 5 quarter); -- result: -E: (1064, "time used with time_slice can't before 0001-01-01 00:00:00") +[REGEX].*time used with time_slice can't before 0001-01-01 00:00:00.* -- !result select time_slice('0000-01-01',interval 5 week); -- result: -E: (1064, "time used with time_slice can't before 0001-01-01 00:00:00") +[REGEX].*time used with time_slice can't before 0001-01-01 00:00:00.* -- !result select time_slice('9999-12-31',interval 5 year, ceil); -- result: diff --git a/test/sql/test_generate_series/R/test_generate_series b/test/sql/test_generate_series/R/test_generate_series index 24b6b610ebb1ed..cc41d4c9029a6a 100644 --- a/test/sql/test_generate_series/R/test_generate_series +++ b/test/sql/test_generate_series/R/test_generate_series @@ -257,7 +257,7 @@ SELECT * FROM 
TABLE(generate_series(9223372036854775807, 9223372036854775807, 92 -- name: test44 SELECT * FROM TABLE(generate_series(1, 2, 0)); -- result: -E: (1064, 'step size cannot equal zero') +[REGEX].*step size cannot equal zero.* -- !result -- name: test47 SELECT * FROM TABLE(generate_series(-2147483640, -2147483647, -3)); @@ -293,7 +293,7 @@ INSERT INTO t1 VALUES(1, 2, 0); -- !result SELECT * FROM t1, LATERAL generate_series(c0, c1, c2); -- result: -E: (1064, 'step size cannot equal zero') +[REGEX].*step size cannot equal zero.* -- !result -- name: test49 SELECT * FROM TABLE(generate_series(-128, -128, -3)); diff --git a/test/sql/test_group_execution/R/test_group_execution_join b/test/sql/test_group_execution/R/test_group_execution_join new file mode 100644 index 00000000000000..670975df1e1e46 --- /dev/null +++ b/test/sql/test_group_execution/R/test_group_execution_join @@ -0,0 +1,473 @@ +-- name: test_group_execution_join +set enable_group_execution = true; +-- result: +-- !result +CREATE TABLE `t0` ( + `c0` int(11) NULL COMMENT "", + `c1` varchar(20) NULL COMMENT "", + `c2` varchar(200) NULL COMMENT "", + `c3` int(11) NULL COMMENT "" +) ENGINE=OLAP +DUPLICATE KEY(`c0`, `c1`) +COMMENT "OLAP" +DISTRIBUTED BY HASH(`c0`, `c1`) BUCKETS 48 +PROPERTIES ( +"colocate_with" = "${uuid0}", +"replication_num" = "1", +"in_memory" = "false", +"storage_format" = "DEFAULT", +"enable_persistent_index" = "false", +"replicated_storage" = "true", +"compression" = "LZ4" +); +-- result: +-- !result +CREATE TABLE `t1` ( + `c0` int(11) NULL COMMENT "", + `c1` varchar(20) NULL COMMENT "", + `c2` varchar(200) NULL COMMENT "", + `c3` int(11) NULL COMMENT "" +) ENGINE=OLAP +DUPLICATE KEY(`c0`, `c1`) +COMMENT "OLAP" +DISTRIBUTED BY HASH(`c0`, `c1`) BUCKETS 48 +PROPERTIES ( +"colocate_with" = "${uuid0}", +"replication_num" = "1", +"in_memory" = "false", +"storage_format" = "DEFAULT", +"enable_persistent_index" = "false", +"replicated_storage" = "true", +"compression" = "LZ4" +); +-- result: +-- 
!result +CREATE TABLE `small_table` ( + `c0` int(11) NULL COMMENT "", + `c1` varchar(20) NULL COMMENT "", + `c2` varchar(200) NULL COMMENT "", + `c3` int(11) NULL COMMENT "" +) ENGINE=OLAP +DUPLICATE KEY(`c0`, `c1`) +COMMENT "OLAP" +DISTRIBUTED BY HASH(`c0`, `c1`, `c2`) BUCKETS 4 +PROPERTIES ( +"replication_num" = "1" +); +-- result: +-- !result +create table empty_t like t0; +-- result: +-- !result +insert into t0 SELECT generate_series, generate_series, generate_series, generate_series FROM TABLE(generate_series(1, 40960)); +-- result: +-- !result +insert into t0 values (null,null,null,null); +-- result: +-- !result +insert into t1 SELECT * FROM t0; +-- result: +-- !result +insert into small_table SELECT generate_series, generate_series, generate_series, generate_series FROM TABLE(generate_series(1, 100)); +-- result: +-- !result +select count(*) from t0; +-- result: +40961 +-- !result +select count(*) from t1; +-- result: +40961 +-- !result +select count(*) from empty_t; +-- result: +0 +-- !result +select count(*) from small_table; +-- result: +100 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 where l.c3 > 100; +-- result: +40860 20530.5 40860 40860 40860 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l left join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 where l.c3 > 100; +-- result: +40860 20530.5 40860 40860 40860 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l right join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 where r.c3 < 1024; +-- result: +1023 512.0 1023 1023 1023 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 where r.c3 < 1024; +-- result: +1023 512.0 1023 1023 1023 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l join 
[colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 where r.c3 < 1024; +-- result: +1023 512.0 1023 1023 1023 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l join [colocate] t1 r on l.c0 = r.c0 join [broadcast] small_table s on l.c0 = s.c0 and l.c1 = s.c1; +-- result: +100 50.5 100 100 100 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l join [colocate] t1 r on l.c0 = r.c0 join [bucket] small_table s on l.c0 = s.c0 and l.c1 = s.c1; +-- result: +100 50.5 100 100 100 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l join [colocate] t1 r on l.c0 = r.c0 join [broadcast] empty_t s on l.c0 = s.c0 and l.c1 = s.c1; +-- result: +0 None 0 0 0 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 where l.c3 < 0; +-- result: +0 None 0 0 0 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l left join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 where l.c3 < 0; +-- result: +0 None 0 0 0 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l right join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 where l.c3 < 0; +-- result: +0 None 0 0 0 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l full outer join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 where l.c3 < 0; +-- result: +0 None 0 0 0 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 where r.c3 < 0; +-- result: +0 None 0 0 0 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l left join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 where r.c3 < 0; +-- result: +0 None 0 0 0 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l 
right join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 where r.c3 < 0; +-- result: +0 None 0 0 0 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l full outer join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 where r.c3 < 0; +-- result: +0 None 0 0 0 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 where r.c3 < 10; +-- result: +9 5.0 9 9 9 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 where r.c3 < 102400 - 1; +-- result: +40960 20480.5 40960 40960 40960 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 where r.c3 < 10000; +-- result: +9999 5000.0 9999 9999 9999 +-- !result +select /*+SET_VAR(low_cardinality_optimize_v2=false,global_runtime_filter_build_max_size=-1) */ t0.*,t1.* from t0 join [broadcast] small_table t2 on t0.c0=t2.c0 join [colocate] t1 on t1.c1=t0.c1 and t1.c0 =t0.c0 and t1.c2 = t2.c2 where t1.c3 < 10 order by 1,2,3,4,5,6,7,8; +-- result: +1 1 1 1 1 1 1 1 +2 2 2 2 2 2 2 2 +3 3 3 3 3 3 3 3 +4 4 4 4 4 4 4 4 +5 5 5 5 5 5 5 5 +6 6 6 6 6 6 6 6 +7 7 7 7 7 7 7 7 +8 8 8 8 8 8 8 8 +9 9 9 9 9 9 9 9 +-- !result +select /*+SET_VAR(low_cardinality_optimize_v2=false,global_runtime_filter_build_max_size=-1) */ t0.*,t1.* from t0 join [bucket] small_table t2 on t0.c0=t2.c0 join [colocate] t1 on t1.c1=t0.c1 and t1.c0 =t0.c0 and t1.c2 = t2.c2 where t1.c3 < 10 order by 1,2,3,4,5,6,7,8; +-- result: +1 1 1 1 1 1 1 1 +2 2 2 2 2 2 2 2 +3 3 3 3 3 3 3 3 +4 4 4 4 4 4 4 4 +5 5 5 5 5 5 5 5 +6 6 6 6 6 6 6 6 +7 7 7 7 7 7 7 7 +8 8 8 8 8 8 8 8 +9 9 9 9 9 9 9 9 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l join [colocate] t1 r on l.c0 <=> r.c0 and l.c1 <=> r.c1 where r.c3 < 10; +-- result: +9 5.0 9 9 9 +-- 
!result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 join [broadcast] small_table t3 where r.c3 < 10 and t3.c1 < 3; +-- result: +18 5.0 18 18 18 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l join [broadcast] small_table t3 join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 where r.c3 < 10 and t3.c1 < 3; +-- result: +18 5.0 18 18 18 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l join [broadcast] small_table t3 join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 where r.c3 < 10 and t3.c1 = 3; +-- result: +9 5.0 9 9 9 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 join [broadcast] small_table t3 where r.c3 < 10 and t3.c1 = 3; +-- result: +9 5.0 9 9 9 +-- !result +with agged_table as ( select distinct c0, c1 from t0) select /*+SET_VAR(cbo_cte_reuse_rate=0) */ count(*) ,sum(l.c0), sum(r.c0), sum(l.c1), sum(r.c1) from agged_table l join [colocate] agged_table r on l.c0 = r.c0 and l.c1 = r.c1; +-- result: +40960 838881280 838881280 838881280.0 838881280.0 +-- !result +with agged_table as ( select distinct c0, c1 from t0) select count(*) ,sum(l.c0), sum(r.c0), sum(l.c1), sum(r.c1) from agged_table l join [colocate] agged_table r on l.c0 = r.c0 and l.c1 = r.c1; +-- result: +40960 838881280 838881280 838881280.0 838881280.0 +-- !result +with agged_table as ( select distinct c0, c1 from t0) select count(*) ,sum(l.c0), sum(r.c0), sum(l.c1), sum(r.c1) from agged_table l join [colocate] t0 r on l.c0 = r.c0 and l.c1 = r.c1; +-- result: +40960 838881280 838881280 838881280.0 838881280.0 +-- !result +with agged_table as ( select distinct c0, c1 from t0) select count(*) ,sum(l.c0), sum(r.c0), sum(l.c1), sum(r.c1) from agged_table l join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1; +-- result: +40960 838881280 
838881280 838881280.0 838881280.0 +-- !result +with agged_table as ( select distinct c0, c1 from t0) select count(*) ,sum(l.c0), sum(l.c1) from agged_table l join [broadcast] TABLE(generate_series(1, 100)) r on l.c0 = r.generate_series; +-- result: +100 5050 5050.0 +-- !result +with agged_table as ( select distinct c0, c1 from t0) select count(*) ,sum(l.c0), sum(l.c1) from agged_table l join [bucket] TABLE(generate_series(1, 100)) r on l.c0 = r.generate_series; +-- result: +100 5050 5050.0 +-- !result +with agged_table as ( select distinct c0, c1 from t0) select /*+SET_VAR(cbo_cte_reuse_rate=-1) */ count(*) ,sum(l.c0), sum(r.c0), sum(l.c1), sum(r.c1) from agged_table l join [bucket] agged_table r on l.c0 = r.c0 and l.c1 = r.c1; +-- result: +40960 838881280 838881280 838881280.0 838881280.0 +-- !result +with agged_table as ( select distinct c0, c1 from t0) select /*+SET_VAR(cbo_cte_reuse_rate=-1) */ count(*) ,sum(l.c0), sum(r.c0), sum(l.c1), sum(r.c1) from agged_table l right join [bucket] agged_table r on l.c0 = r.c0 and l.c1 = r.c1; +-- result: +40961 838881280 838881280 838881280.0 838881280.0 +-- !result +with agged_table as ( select distinct c0, c1 from t0) select /*+SET_VAR(cbo_cte_reuse_rate=-1) */ count(*) ,sum(l.c0), sum(r.c0), sum(l.c1), sum(r.c1) from agged_table l left join [bucket] agged_table r on l.c0 = r.c0 and l.c1 = r.c1; +-- result: +40961 838881280 838881280 838881280.0 838881280.0 +-- !result +with flat_table as ( select c0, c1 from t0) select /*+SET_VAR(cbo_cte_reuse_rate=-1) */ l.c0, l.c1, count(*) from flat_table l join [bucket] flat_table r on l.c0 = r.c0 and l.c1 = r.c1 group by 1,2 order by 1,2 limit 10000,2; +-- result: +10001 10001 1 +10002 10002 1 +-- !result +with flat_table as ( select c0, c1 from t0) select /*+SET_VAR(cbo_cte_reuse_rate=-1) */ l.c0, l.c1, count(*) from flat_table l right join [bucket] flat_table r on l.c0 = r.c0 and l.c1 = r.c1 group by 1,2 order by 1,2 limit 10000,2; +-- result: +10000 10000 1 +10001 10001 1 +-- 
!result +with flat_table as ( select c0, c1 from t0) select /*+SET_VAR(cbo_cte_reuse_rate=-1) */ l.c0, l.c1, count(*) from flat_table l left join [bucket] flat_table r on l.c0 = r.c0 and l.c1 = r.c1 group by 1,2 order by 1,2 limit 10000,2; +-- result: +10000 10000 1 +10001 10001 1 +-- !result +with agged_table as ( select distinct c0, c1,c3 from t0) select /*+SET_VAR(cbo_cte_reuse_rate=-1) */ count(*) ,sum(l.c0), sum(r.c0), sum(l.c1), sum(r.c1) from agged_table l join [bucket] agged_table r on l.c0 = r.c0 and l.c1 = r.c1 where r.c3 < 100; +-- result: +99 4950 4950 4950.0 4950.0 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l join [bucket] t1 r on l.c0 = r.c0 and l.c1 = r.c1; +-- result: +40960 20480.5 40960 40960 40960 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l right join [bucket] t1 r on l.c0 = r.c0 and l.c1 = r.c1; +-- result: +40960 20480.5 40960 40960 40960 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1), count(s.c0) from t0 l join [bucket] t1 r on l.c0 = r.c0 and l.c1 = r.c1 join [bucket] small_table s on l.c0 = s.c0 and l.c1 = s.c1; +-- result: +100 50.5 100 100 100 100 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1), count(s.c0) from t0 l left join [bucket] t1 r on l.c0 = r.c0 and l.c1 = r.c1 join [bucket] small_table s on l.c0 = s.c0 and l.c1 = s.c1; +-- result: +100 50.5 100 100 100 100 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1), count(s.c0) from t0 l join [bucket] t1 r on l.c0 = r.c0 and l.c1 = r.c1 join [broadcast] small_table s on l.c0 = s.c0 and l.c1 = s.c1; +-- result: +100 50.5 100 100 100 100 +-- !result +with agged_table as ( select distinct c0, c1 from t0) select /*+SET_VAR(cbo_cte_reuse_rate=-1) */ count(l.c0), count(l.c1) from t0 l join [colocate] (select l.c0, r.c1 from agged_table l join [bucket] t0 r on l.c0=r.c0 and l.c1 = r.c1) r on l.c0=r.c0 
and l.c1 = r.c1; +-- result: +40960 40960 +-- !result +with agged_table as ( select distinct c0, c1 from t0) select /*+SET_VAR(cbo_cte_reuse_rate=-1) */ count(l.c0), count(l.c1) from t0 l join [colocate] (select l.c0, r.c1 from agged_table l right join [bucket] t0 r on l.c0=r.c0 and l.c1 = r.c1) r on l.c0=r.c0 and l.c1 = r.c1; +-- result: +40960 40960 +-- !result +select count(*), sum(c0), sum(c1) from (select l.c0, l.c1 from (select c0, c1 from t0 group by rollup (c0, c1)) l join t1 r on l.c0 = r.c0 and r.c1 = l.c1) tb; +-- result: +40960 838881280 838881280.0 +-- !result +select l.c0, l.c1, r.c1 from t0 l join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 order by 1,2,3 limit 10000, 10; +-- result: +10001 10001 10001 +10002 10002 10002 +10003 10003 10003 +10004 10004 10004 +10005 10005 10005 +10006 10006 10006 +10007 10007 10007 +10008 10008 10008 +10009 10009 10009 +10010 10010 10010 +-- !result +select count(*) from (select l.c0, l.c1, r.c1, row_number() over () from t0 l join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 order by 1,2,3 limit 10) tb; +-- result: +10 +-- !result +select l.c0, l.c1, r.c1, row_number() over (partition by l.c0) from t0 l join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 order by 1,2,3 limit 10; +-- result: +1 1 1 1 +2 2 2 1 +3 3 3 1 +4 4 4 1 +5 5 5 1 +6 6 6 1 +7 7 7 1 +8 8 8 1 +9 9 9 1 +10 10 10 1 +-- !result +select l.c0, l.c1, r.c1, row_number() over (partition by l.c0, l.c1) from t0 l join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 order by 1,2,3 limit 10; +-- result: +1 1 1 1 +2 2 2 1 +3 3 3 1 +4 4 4 1 +5 5 5 1 +6 6 6 1 +7 7 7 1 +8 8 8 1 +9 9 9 1 +10 10 10 1 +-- !result +select count(*), count(lc0), count(lc1), count(rc1) from (select l.c0 lc0, l.c1 lc1, r.c1 rc1, row_number() over () rn from t0 l join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1)tb where rn < 10; +-- result: +9 9 9 9 +-- !result +select count(*), sum(lc0), sum(lc1), sum(rc1) from (select l.c0 lc0, l.c1 lc1, r.c1 rc1, row_number() over (partition by 
l.c0) rn from t0 l join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1)tb where rn < 10; +-- result: +40960 838881280 838881280.0 838881280.0 +-- !result +select count(*), sum(lc0), sum(lc1), sum(rc1) from (select l.c0 lc0, l.c1 lc1, r.c1 rc1, row_number() over (partition by l.c0, l.c1) rn from t0 l join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1)tb where rn < 10; +-- result: +40960 838881280 838881280.0 838881280.0 +-- !result +select count(*) from (select c0l, c1l from (select l.c0 c0l, l.c1 c1l, r.c1 c1r, row_number() over () rn from t0 l join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1) tb where rn < 10 group by 1,2 order by 1,2 limit 2000, 1) tb; +-- result: +0 +-- !result +select c0l, c1l from (select l.c0 c0l, l.c1 c1l, r.c1 c1r, row_number() over (partition by l.c0) rn from t0 l join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1) tb where rn < 10 group by 1,2 order by 1,2 limit 2000, 1; +-- result: +2001 2001 +-- !result +select c0l, c1l from (select l.c0 c0l, l.c1 c1l, r.c1 c1r, row_number() over (partition by l.c0, l.c1) rn from t0 l join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1) tb where rn < 10 group by 1,2 order by 1,2 limit 2000, 1; +-- result: +2001 2001 +-- !result +insert into blackhole() select BE_ID from information_schema.be_bvars l join t0 r on l.BE_ID = r.c0; +-- result: +-- !result +select c0,c1 in (select c1 from t1 where c0 = 10) from (select l.c0, r.c1 from t0 l join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1) tb order by 1, 2 limit 10000, 1; +-- result: +10001 0 +-- !result +select c0,c1 = (select c1 from t1 where c0 = 10) from (select l.c0, r.c1 from t0 l join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1) tb order by 1, 2 limit 10000, 1; +-- result: +10001 0 +-- !result +select c0,c1 = (select c1 from t1 where c0 != 10) from (select l.c0, r.c1 from t0 l join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1) tb order by 1, 2 limit 10000, 1; +-- result: +[REGEX].*Expected LE 1 to be returned by expression.* +-- !result 
+select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 where l.c0=1 and l.c1=1; +-- result: +1 1.0 1 1 1 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l left join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 where l.c0=1 and l.c1=1; +-- result: +1 1.0 1 1 1 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l right join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 where l.c0=1 and l.c1=1; +-- result: +1 1.0 1 1 1 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l full join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 where l.c0=1 and l.c1=1; +-- result: +1 1.0 1 1 1 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 where r.c0=1 and r.c1=1; +-- result: +1 1.0 1 1 1 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l left join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 where r.c0=1 and r.c1=1; +-- result: +1 1.0 1 1 1 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l right join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 where r.c0=1 and r.c1=1; +-- result: +1 1.0 1 1 1 +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l full join [colocate] t1 r on l.c0 = r.c0 and l.c1 = r.c1 where r.c0=1 and r.c1=1; +-- result: +1 1.0 1 1 1 +-- !result +with + tx as (select c0, c1 from t0 where c0 = 1 and c1 = 1), + ty as (select c0, c1 from t0 where c0) +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from tx l join [colocate] ty r on l.c0 = r.c0 and l.c1 = r.c1; +-- result: +1 1.0 1 1 1 +-- !result +with + tx as (select c0, c1 from t0 where c0 = 1 and c1 = 1), + ty as (select c0, c1 from t0 where c0) +select count(l.c0), avg(l.c0), 
count(l.c1), count(l.c0), count(r.c1) from tx l left join [colocate] ty r on l.c0 = r.c0 and l.c1 = r.c1; +-- result: +1 1.0 1 1 1 +-- !result +with + tx as (select c0, c1 from t0 where c0 = 1 and c1 = 1), + ty as (select c0, c1 from t0 where c0) +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from tx l right join [colocate] ty r on l.c0 = r.c0 and l.c1 = r.c1; +-- result: +1 1.0 1 1 40960 +-- !result +with + tx as (select c0, c1 from t0 where c0 = 1 and c1 = 1), + ty as (select c0, c1 from t0 where c0) +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from tx l full join [colocate] ty r on l.c0 = r.c0 and l.c1 = r.c1; +-- result: +1 1.0 1 1 40960 +-- !result +with + tx as (select c0, c1 from t0), + ty as (select c0, c1 from t0 where c0 = 1 and c1 = 1) +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from tx l join [colocate] ty r on l.c0 = r.c0 and l.c1 = r.c1; +-- result: +1 1.0 1 1 1 +-- !result +with + tx as (select c0, c1 from t0), + ty as (select c0, c1 from t0 where c0 = 1 and c1 = 1) +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from tx l left join [colocate] ty r on l.c0 = r.c0 and l.c1 = r.c1; +-- result: +40960 20480.5 40960 40960 1 +-- !result +with + tx as (select c0, c1 from t0), + ty as (select c0, c1 from t0 where c0 = 1 and c1 = 1) +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from tx l right join [colocate] ty r on l.c0 = r.c0 and l.c1 = r.c1; +-- result: +1 1.0 1 1 1 +-- !result +with + tx as (select c0, c1 from t0), + ty as (select c0, c1 from t0 where c0 = 1 and c1 = 1) +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from tx l full join [colocate] ty r on l.c0 = r.c0 and l.c1 = r.c1; +-- result: +40960 20480.5 40960 40960 1 +-- !result +set enable_spill=true; +-- result: +-- !result +select count(l.c0), avg(l.c0), count(l.c1), count(l.c0), count(r.c1) from t0 l join [colocate] t1 r on l.c0 = r.c0 join [bucket] 
small_table s on l.c0 = s.c0 and l.c1 = s.c1; +-- result: +100 50.5 100 100 100 +-- !result \ No newline at end of file diff --git a/test/sql/test_inverted_index/R/test_inverted_index b/test/sql/test_inverted_index/R/test_inverted_index new file mode 100644 index 00000000000000..eab9969972b00b --- /dev/null +++ b/test/sql/test_inverted_index/R/test_inverted_index @@ -0,0 +1,1691 @@ +-- name: test_basic_create_index @slow +ADMIN SET FRONTEND CONFIG("enable_experimental_gin" = "true"); +-- result: +-- !result +set low_cardinality_optimize_v2 = false; +-- result: +-- !result +set cbo_enable_low_cardinality_optimize = false; +-- result: +-- !result +CREATE TABLE `t_test_basic_create_index_pk` ( + `id1` bigint(20) NOT NULL COMMENT "", + `id2` bigint(20) NOT NULL COMMENT "", + `id3` bigint(20) NOT NULL COMMENT "" +) ENGINE=OLAP +PRIMARY KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +-- !result +CREATE INDEX index_1 ON t_test_basic_create_index_pk (id2) USING BITMAP; +-- result: +-- !result +function: wait_alter_table_finish() +-- result: +None +-- !result +CREATE INDEX index_2 ON t_test_basic_create_index_pk (id3) USING BITMAP; +-- result: +-- !result +function: wait_alter_table_finish() +-- result: +None +-- !result +CREATE TABLE `t_test_basic_create_index_dup` ( + `id1` bigint(20) NOT NULL COMMENT "", + `id2` bigint(20) NOT NULL COMMENT "", + `id3` bigint(20) NOT NULL COMMENT "", + `id4` string NOT NULL COMMENT "", + `id5` string NOT NULL COMMENT "", + `id6` bigint(20) NOT NULL COMMENT "", + `id7` string NOT NULL COMMENT "" +) ENGINE=OLAP +DUPLICATE KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +-- !result +CREATE INDEX index_1 ON t_test_basic_create_index_dup (id2) 
USING BITMAP; +-- result: +-- !result +function: wait_alter_table_finish() +-- result: +None +-- !result +CREATE INDEX index_2 ON t_test_basic_create_index_dup (id3) USING BITMAP; +-- result: +-- !result +function: wait_alter_table_finish() +-- result: +None +-- !result +CREATE INDEX index_3 ON t_test_basic_create_index_dup (id4) USING GIN; +-- result: +-- !result +function: wait_alter_table_finish() +-- result: +None +-- !result +CREATE INDEX index_4 ON t_test_basic_create_index_dup (id5) USING GIN; +-- result: +-- !result +function: wait_alter_table_finish() +-- result: +None +-- !result +CREATE INDEX index_5 ON t_test_basic_create_index_dup (id6) USING BITMAP; +-- result: +-- !result +function: wait_alter_table_finish() +-- result: +None +-- !result +CREATE INDEX index_6 ON t_test_basic_create_index_dup (id7) USING GIN; +-- result: +-- !result +function: wait_alter_table_finish() +-- result: +None +-- !result +CREATE TABLE `t_test_basic_create_index_replicated` ( + `id1` bigint(20) NOT NULL COMMENT "", + `id2` bigint(20) NOT NULL COMMENT "", + `id3` bigint(20) NOT NULL COMMENT "", + `id4` string NOT NULL COMMENT "", + `id5` string NOT NULL COMMENT "", + `id6` bigint(20) NOT NULL COMMENT "", + `id7` string NOT NULL COMMENT "", + INDEX `gin_id4` (`id4`) USING GIN ("parser" = "none") COMMENT '' +) ENGINE=OLAP +DUPLICATE KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "true", +"compression" = "LZ4" +); +-- result: +E: (1064, 'Getting analyzing error. 
Detail message: GIN does not support replicated mode.') +-- !result +DROP TABLE t_test_basic_create_index_pk; +-- result: +-- !result +DROP TABLE t_test_basic_create_index_dup; +-- result: +-- !result +-- name: test_query_gin_index +ADMIN SET FRONTEND CONFIG("enable_experimental_gin" = "true"); +-- result: +-- !result +set low_cardinality_optimize_v2 = false; +-- result: +-- !result +set cbo_enable_low_cardinality_optimize = false; +-- result: +-- !result +CREATE TABLE `t_test_gin_index_query` ( + `id1` bigint(20) NOT NULL COMMENT "", + `query_none_analyzer` varchar(255) NOT NULL COMMENT "", + `query_english` varchar(255) NOT NULL COMMENT "", + `query_chinese` varchar(255) NOT NULL COMMENT "", + INDEX gin_none (`query_none_analyzer`) USING GIN ("parser" = "none") COMMENT 'whole line index', + INDEX gin_english (`query_english`) USING GIN ("parser" = "english") COMMENT 'english index', + INDEX gin_chinese (`query_chinese`) USING GIN ("parser" = "chinese") COMMENT 'chinese index' +) ENGINE=OLAP +DUPLICATE KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +-- !result +insert into t_test_gin_index_query values +(1, 'starrocks', 'hello starrocks', '极速分析'), +(2, 'starrocks', 'hello world', '你好世界'), +(3, 'lakehouse', 'hello lakehouse', '湖仓一体'), +(4, 'materialized view', 'materialized view', '物化视图'), +(5, '中文测试', 'chinese test', '中文测试'); +-- result: +-- !result +select count(*) from t_test_gin_index_query where query_none_analyzer = 'starrocks'; +-- result: +2 +-- !result +select count(*) from t_test_gin_index_query where query_english match 'hello'; +-- result: +3 +-- !result +select count(*) from t_test_gin_index_query where query_english not match 'hello' and id1 > 4; +-- result: +1 +-- !result +select count(*) from t_test_gin_index_query where query_english match 'hello' and query_none_analyzer = 'lakehouse'; +-- result: 
+1 +-- !result +select count(*) from t_test_gin_index_query where query_none_analyzer like '%ed vi%'; +-- result: +1 +-- !result +select count(*) from t_test_gin_index_query where query_english match '%ed vi%'; +-- result: +0 +-- !result +select count(*) from t_test_gin_index_query where query_english match '%teria%'; +-- result: +1 +-- !result +drop table t_test_gin_index_query; +-- result: +-- !result +-- name: test_gin_index_single_predicate_none +ADMIN SET FRONTEND CONFIG("enable_experimental_gin" = "true"); +-- result: +-- !result +set low_cardinality_optimize_v2 = false; +-- result: +-- !result +set cbo_enable_low_cardinality_optimize = false; +-- result: +-- !result +CREATE TABLE `t_gin_index_single_predicate_none` ( + `id1` bigint(20) NOT NULL COMMENT "", + `text_column` varchar(255) NULL COMMENT "", + INDEX gin_none (`text_column`) USING GIN ("parser" = "none") COMMENT 'whole line index' +) ENGINE=OLAP +DUPLICATE KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_gin_index_single_predicate_none VALUES +(1, "ABC"), +(2, "abc"), +(3, "ABD"), +(4, "This is Gin Index"), +(5, NULL); +-- result: +-- !result +SELECT * FROM t_gin_index_single_predicate_none WHERE text_column = "ABC"; +-- result: +1 ABC +-- !result +SELECT * FROM t_gin_index_single_predicate_none WHERE text_column != "ABC"; +-- result: +2 abc +3 ABD +4 This is Gin Index +5 None +-- !result +SELECT * FROM t_gin_index_single_predicate_none WHERE text_column IS NULL; +-- result: +5 None +-- !result +SELECT * FROM t_gin_index_single_predicate_none WHERE text_column IS NOT NULL; +-- result: +1 ABC +2 abc +3 ABD +4 This is Gin Index +-- !result +SELECT * FROM t_gin_index_single_predicate_none WHERE text_column <= "AB"; +-- result: +5 None +-- !result +SELECT * FROM t_gin_index_single_predicate_none WHERE text_column >= "AB"; +-- 
result: +1 ABC +2 abc +3 ABD +4 This is Gin Index +-- !result +SELECT * FROM t_gin_index_single_predicate_none WHERE text_column IN ("ABC"); +-- result: +1 ABC +-- !result +SELECT * FROM t_gin_index_single_predicate_none WHERE text_column IN ("ABC", "ABD"); +-- result: +1 ABC +3 ABD +-- !result +SELECT * FROM t_gin_index_single_predicate_none WHERE text_column NOT IN ("ABC"); +-- result: +2 abc +3 ABD +4 This is Gin Index +5 None +-- !result +SELECT * FROM t_gin_index_single_predicate_none WHERE text_column NOT IN ("ABC", "ABD"); +-- result: +2 abc +4 This is Gin Index +5 None +-- !result +SELECT * FROM t_gin_index_single_predicate_none WHERE text_column LIKE "ABC"; +-- result: +1 ABC +-- !result +SELECT * FROM t_gin_index_single_predicate_none WHERE text_column LIKE "%ABC%"; +-- result: +1 ABC +-- !result +SELECT * FROM t_gin_index_single_predicate_none WHERE text_column LIKE "%BC"; +-- result: +1 ABC +-- !result +SELECT * FROM t_gin_index_single_predicate_none WHERE text_column LIKE "AB%"; +-- result: +1 ABC +3 ABD +-- !result +SELECT * FROM t_gin_index_single_predicate_none WHERE text_column match "AB%"; +-- result: +1 ABC +3 ABD +-- !result +DROP TABLE t_gin_index_single_predicate_none; +-- result: +-- !result +-- name: test_gin_index_single_predicate_english +ADMIN SET FRONTEND CONFIG("enable_experimental_gin" = "true"); +-- result: +-- !result +set low_cardinality_optimize_v2 = false; +-- result: +-- !result +set cbo_enable_low_cardinality_optimize = false; +-- result: +-- !result +CREATE TABLE `t_gin_index_single_predicate_english` ( + `id1` bigint(20) NOT NULL COMMENT "", + `text_column` varchar(255) NULL COMMENT "", + INDEX gin_english (`text_column`) USING GIN ("parser" = "english") COMMENT 'english index' +) ENGINE=OLAP +DUPLICATE KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO 
t_gin_index_single_predicate_english VALUES +(1, "ABC"), +(2, "abc"), +(3, "ABD"), +(4, "This is Gin Index"), +(5, NULL); +-- result: +-- !result +SELECT * FROM t_gin_index_single_predicate_english WHERE text_column = "This is Gin Index"; +-- result: +4 This is Gin Index +-- !result +SELECT * FROM t_gin_index_single_predicate_english WHERE text_column match "This"; +-- result: +-- !result +SELECT * FROM t_gin_index_single_predicate_english WHERE text_column match "this"; +-- result: +4 This is Gin Index +-- !result +SELECT * FROM t_gin_index_single_predicate_english WHERE text_column match "thi%"; +-- result: +4 This is Gin Index +-- !result +SELECT * FROM t_gin_index_single_predicate_english WHERE text_column not match "this"; +-- result: +1 ABC +2 abc +3 ABD +5 None +-- !result +SELECT * FROM t_gin_index_single_predicate_english WHERE text_column <= "this"; +-- result: +1 ABC +2 abc +3 ABD +4 This is Gin Index +-- !result +SELECT * FROM t_gin_index_single_predicate_english WHERE text_column >= "this"; +-- result: +-- !result +SELECT * FROM t_gin_index_single_predicate_english WHERE text_column IN ("this"); +-- result: +-- !result +SELECT * FROM t_gin_index_single_predicate_english WHERE text_column IN ("this", "is"); +-- result: +-- !result +SELECT * FROM t_gin_index_single_predicate_english WHERE text_column NOT IN ("this"); +-- result: +1 ABC +2 abc +3 ABD +4 This is Gin Index +-- !result +SELECT * FROM t_gin_index_single_predicate_english WHERE text_column NOT IN ("this", "is"); +-- result: +1 ABC +2 abc +3 ABD +4 This is Gin Index +-- !result +SELECT * FROM t_gin_index_single_predicate_english WHERE text_column LIKE "this"; +-- result: +-- !result +SELECT * FROM t_gin_index_single_predicate_english WHERE text_column LIKE "%this%"; +-- result: +-- !result +SELECT * FROM t_gin_index_single_predicate_english WHERE text_column LIKE "%his"; +-- result: +-- !result +SELECT * FROM t_gin_index_single_predicate_english WHERE text_column LIKE "thi%"; +-- result: +-- 
!result +DROP TABLE t_gin_index_single_predicate_english; +-- result: +-- !result +-- name: test_gin_index_multiple_predicate_none +ADMIN SET FRONTEND CONFIG("enable_experimental_gin" = "true"); +-- result: +-- !result +set low_cardinality_optimize_v2 = false; +-- result: +-- !result +set cbo_enable_low_cardinality_optimize = false; +-- result: +-- !result +CREATE TABLE `t_gin_index_multiple_predicate_none` ( + `id1` bigint(20) NOT NULL COMMENT "", + `text_column` varchar(255) NULL COMMENT "", + INDEX gin_none (`text_column`) USING GIN ("parser" = "none") COMMENT 'whole line index' +) ENGINE=OLAP +DUPLICATE KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_gin_index_multiple_predicate_none VALUES +(1, "ABC"), +(2, "abc"), +(3, "ABD"), +(4, "This is Gin Index"), +(5, NULL); +-- result: +-- !result +SELECT * FROM t_gin_index_multiple_predicate_none WHERE text_column = "ABC" OR text_column = "ABD"; +-- result: +1 ABC +3 ABD +-- !result +SELECT * FROM t_gin_index_multiple_predicate_none WHERE text_column != "ABC" AND text_column != "ABD"; +-- result: +2 abc +4 This is Gin Index +5 None +-- !result +SELECT * FROM t_gin_index_multiple_predicate_none WHERE text_column IS NOT NULL AND text_column != "ABC"; +-- result: +2 abc +3 ABD +4 This is Gin Index +-- !result +SELECT * FROM t_gin_index_multiple_predicate_none WHERE text_column <= "ABD" OR text_column >= "This is Gin Index"; +-- result: +1 ABC +2 abc +3 ABD +4 This is Gin Index +-- !result +SELECT * FROM t_gin_index_multiple_predicate_none WHERE text_column >= "AB" AND text_column <= "This is Gin Index"; +-- result: +1 ABC +3 ABD +4 This is Gin Index +-- !result +SELECT * FROM t_gin_index_multiple_predicate_none WHERE text_column IN ("ABC") OR text_column IN ("ABD"); +-- result: +1 ABC +3 ABD +-- !result +SELECT * FROM 
t_gin_index_multiple_predicate_none WHERE text_column IN ("ABC", "ABD") OR text_column IN ("abc"); +-- result: +1 ABC +2 abc +3 ABD +-- !result +SELECT * FROM t_gin_index_multiple_predicate_none WHERE text_column NOT IN ("ABC") OR text_column NOT IN ("ABD"); +-- result: +1 ABC +2 abc +3 ABD +4 This is Gin Index +-- !result +SELECT * FROM t_gin_index_multiple_predicate_none WHERE text_column LIKE "ABC" OR text_column LIKE "ABD"; +-- result: +1 ABC +3 ABD +-- !result +SELECT * FROM t_gin_index_multiple_predicate_none WHERE text_column LIKE "AB%" AND text_column LIKE "%C"; +-- result: +1 ABC +-- !result +SELECT * FROM t_gin_index_multiple_predicate_none WHERE text_column match "ABC" OR text_column match "ABD"; +-- result: +[REGEX].*Match can only used as a pushdown predicate on column with GIN in a single query.* +-- !result +SELECT * FROM t_gin_index_multiple_predicate_none WHERE text_column match "AB%" AND text_column match "%C"; +-- result: +1 ABC +-- !result +DROP TABLE t_gin_index_multiple_predicate_none; +-- result: +-- !result +-- name: test_gin_index_multiple_predicate_english +ADMIN SET FRONTEND CONFIG("enable_experimental_gin" = "true"); +-- result: +-- !result +set low_cardinality_optimize_v2 = false; +-- result: +-- !result +set cbo_enable_low_cardinality_optimize = false; +-- result: +-- !result +CREATE TABLE `t_gin_index_multiple_predicate_english` ( + `id1` bigint(20) NOT NULL COMMENT "", + `text_column` varchar(255) NULL COMMENT "", + INDEX gin_english (`text_column`) USING GIN ("parser" = "english") COMMENT 'english index' +) ENGINE=OLAP +DUPLICATE KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_gin_index_multiple_predicate_english VALUES +(1, "ABC"), +(2, "abc"), +(3, "ABD"), +(4, "This is Gin Index"), +(5, NULL); +-- result: +-- !result +SELECT * FROM 
t_gin_index_multiple_predicate_english WHERE text_column match "this" OR text_column match "abc"; +-- result: +[REGEX].*Match can only used as a pushdown predicate on column with GIN in a single query.* +-- !result +SELECT * FROM t_gin_index_multiple_predicate_english WHERE text_column not match "this" AND text_column not match "abc"; +-- result: +3 ABD +5 None +-- !result +SELECT * FROM t_gin_index_multiple_predicate_english WHERE text_column LIKE "this" OR text_column LIKE "abc"; +-- result: +2 abc +-- !result +SELECT * FROM t_gin_index_multiple_predicate_english WHERE text_column LIKE "%this%" AND text_column LIKE "%is%"; +-- result: +-- !result +DROP TABLE t_gin_index_multiple_predicate_english; +-- result: +-- !result +-- name: test_gin_index_compaction +ADMIN SET FRONTEND CONFIG("enable_experimental_gin" = "true"); +-- result: +-- !result +set low_cardinality_optimize_v2 = false; +-- result: +-- !result +set cbo_enable_low_cardinality_optimize = false; +-- result: +-- !result +CREATE TABLE `t_gin_index_compaction_none_base` ( + `id1` bigint(20) NOT NULL COMMENT "", + `text_column` varchar(255) NULL COMMENT "", + INDEX gin_none (`text_column`) USING GIN ("parser" = "none") COMMENT 'whole line index' +) ENGINE=OLAP +DUPLICATE KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_gin_index_compaction_none_base VALUES (1, "abc"); +-- result: +-- !result +INSERT INTO t_gin_index_compaction_none_base VALUES (2, "ABC"); +-- result: +-- !result +INSERT INTO t_gin_index_compaction_none_base VALUES (3, "bcd"); +-- result: +-- !result +INSERT INTO t_gin_index_compaction_none_base VALUES (4, "BCD"); +-- result: +-- !result +ALTER TABLE t_gin_index_compaction_none_base BASE COMPACT; +-- result: +-- !result +CREATE TABLE `t_gin_index_compaction_none_cumu` ( + `id1` bigint(20) NOT NULL COMMENT "", + 
`text_column` varchar(255) NULL COMMENT "", + INDEX gin_none (`text_column`) USING GIN ("parser" = "none") COMMENT 'whole line index' +) ENGINE=OLAP +DUPLICATE KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_gin_index_compaction_none_cumu VALUES (1, "abc"); +-- result: +-- !result +INSERT INTO t_gin_index_compaction_none_cumu VALUES (2, "ABC"); +-- result: +-- !result +INSERT INTO t_gin_index_compaction_none_cumu VALUES (3, "bcd"); +-- result: +-- !result +INSERT INTO t_gin_index_compaction_none_cumu VALUES (4, "BCD"); +-- result: +-- !result +ALTER TABLE t_gin_index_compaction_none_cumu CUMULATIVE COMPACT; +-- result: +-- !result +SELECT sleep(10); +-- result: +1 +-- !result +SELECT * FROM t_gin_index_compaction_none_base; +-- result: +1 abc +2 ABC +3 bcd +4 BCD +-- !result +SELECT * FROM t_gin_index_compaction_none_cumu; +-- result: +1 abc +2 ABC +3 bcd +4 BCD +-- !result +DROP TABLE t_gin_index_compaction_none_base; +-- result: +-- !result +DROP TABLE t_gin_index_compaction_none_cumu; +-- result: +-- !result +CREATE TABLE `t_gin_index_compaction_english_base` ( + `id1` bigint(20) NOT NULL COMMENT "", + `text_column` varchar(255) NULL COMMENT "", + INDEX gin_english (`text_column`) USING GIN ("parser" = "english") COMMENT 'english index' +) ENGINE=OLAP +DUPLICATE KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_gin_index_compaction_english_base VALUES (1, "This is Gin Index"); +-- result: +-- !result +INSERT INTO t_gin_index_compaction_english_base VALUES (2, "This is Not Gin Index"); +-- result: +-- !result +INSERT INTO t_gin_index_compaction_english_base VALUES (3, "Gin Index"); +-- result: +-- !result +ALTER 
TABLE t_gin_index_compaction_english_base BASE COMPACT; +-- result: +-- !result +CREATE TABLE `t_gin_index_compaction_english_cumu` ( + `id1` bigint(20) NOT NULL COMMENT "", + `text_column` varchar(255) NULL COMMENT "", + INDEX gin_english (`text_column`) USING GIN ("parser" = "english") COMMENT 'english index' +) ENGINE=OLAP +DUPLICATE KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_gin_index_compaction_english_cumu VALUES (1, "This is Gin Index"); +-- result: +-- !result +INSERT INTO t_gin_index_compaction_english_cumu VALUES (2, "This is Not Gin Index"); +-- result: +-- !result +INSERT INTO t_gin_index_compaction_english_cumu VALUES (3, "Gin Index"); +-- result: +-- !result +ALTER TABLE t_gin_index_compaction_english_cumu CUMULATIVE COMPACT; +-- result: +-- !result +SELECT sleep(10); +-- result: +1 +-- !result +SELECT * FROM t_gin_index_compaction_english_base; +-- result: +1 This is Gin Index +2 This is Not Gin Index +3 Gin Index +-- !result +SELECT * FROM t_gin_index_compaction_english_cumu; +-- result: +1 This is Gin Index +2 This is Not Gin Index +3 Gin Index +-- !result +DROP TABLE t_gin_index_compaction_english_base; +-- result: +-- !result +DROP TABLE t_gin_index_compaction_english_cumu; +-- result: +-- !result +-- name: test_gin_index_type +ADMIN SET FRONTEND CONFIG("enable_experimental_gin" = "true"); +-- result: +-- !result +set low_cardinality_optimize_v2 = false; +-- result: +-- !result +set cbo_enable_low_cardinality_optimize = false; +-- result: +-- !result +CREATE TABLE `t_gin_index_type_1` ( + `id1` bigint(20) NOT NULL COMMENT "", + `test_column` varchar(255) NULL COMMENT "", + INDEX gin_none (`test_column`) USING GIN ("parser" = "none") COMMENT 'whole line index' +) ENGINE=OLAP +DUPLICATE KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = 
"1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +-- !result +CREATE TABLE `t_gin_index_type_2` ( + `id1` bigint(20) NOT NULL COMMENT "", + `test_column` String NULL COMMENT "", + INDEX gin_none (`test_column`) USING GIN ("parser" = "none") COMMENT 'whole line index' +) ENGINE=OLAP +DUPLICATE KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +-- !result +CREATE TABLE `t_gin_index_type_3` ( + `id1` bigint(20) NOT NULL COMMENT "", + `test_column` CHAR NULL COMMENT "", + INDEX gin_none (`test_column`) USING GIN ("parser" = "none") COMMENT 'whole line index' +) ENGINE=OLAP +DUPLICATE KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +-- !result +CREATE TABLE `t_gin_index_type_4` ( + `id1` bigint(20) NOT NULL COMMENT "", + `test_column` BIGINT NULL COMMENT "", + INDEX gin_none (`test_column`) USING GIN ("parser" = "none") COMMENT 'whole line index' +) ENGINE=OLAP +DUPLICATE KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +E: (1064, 'Getting analyzing error. 
Detail message: The inverted index can only be build on column with type of CHAR/STRING/VARCHAR type..') +-- !result +CREATE TABLE `t_gin_index_type_5` ( + `id1` bigint(20) NOT NULL COMMENT "", + `test_column` DOUBLE NULL COMMENT "", + INDEX gin_none (`test_column`) USING GIN ("parser" = "none") COMMENT 'whole line index' +) ENGINE=OLAP +DUPLICATE KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +E: (1064, 'Getting analyzing error. Detail message: The inverted index can only be build on column with type of CHAR/STRING/VARCHAR type..') +-- !result +CREATE TABLE `t_gin_index_type_6` ( + `id1` bigint(20) NOT NULL COMMENT "", + `test_column` DATETIME NULL COMMENT "", + INDEX gin_none (`test_column`) USING GIN ("parser" = "none") COMMENT 'whole line index' +) ENGINE=OLAP +DUPLICATE KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +E: (1064, 'Getting analyzing error. Detail message: The inverted index can only be build on column with type of CHAR/STRING/VARCHAR type..') +-- !result +CREATE TABLE `t_gin_index_type_7` ( + `id1` bigint(20) NOT NULL COMMENT "", + `test_column` DATE NULL COMMENT "", + INDEX gin_none (`test_column`) USING GIN ("parser" = "none") COMMENT 'whole line index' +) ENGINE=OLAP +DUPLICATE KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +E: (1064, 'Getting analyzing error. 
Detail message: The inverted index can only be build on column with type of CHAR/STRING/VARCHAR type..') +-- !result +-- name: test_clone_for_gin +ADMIN SET FRONTEND CONFIG("enable_experimental_gin" = "true"); +-- result: +-- !result +set low_cardinality_optimize_v2 = false; +-- result: +-- !result +set cbo_enable_low_cardinality_optimize = false; +-- result: +-- !result +CREATE TABLE `t_clone_for_gin` ( + `id1` bigint(20) NOT NULL COMMENT "", + `text_column_1` varchar(255) NULL COMMENT "", + `text_column_2` varchar(255) NULL COMMENT "", + `text_column_3` varchar(255) NULL COMMENT "", + `text_column_4` varchar(255) NULL COMMENT "", + INDEX gin_none_1 (`text_column_1`) USING GIN ("parser" = "none") COMMENT 'whole line index', + INDEX gin_none_2 (`text_column_2`) USING BITMAP, + INDEX gin_none_3 (`text_column_3`) USING GIN ("parser" = "none") COMMENT 'whole line index', + INDEX gin_none_4 (`text_column_4`) USING BITMAP +) ENGINE=OLAP +DUPLICATE KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "2", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_clone_for_gin VALUES (1, "abc","abc","abc","abc"),(2, "ABC","ABC","ABC","ABC"); +-- result: +-- !result +SELECT * FROM t_clone_for_gin ORDER BY id1; +-- result: +1 abc abc abc abc +2 ABC ABC ABC ABC +-- !result +function: set_first_tablet_bad_and_recover("t_clone_for_gin") +-- result: +None +-- !result +SELECT * FROM t_clone_for_gin ORDER BY id1; +-- result: +1 abc abc abc abc +2 ABC ABC ABC ABC +-- !result +-- name: test_complex_predicate_for_gin +ADMIN SET FRONTEND CONFIG("enable_experimental_gin" = "true"); +-- result: +-- !result +set low_cardinality_optimize_v2 = false; +-- result: +-- !result +set cbo_enable_low_cardinality_optimize = false; +-- result: +-- !result +CREATE TABLE `t_complex_predicate_for_gin_none` ( + `id1` bigint(20) NOT NULL COMMENT "", + `text_column` varchar(255) NULL COMMENT "", 
+ INDEX gin_none (`text_column`) USING GIN ("parser" = "none") COMMENT '' +) ENGINE=OLAP +DUPLICATE KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +-- !result +CREATE TABLE `t_complex_predicate_for_gin_english` ( + `id1` bigint(20) NOT NULL COMMENT "", + `text_column` varchar(255) NULL COMMENT "", + INDEX gin_english (`text_column`) USING GIN ("parser" = "english") COMMENT '' +) ENGINE=OLAP +DUPLICATE KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_complex_predicate_for_gin_none VALUES (1, "abc cbd"); +-- result: +-- !result +INSERT INTO t_complex_predicate_for_gin_none VALUES (2, "cbd edf"); +-- result: +-- !result +SELECT * FROM t_complex_predicate_for_gin_none WHERE upper(text_column) LIKE "%CBD%"; +-- result: +1 abc cbd +2 cbd edf +-- !result +SELECT * FROM t_complex_predicate_for_gin_none WHERE lower(text_column) LIKE "%cbd%"; +-- result: +1 abc cbd +2 cbd edf +-- !result +SELECT * FROM t_complex_predicate_for_gin_none WHERE CAST(id1 as STRING) LIKE "abc"; +-- result: +-- !result +SELECT * FROM t_complex_predicate_for_gin_none WHERE CAST(id1 as STRING) LIKE "%abc%"; +-- result: +-- !result +SELECT * FROM t_complex_predicate_for_gin_none WHERE text_column LIKE CONCAT("ab", "c"); +-- result: +-- !result +SELECT * FROM t_complex_predicate_for_gin_none WHERE text_column LIKE CONCAT("ab", "%"); +-- result: +1 abc cbd +-- !result +SELECT * FROM t_complex_predicate_for_gin_none WHERE upper(text_column) match "%CBD%"; +-- result: +E: (1064, 'Getting analyzing error. 
Detail message: left operand of MATCH must be column ref.') +-- !result +SELECT * FROM t_complex_predicate_for_gin_none WHERE lower(text_column) match "%cbd%"; +-- result: +E: (1064, 'Getting analyzing error. Detail message: left operand of MATCH must be column ref.') +-- !result +SELECT * FROM t_complex_predicate_for_gin_none WHERE CAST(id1 as STRING) match "abc"; +-- result: +E: (1064, 'Getting analyzing error. Detail message: left operand of MATCH must be column ref.') +-- !result +SELECT * FROM t_complex_predicate_for_gin_none WHERE CAST(id1 as STRING) match "%abc%"; +-- result: +E: (1064, 'Getting analyzing error. Detail message: left operand of MATCH must be column ref.') +-- !result +SELECT * FROM t_complex_predicate_for_gin_none WHERE text_column match CONCAT("ab", "c"); +-- result: +E: (1064, 'Getting analyzing error. Detail message: right operand of MATCH must be of type StringLiteral with NOT NULL.') +-- !result +SELECT * FROM t_complex_predicate_for_gin_none WHERE text_column match CONCAT("ab", "%"); +-- result: +E: (1064, 'Getting analyzing error. Detail message: right operand of MATCH must be of type StringLiteral with NOT NULL.') +-- !result +SELECT * FROM t_complex_predicate_for_gin_none WHERE text_column match ""; +-- result: +-- !result +SELECT * FROM t_complex_predicate_for_gin_none WHERE text_column match NULL; +-- result: +E: (1064, 'Getting analyzing error. 
Detail message: right operand of MATCH must be of type StringLiteral with NOT NULL.') +-- !result +INSERT INTO t_complex_predicate_for_gin_english VALUES (1, "abc cbd"); +-- result: +-- !result +INSERT INTO t_complex_predicate_for_gin_english VALUES (2, "cbd edf"); +-- result: +-- !result +SELECT * FROM t_complex_predicate_for_gin_english WHERE upper(text_column) LIKE "%CBD%"; +-- result: +1 abc cbd +2 cbd edf +-- !result +SELECT * FROM t_complex_predicate_for_gin_english WHERE lower(text_column) LIKE "%cbd%"; +-- result: +1 abc cbd +2 cbd edf +-- !result +SELECT * FROM t_complex_predicate_for_gin_english WHERE CAST(id1 as STRING) LIKE "abc"; +-- result: +-- !result +SELECT * FROM t_complex_predicate_for_gin_english WHERE CAST(id1 as STRING) LIKE "%abc%"; +-- result: +-- !result +SELECT * FROM t_complex_predicate_for_gin_english WHERE text_column LIKE CONCAT("ab", "c"); +-- result: +-- !result +SELECT * FROM t_complex_predicate_for_gin_english WHERE text_column LIKE CONCAT("ab", "%"); +-- result: +1 abc cbd +-- !result +SELECT * FROM t_complex_predicate_for_gin_english WHERE upper(text_column) match "%CBD%"; +-- result: +E: (1064, 'Getting analyzing error. Detail message: left operand of MATCH must be column ref.') +-- !result +SELECT * FROM t_complex_predicate_for_gin_english WHERE lower(text_column) match "%cbd%"; +-- result: +E: (1064, 'Getting analyzing error. Detail message: left operand of MATCH must be column ref.') +-- !result +SELECT * FROM t_complex_predicate_for_gin_english WHERE CAST(id1 as STRING) match "abc"; +-- result: +E: (1064, 'Getting analyzing error. Detail message: left operand of MATCH must be column ref.') +-- !result +SELECT * FROM t_complex_predicate_for_gin_english WHERE CAST(id1 as STRING) match "%abc%"; +-- result: +E: (1064, 'Getting analyzing error. 
Detail message: left operand of MATCH must be column ref.') +-- !result +SELECT * FROM t_complex_predicate_for_gin_english WHERE text_column match CONCAT("ab", "c"); +-- result: +E: (1064, 'Getting analyzing error. Detail message: right operand of MATCH must be of type StringLiteral with NOT NULL.') +-- !result +SELECT * FROM t_complex_predicate_for_gin_english WHERE text_column match CONCAT("ab", "%"); +-- result: +E: (1064, 'Getting analyzing error. Detail message: right operand of MATCH must be of type StringLiteral with NOT NULL.') +-- !result +SELECT * FROM t_complex_predicate_for_gin_english WHERE text_column match ""; +-- result: +-- !result +SELECT * FROM t_complex_predicate_for_gin_none WHERE text_column match NULL; +-- result: +E: (1064, 'Getting analyzing error. Detail message: right operand of MATCH must be of type StringLiteral with NOT NULL.') +-- !result +DROP TABLE t_complex_predicate_for_gin_none; +-- result: +-- !result +DROP TABLE t_complex_predicate_for_gin_english; +-- result: +-- !result +-- name: test_delete_and_column_prune +ADMIN SET FRONTEND CONFIG("enable_experimental_gin" = "true"); +-- result: +-- !result +CREATE TABLE `t_delete_and_column_prune` ( + `id1` bigint(20) NOT NULL COMMENT "", + `text_column` varchar(255) NULL COMMENT "", + INDEX gin_none (`text_column`) USING GIN ("parser" = "english") COMMENT 'whole line index' +) ENGINE=OLAP +DUPLICATE KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_delete_and_column_prune VALUES (1, "b"),(2, "b"),(3, "b"); +-- result: +-- !result +SELECT id1 FROM t_delete_and_column_prune WHERE text_column MATCH "b"; +-- result: +1 +2 +3 +-- !result +DELETE FROM t_delete_and_column_prune WHERE id1 = 2; +-- result: +-- !result +SELECT id1 FROM t_delete_and_column_prune WHERE text_column MATCH "b"; +-- result: +1 +3 +-- !result 
+DROP TABLE t_delete_and_column_prune; +-- result: +-- !result +-- name: test_upper_case_column_name +ADMIN SET FRONTEND CONFIG("enable_experimental_gin" = "true"); +-- result: +-- !result +set low_cardinality_optimize_v2 = false; +-- result: +-- !result +set cbo_enable_low_cardinality_optimize = false; +-- result: +-- !result +CREATE TABLE `t_upper_case_column_name` ( + `id1` bigint(20) NOT NULL COMMENT "", + `TeXt` varchar(255) NULL COMMENT "", + INDEX gin_none (`TeXt`) USING GIN ("parser" = "english") COMMENT 'whole line index' +) ENGINE=OLAP +DUPLICATE KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_upper_case_column_name VALUES (1, "b"),(2, "b"),(3, "b"); +-- result: +-- !result +SELECT id1 FROM t_upper_case_column_name WHERE `TeXt` MATCH "b"; +-- result: +1 +2 +3 +-- !result +SELECT id1 FROM t_upper_case_column_name WHERE `Text` MATCH "b"; +-- result: +1 +2 +3 +-- !result +SELECT id1 FROM t_upper_case_column_name WHERE `TEXT` MATCH "b"; +-- result: +1 +2 +3 +-- !result +SELECT id1 FROM t_upper_case_column_name WHERE `text` MATCH "b"; +-- result: +1 +2 +3 +-- !result +DROP TABLE t_upper_case_column_name; +-- result: +-- !result +-- name: test_alter_replicated_storage +ADMIN SET FRONTEND CONFIG("enable_experimental_gin" = "true"); +-- result: +-- !result +CREATE TABLE `t_alter_replicated_storage` ( + `id` bigint(20) NOT NULL COMMENT "", + `text` varchar(255) NULL COMMENT "", + INDEX gin_none (`text`) USING GIN ("parser" = "english") +) ENGINE=OLAP +DUPLICATE KEY(`id`) +DISTRIBUTED BY HASH(`id`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +-- !result +ALTER TABLE t_alter_replicated_storage SET ("replicated_storage" = "true"); +-- result: +E: (5507, 'Getting analyzing error. 
Detail message: Can not enable replicated storage when the table has GIN.') +-- !result +SHOW CREATE TABLE t_alter_replicated_storage; +-- result: +t_alter_replicated_storage CREATE TABLE `t_alter_replicated_storage` ( + `id` bigint(20) NOT NULL COMMENT "", + `text` varchar(255) NULL COMMENT "", + INDEX gin_none (`text`) USING GIN("imp_lib" = "clucene", "parser" = "english") COMMENT '' +) ENGINE=OLAP +DUPLICATE KEY(`id`) +DISTRIBUTED BY HASH(`id`) BUCKETS 1 +PROPERTIES ( +"compression" = "LZ4", +"fast_schema_evolution" = "true", +"replicated_storage" = "false", +"replication_num" = "1" +); +-- !result +DROP TABLE t_alter_replicated_storage; +-- result: +-- !result +-- name: test_disable_global_dict_rewrite +ADMIN SET FRONTEND CONFIG("enable_experimental_gin" = "true"); +-- result: +-- !result +CREATE TABLE `t_disable_global_dict_rewrite` ( + `id` bigint(20) NOT NULL COMMENT "", + `v1` varchar(255) NULL COMMENT "", + `v2` varchar(255) NULL COMMENT "", + INDEX gin_none (`v1`) USING GIN ("parser" = "english") +) ENGINE=OLAP +DUPLICATE KEY(`id`) +DISTRIBUTED BY HASH(`id`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_disable_global_dict_rewrite VALUES (1, "abc", "bcd"), (2, "cbd", "dbs"); +-- result: +-- !result +SELECT * FROM t_disable_global_dict_rewrite; +-- result: +1 abc bcd +2 cbd dbs +-- !result +SELECT SLEEP(5); +-- result: +1 +-- !result +SELECT id FROM t_disable_global_dict_rewrite WHERE v1 MATCH "abc" AND v1 = "abc"; +-- result: +1 +-- !result +SELECT id FROM t_disable_global_dict_rewrite WHERE v1 MATCH "abc" AND v1 LIKE "%abc%"; +-- result: +1 +-- !result +SELECT id FROM t_disable_global_dict_rewrite WHERE v1 MATCH "abc" AND v1 IN ("abc", "ab"); +-- result: +1 +-- !result +SELECT id FROM t_disable_global_dict_rewrite WHERE upper(v2) = "BCD" AND v1 MATCH "abc"; +-- result: +1 +-- !result +SELECT id FROM 
t_disable_global_dict_rewrite WHERE upper(v2) = "BCD" AND v1 MATCH "abc"; +-- result: +1 +-- !result +SELECT id FROM t_disable_global_dict_rewrite WHERE upper(v2) = "BCD" AND v1 MATCH "abc"; +-- result: +1 +-- !result +SELECT sum(id) FROM t_disable_global_dict_rewrite WHERE v1 MATCH "abc" GROUP BY v1; +-- result: +1 +-- !result +SELECT sum(id) FROM t_disable_global_dict_rewrite WHERE v2 MATCH "abc" GROUP BY v2; +-- result: +[REGEX].*Match can only used as a pushdown predicate on column with GIN in a single query.* +-- !result +SELECT id FROM t_disable_global_dict_rewrite WHERE v1 MATCH "abc" ORDER BY v1; +-- result: +1 +-- !result +SELECT id FROM t_disable_global_dict_rewrite WHERE v2 MATCH "abc" ORDER BY v2; +-- result: +[REGEX].*Match can only used as a pushdown predicate on column with GIN in a single query.* +-- !result +DROP TABLE t_disable_global_dict_rewrite; +-- result: +-- !result +-- name: test_create_mv_with_match +ADMIN SET FRONTEND CONFIG("enable_experimental_gin" = "true"); +-- result: +-- !result +CREATE TABLE `t_create_mv_with_match` ( + `id` bigint(20) NOT NULL COMMENT "", + `v1` varchar(255) NULL COMMENT "", + `v2` varchar(255) NULL COMMENT "", + INDEX gin_none (`v1`) USING GIN ("parser" = "english") +) ENGINE=OLAP +DUPLICATE KEY(`id`) +DISTRIBUTED BY HASH(`id`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_create_mv_with_match VALUES (1, "abc", "bcd"); +-- result: +-- !result +CREATE MATERIALIZED VIEW mv AS SELECT id, v1, v2 FROM t_create_mv_with_match WHERE v1 MATCH "abc"; +-- result: +-- !result +function: wait_materialized_view_cancel() +-- result: +None +-- !result +DROP TABLE t_create_mv_with_match; +-- result: +-- !result +-- name: test_alter_gin_col_into_other_type +ADMIN SET FRONTEND CONFIG("enable_experimental_gin" = "true"); +-- result: +-- !result +CREATE TABLE `t_alter_gin_col_into_other_type` ( 
+ `id` bigint(20) NOT NULL COMMENT "", + `v1` varchar(255) NULL COMMENT "", + INDEX gin_none (`v1`) USING GIN ("parser" = "english") +) ENGINE=OLAP +DUPLICATE KEY(`id`) +DISTRIBUTED BY HASH(`id`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +-- !result +ALTER TABLE t_alter_gin_col_into_other_type MODIFY COLUMN v1 BIGINT; +-- result: +E: (1064, 'Cannot modify a column with GIN into non-string type') +-- !result +INSERT INTO t_alter_gin_col_into_other_type VALUES (1, "abc"); +-- result: +-- !result +ALTER TABLE t_alter_gin_col_into_other_type MODIFY COLUMN v1 VARCHAR(2000); +-- result: +-- !result +function: wait_alter_table_finish() +-- result: +None +-- !result +SHOW CREATE TABLE t_alter_gin_col_into_other_type; +-- result: +t_alter_gin_col_into_other_type CREATE TABLE `t_alter_gin_col_into_other_type` ( + `id` bigint(20) NOT NULL COMMENT "", + `v1` varchar(2000) NULL COMMENT "", + INDEX gin_none (`v1`) USING GIN("imp_lib" = "clucene", "parser" = "english") COMMENT '' +) ENGINE=OLAP +DUPLICATE KEY(`id`) +DISTRIBUTED BY HASH(`id`) BUCKETS 1 +PROPERTIES ( +"compression" = "LZ4", +"fast_schema_evolution" = "true", +"replicated_storage" = "false", +"replication_num" = "1" +); +-- !result +SELECT * FROM t_alter_gin_col_into_other_type; +-- result: +1 abc +-- !result +DROP TABLE t_alter_gin_col_into_other_type; +-- result: +-- !result +-- name: test_gin_var @sequential +ADMIN SET FRONTEND CONFIG("enable_experimental_gin" = "true"); +-- result: +-- !result +CREATE TABLE `t_gin_var` ( + `id` bigint(20) NOT NULL COMMENT "", + `v1` varchar(255) NULL COMMENT "", + INDEX gin_none (`v1`) USING GIN ("parser" = "standard") +) ENGINE=OLAP +DUPLICATE KEY(`id`) +DISTRIBUTED BY HASH(`id`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO 
t_gin_var VALUES (1, "abc bcd"); +-- result: +-- !result +SELECT * FROM t_gin_var WHERE v1 MATCH "abc"; +-- result: +1 abc bcd +-- !result +SET enable_gin_filter = false; +-- result: +-- !result +SELECT * FROM t_gin_var WHERE v1 MATCH "abc"; +-- result: +[REGEX].*Match can only used as a pushdown predicate on column with GIN in a single query.* +-- !result +SET enable_gin_filter = true; +-- result: +-- !result +SELECT * FROM t_gin_var WHERE v1 MATCH "abc"; +-- result: +1 abc bcd +-- !result +DROP TABLE t_gin_var; +-- result: +-- !result +ADMIN SET FRONTEND CONFIG("enable_experimental_gin" = "false"); +-- result: +-- !result +CREATE TABLE `t_gin_var` ( + `id` bigint(20) NOT NULL COMMENT "", + `v1` varchar(255) NULL COMMENT "", + INDEX gin_none (`v1`) USING GIN ("parser" = "standard") +) ENGINE=OLAP +DUPLICATE KEY(`id`) +DISTRIBUTED BY HASH(`id`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +E: (1064, 'Getting analyzing error. Detail message: The inverted index is disabled, enable it by setting FE config `enable_experimental_gin` to true.') +-- !result +CREATE TABLE `t_gin_var` ( + `id` bigint(20) NOT NULL COMMENT "", + `v1` varchar(255) NULL COMMENT "" +) ENGINE=OLAP +DUPLICATE KEY(`id`) +DISTRIBUTED BY HASH(`id`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +-- !result +ALTER TABLE t_gin_var add index idx (v1) USING GIN('parser' = 'standard'); +-- result: +E: (1064, 'Getting analyzing error. 
Detail message: The inverted index is disabled, enable it by setting FE config `enable_experimental_gin` to true.') +-- !result +ADMIN SET FRONTEND CONFIG("enable_experimental_gin" = "true"); +-- result: +-- !result +DROP TABLE t_gin_var; +-- result: +-- !result +CREATE TABLE `t_gin_var` ( + `id` bigint(20) NOT NULL COMMENT "", + `v1` varchar(255) NULL COMMENT "", + INDEX gin_none (`v1`) USING GIN ("parser" = "standard") +) ENGINE=OLAP +DUPLICATE KEY(`id`) +DISTRIBUTED BY HASH(`id`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +-- !result +DROP TABLE t_gin_var; +-- result: +-- !result +-- name: test_gin_match_empty +CREATE TABLE `t_gin_match_empty` ( + `k` BIGINT NOT NULL COMMENT "", + `v1` string COMMENT "", + `v2` string COMMENT "", + `v3` string COMMENT "", + INDEX idx1 (v1) USING GIN ('parser' = 'english'), + INDEX idx2 (v2) USING GIN ('parser' = 'chinese'), + INDEX idx3 (v3) USING GIN ('parser' = 'standard') +) ENGINE=OLAP +DUPLICATE KEY(`k`) +DISTRIBUTED BY HASH(`k`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"in_memory" = "false", +"enable_persistent_index" = "false", +"replicated_storage" = "false" +); +-- result: +-- !result +insert into t_gin_match_empty values (1, "中文50中文", "中文50中文", "中文50中文"); +-- result: +-- !result +SELECT count(*) FROM t_gin_match_empty WHERE v1 MATCH ""; +-- result: +0 +-- !result +SELECT count(*) FROM t_gin_match_empty WHERE v2 MATCH ""; +-- result: +0 +-- !result +SELECT count(*) FROM t_gin_match_empty WHERE v3 MATCH ""; +-- result: +0 +-- !result +DROP TABLE t_gin_match_empty; +-- result: +-- !result +-- name: test_gin_view +ADMIN SET FRONTEND CONFIG("enable_experimental_gin" = "true"); +-- result: +-- !result +CREATE TABLE `t_gin_view` ( + `id` bigint(20) NOT NULL COMMENT "", + `v1` varchar(255) NULL COMMENT "", + INDEX gin_none (`v1`) USING GIN ("parser" = "english") +) ENGINE=OLAP +DUPLICATE KEY(`id`) 
+DISTRIBUTED BY HASH(`id`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_gin_view VALUES (1, "abd bcd"); +-- result: +-- !result +SELECT * FROM t_gin_view; +-- result: +1 abd bcd +-- !result +CREATE VIEW test_view1 (column1, column2) AS SELECT * FROM t_gin_view; +-- result: +-- !result +SELECT * FROM test_view1 WHERE column2 MATCH "abd" AND column2 MATCH "bcd"; +-- result: +1 abd bcd +-- !result +SELECT * FROM test_view1 WHERE column2 MATCH "abd" AND column2 MATCH "bcd"; +-- result: +1 abd bcd +-- !result +SELECT * FROM test_view1 WHERE column2 MATCH "abd" AND column2 MATCH "bcd"; +-- result: +1 abd bcd +-- !result +SELECT * FROM test_view1 WHERE column2 MATCH "abd" AND column2 MATCH "bcd"; +-- result: +1 abd bcd +-- !result +SELECT * FROM test_view1 WHERE column2 MATCH "abd" AND column2 MATCH "bcd"; +-- result: +1 abd bcd +-- !result +SELECT * FROM test_view1 WHERE column2 MATCH "abd" AND column2 MATCH "bcd"; +-- result: +1 abd bcd +-- !result +SELECT * FROM test_view1 WHERE column2 MATCH "abd" AND column2 MATCH "bcd"; +-- result: +1 abd bcd +-- !result +SELECT * FROM test_view1 WHERE column2 MATCH "abd" AND column2 MATCH "bcd"; +-- result: +1 abd bcd +-- !result +SELECT * FROM test_view1 WHERE column2 MATCH "abd" AND column2 MATCH "bcd"; +-- result: +1 abd bcd +-- !result +SELECT * FROM test_view1 WHERE column2 MATCH "abd" AND column2 MATCH "bcd"; +-- result: +1 abd bcd +-- !result +DROP TABLE t_gin_view; +-- result: +-- !result +CREATE TABLE duplicate_table_demo_datatype_not_replicated_all_varchar ( AAA DATETIME not NULL COMMENT "", BBB VARCHAR(200) not NULL COMMENT "", CCC VARCHAR(200) not NULL COMMENT "", DDD VARCHAR(20000) COMMENT "", EEE LARGEINT NULL COMMENT "", FFF DECIMAL(20,10) NULL COMMENT "", GGG VARCHAR(200) NULL COMMENT "", HHH FLOAT NULL COMMENT "", III BOOLEAN NULL COMMENT "", KKK CHAR(20) NULL COMMENT "", LLL 
STRING NULL COMMENT "", MMM VARCHAR(20) NULL COMMENT "", NNN BINARY NULL COMMENT "", OOO TINYINT NULL COMMENT "", PPP DATETIME NULL COMMENT "", QQQ ARRAY NULL COMMENT "", RRR JSON NULL COMMENT "", SSS MAP NULL COMMENT "", TTT STRUCT NULL COMMENT "", INDEX init_bitmap_index (KKK) USING BITMAP ) duplicate KEY(AAA, BBB, CCC) PARTITION BY RANGE (`AAA`) ( START ("1970-01-01") END ("2030-01-01") EVERY (INTERVAL 30 YEAR) ) DISTRIBUTED BY HASH(`AAA`, `BBB`) BUCKETS 3 ORDER BY(`AAA`,`BBB`,`CCC`,`DDD`) PROPERTIES ( "replicated_storage"="false", "replication_num" = "1", "storage_format" = "v2", "enable_persistent_index" = "true", "bloom_filter_columns" = "MMM", "unique_constraints" = "GGG" ); +-- result: +-- !result +create view test_view (AAA, DDD) as select AAA, max(DDD) from duplicate_table_demo_datatype_not_replicated_all_varchar group by AAA; +-- result: +-- !result +CREATE INDEX idx ON duplicate_table_demo_datatype_not_replicated_all_varchar(DDD) USING GIN('parser' = 'english'); +-- result: +-- !result +function: wait_alter_table_finish() +-- result: +None +-- !result +insert into duplicate_table_demo_datatype_not_replicated_all_varchar values ('1974-08-20 23:13:25', 'xIjfSXnegdnZiZGQMaxo', 'syHwIOMctmDLDGCibEun', 'hIbilUEGdLbCnaZASCVL', 6299, 25361.52081, 'QuTsacRyxiIkBjEmjhNu', -11.4812925061712, True, 'QcLRdQJMhtPXojJUjkUd', 'yUeFlbzomaPDwKeaHylx', 'WqQyGEjEYpvLzfBXYUCB', '', 8, '2015-11-03 16:31:47', [2621, 5950, 13171], '{"job": "Administrator, Civil Service", "company": "Morris-Anderson", "ssn": "823-67-5554", "residence": "59688 Hanna Shoal Apt. 
586\nWest Waynefort, CO 69652", "current_location": ["-64.3777465", "21.079566"], "blood_group": "O-", "website": ["http://young.biz/", "https://cobb-bell.com/", "http://www.roberts-garrison.com/", "http://jones.com/"], "username": "howardarcher", "name": "John Mccullough", "sex": "M", "address": "1361 Susan Mountain\nJasonbury, MI 85084", "mail": "lovejennifer@gmail.com", "birthdate": "1928-06-25"}', null, null); +-- result: +-- !result +select * from test_view where DDD match 'msrjabmbwkxmjggulkiy'; +-- result: +[REGEX].*Match can only used as a pushdown predicate on column with GIN in a single query.* +-- !result +select * from test_view where DDD match 'msrjabmbwkxmjggulkiy'; +-- result: +[REGEX].*Match can only used as a pushdown predicate on column with GIN in a single query.* +-- !result +select * from test_view where DDD match 'msrjabmbwkxmjggulkiy'; +-- result: +[REGEX].*Match can only used as a pushdown predicate on column with GIN in a single query.* +-- !result +select * from test_view where DDD match 'msrjabmbwkxmjggulkiy'; +-- result: +[REGEX].*Match can only used as a pushdown predicate on column with GIN in a single query.* +-- !result +select * from test_view where DDD match 'msrjabmbwkxmjggulkiy'; +-- result: +[REGEX].*Match can only used as a pushdown predicate on column with GIN in a single query.* +-- !result +select * from test_view where DDD match 'msrjabmbwkxmjggulkiy'; +-- result: +[REGEX].*Match can only used as a pushdown predicate on column with GIN in a single query.* +-- !result +select * from test_view where DDD match 'msrjabmbwkxmjggulkiy'; +-- result: +[REGEX].*Match can only used as a pushdown predicate on column with GIN in a single query.* +-- !result +select * from test_view where DDD match 'msrjabmbwkxmjggulkiy'; +-- result: +[REGEX].*Match can only used as a pushdown predicate on column with GIN in a single query.* +-- !result +select * from test_view where DDD match 'msrjabmbwkxmjggulkiy'; +-- result: +[REGEX].*Match can only 
used as a pushdown predicate on column with GIN in a single query.* +-- !result +select * from test_view where DDD match 'msrjabmbwkxmjggulkiy'; +-- result: +[REGEX].*Match can only used as a pushdown predicate on column with GIN in a single query.* +-- !result +DROP VIEW test_view; +-- result: +-- !result +DROP TABLE duplicate_table_demo_datatype_not_replicated_all_varchar; +-- result: +-- !result +-- name: test_vertical_compaction +ADMIN SET FRONTEND CONFIG("enable_experimental_gin" = "true"); +-- result: +-- !result +CREATE TABLE `t_vertical_compaction` ( + `id1` bigint(20) NOT NULL COMMENT "", + `text_column` varchar(255) NULL COMMENT "", + `col1` varchar(255) DEFAULT "ABC" COMMENT "", + `col2` varchar(255) DEFAULT "ABC" COMMENT "", + `col3` varchar(255) DEFAULT "ABC" COMMENT "", + `col4` varchar(255) DEFAULT "ABC" COMMENT "", + `col5` varchar(255) DEFAULT "ABC" COMMENT "", + `col6` varchar(255) DEFAULT "ABC" COMMENT "", + `col7` varchar(255) DEFAULT "ABC" COMMENT "", + `col8` varchar(255) DEFAULT "ABC" COMMENT "", + `col9` varchar(255) DEFAULT "ABC" COMMENT "", + `col10` varchar(255) DEFAULT "ABC" COMMENT "", + INDEX gin_none (`text_column`) USING GIN ("parser" = "none") COMMENT 'whole line index' +) ENGINE=OLAP +DUPLICATE KEY(`id1`) +DISTRIBUTED BY HASH(`id1`) BUCKETS 1 +PROPERTIES ( +"replication_num" = "1", +"enable_persistent_index" = "false", +"replicated_storage" = "false", +"compression" = "LZ4" +); +-- result: +-- !result +INSERT INTO t_vertical_compaction (id1, text_column) VALUES (1, "abc"); +-- result: +-- !result +INSERT INTO t_vertical_compaction (id1, text_column) VALUES (2, "ABC"); +-- result: +-- !result +INSERT INTO t_vertical_compaction (id1, text_column) VALUES (3, "bcd"); +-- result: +-- !result +INSERT INTO t_vertical_compaction (id1, text_column) VALUES (4, "BCD"); +-- result: +-- !result +ALTER TABLE t_vertical_compaction BASE COMPACT; +-- result: +-- !result +SELECT sleep(10); +-- result: +1 +-- !result +DROP TABLE 
t_vertical_compaction; +-- result: +-- !result \ No newline at end of file diff --git a/test/sql/test_json/R/to_json b/test/sql/test_json/R/to_json index c88e9d3815b120..18dfe61d23b0ea 100644 --- a/test/sql/test_json/R/to_json +++ b/test/sql/test_json/R/to_json @@ -33,4 +33,16 @@ SELECT to_json(row(1, 1)); SELECT to_json(NULL); -- result: None --- !result \ No newline at end of file +<<<<<<< HEAD +-- !result +======= +-- !result +select /*+SET_VAR(sql_mode='ONLY_FULL_GROUP_BY,ALLOW_THROW_EXCEPTION')*/ to_json(map{null:null}); +-- result: +[REGEX].*key of Map should not be null.* +-- !result +select /*+SET_VAR(sql_mode='ONLY_FULL_GROUP_BY')*/ to_json(map{null:null}); +-- result: +None +-- !result +>>>>>>> 52c55f9c2b ([BugFix] Fix duplicate entries in be_logs; Add reset_delvec in script; Add BE id in error message when query failed (#51204)) diff --git a/test/sql/test_string_functions/R/test_string_functions b/test/sql/test_string_functions/R/test_string_functions index 6c38d6d22a7d00..0fb4ffa740408b 100644 --- a/test/sql/test_string_functions/R/test_string_functions +++ b/test/sql/test_string_functions/R/test_string_functions @@ -262,4 +262,270 @@ https://docs.starrocks数据库.io/en-us/latest/quick_start/Deploy?name=快速 select url_decode(NULL); -- result: None +<<<<<<< HEAD +======= +-- !result +select substring_index("hello world", " ", 1); +-- result: +hello +-- !result +select substring_index("hello world", " ", 2); +-- result: +hello world +-- !result +select substring_index("hello world", " ", -1); +-- result: +world +-- !result +select substring_index("hello world", " ", -2); +-- result: +hello world +-- !result +select substring_index("com.mysql.com", ".", 1); +-- result: +com +-- !result +select substring_index("com.mysql.com", ".", 2); +-- result: +com.mysql +-- !result +select substring_index("com.mysql.com", ".", 3); +-- result: +com.mysql.com +-- !result +select substring_index("com.mysql.com", ".", -1); +-- result: +com +-- !result +select 
substring_index("com.mysql.com", ".", -2); +-- result: +mysql.com +-- !result +select substring_index("com.mysql.com", ".", -3); +-- result: +com.mysql.com +-- !result +create table t2(c0 varchar(20), c1 varchar(20)) + DUPLICATE KEY(c0) + DISTRIBUTED BY HASH(c0) + BUCKETS 1 + PROPERTIES('replication_num'='1'); +-- result: +-- !result +insert into t2 values ('hello world', 'com.mysql.com'); +-- result: +-- !result +select substring_index(c0, " ", 1) from t2; +-- result: +hello +-- !result +select substring_index(c0, " ", 2) from t2; +-- result: +hello world +-- !result +select substring_index(c0, " ", -1) from t2; +-- result: +world +-- !result +select substring_index(c0, " ", -2) from t2; +-- result: +hello world +-- !result +select substring_index(c1, ".", 1) from t2; +-- result: +com +-- !result +select substring_index(c1, ".", 2) from t2; +-- result: +com.mysql +-- !result +select substring_index(c1, ".", 3) from t2; +-- result: +com.mysql.com +-- !result +select substring_index(c1, ".", -1) from t2; +-- result: +com +-- !result +select substring_index(c1, ".", -2) from t2; +-- result: +mysql.com +-- !result +select substring_index(c1, ".", -3) from t2; +-- result: +com.mysql.com +-- !result +select crc32("starrocks"); +-- result: +2312449062 +-- !result +select crc32("STARROCKS"); +-- result: +3440849609 +-- !result +select crc32("中文"); +-- result: +1510599991 +-- !result +create table crc01(c0 varchar(20), c1 varchar(20), c2 varchar(20)) + DUPLICATE KEY(c0) + DISTRIBUTED BY HASH(c0) + BUCKETS 1 + PROPERTIES('replication_num'='1'); +-- result: +-- !result +insert into crc01 values ('hello world', 'com.mysql.com', "镜舟科技"); +-- result: +-- !result +select crc32(c0) from crc01; +-- result: +222957957 +-- !result +select crc32(c1) from crc01; +-- result: +2823441944 +-- !result +select crc32(c2) from crc01; +-- result: +2291122336 +-- !result +-- name: test_ngram_search +select ngram_search("chinese","china",4); +-- result: +0.5 +-- !result +select 
ngram_search("chinese","CHINESE",4); +-- result: +0.0 +-- !result +select ngram_search_case_insensitive("chinese","CHINESE",4); +-- result: +1.0 +-- !result +select ngram_search_case_insensitive("CHINESE","chinese",4); +-- result: +1.0 +-- !result +select ngram_search("chinese","",4); +-- result: +0.0 +-- !result +select ngram_search("","chinese",4); +-- result: +0.0 +-- !result +select ngram_search("","",4); +-- result: +0.0 +-- !result +select ngram_search("chi","chi",4); +-- result: +0.0 +-- !result +CREATE TABLE `string_table` ( + `rowkey` varchar(300) NOT NULL COMMENT "" +) ENGINE=OLAP +DUPLICATE KEY(`rowkey`) +COMMENT "OLAP" +DISTRIBUTED BY HASH(`rowkey`) BUCKETS 64 +PROPERTIES ( + "replication_num" = "1", + "bucket_size" = "4294967296", + "storage_volume" = "builtin_storage_volume", + "enable_persistent_index" = "false", + "compression" = "LZ4" +); +-- result: +-- !result +insert into string_table values +("000073a7-274f-46bf-bfaf-678868cc26cd"), +("e6249ba1-5b54-46bf-bfaf-89d69094b757"), +("93da4b36-5401-46bf-bfa7-2bde65779623"), +("2548c7aa-d94f-46bf-b0a4-d769f248cbb2"), +("1bd32347-274f-4a30-93f3-9087594de9cd"); +-- result: +-- !result +select ngram_search(rowkey,"000073a7-274f-46bf-bfaf-678868cc26cd",4) as a from string_table order by a; +-- result: +0.15151512622833252 +0.1818181872367859 +0.21212118864059448 +0.24242424964904785 +1.0 +-- !result +select ngram_search(rowkey, "e6249ba1-5b54-46bf-bfaf-89d69094b757",4) as a from string_table order by a; +-- result: +0.0 +0.12121212482452393 +0.1818181872367859 +0.24242424964904785 +1.0 +-- !result +select ngram_search("000073a7-274f-46bf-bfaf-678868cc26cd",rowkey,4) from string_table; +-- result: +E: (1064, "Getting analyzing error from line 1, column 7 to line 1, column 67. 
Detail message: ngram_search function 's second parameter and third parameter must be constant.") +-- !result +select ngram_search("chi","chi",0); +-- result: +[REGEX].*ngram search's third parameter must be a positive number.* +-- !result +select ngram_search("chi","chi",-1); +-- result: +[REGEX].*ngram search's third parameter must be a positive number.* +-- !result +select ngram_search(date('2020-06-23'), "2020", 4); +-- result: +E: (1064, "Getting analyzing error from line 1, column 7 to line 1, column 49. Detail message: ngram_search function 's first parameter and second parameter must be string type.") +-- !result +select sum(result) from ( select ngram_search("normal_string", "normal_string", 5) as result from ( select generate_series from TABLE(generate_series(0, 4097 - 1)) ) as t1) as t2; +-- result: +4097.0 +-- !result +-- name: test_ngram_search_with_low_cardinality +CREATE TABLE __row_util_1 ( + k1 bigint null +) ENGINE=OLAP +DUPLICATE KEY(`k1`) +DISTRIBUTED BY HASH(`k1`) BUCKETS 48 +PROPERTIES ( + "replication_num" = "1" +); +-- result: +-- !result +insert into __row_util_1 select generate_series from TABLE(generate_series(0, 5000)); +-- result: +-- !result +CREATE TABLE left_table ( + id int, + nation string, + exsit_hot_value int +) +ENGINE=olap +DUPLICATE KEY(id) +DISTRIBUTED BY HASH(id) buckets 32 +PROPERTIES ( + "replication_num" = "1" +); +-- result: +-- !result +insert into left_table +select + cast(rand() * 100000000 as int), + CASE + WHEN RAND() > 0.8 THEN 'china' + WHEN RAND() > 0.6 THEN 'usa' + WHEN RAND() > 0.4 THEN 'russian' + WHEN RAND() > 0.2 THEN 'canada' + ELSE 'japan' + END, + case when RAND() > 0.99 THEN k1 + ELSE k1 % 5 + END +from __row_util_1; +-- result: +-- !result +select sum(c0) > 500 from (select ngram_search(nation, 'china', 4) as c0 from left_table)t0; +-- result: +1 +>>>>>>> 52c55f9c2b ([BugFix] Fix duplicate entries in be_logs; Add reset_delvec in script; Add BE id in error message when query failed (#51204)) -- !result 
\ No newline at end of file diff --git a/test/sql/test_trino_dialect/R/test_trino_dialect b/test/sql/test_trino_dialect/R/test_trino_dialect index d5a7ccfb2093c9..11c121c8b08eb9 100644 --- a/test/sql/test_trino_dialect/R/test_trino_dialect +++ b/test/sql/test_trino_dialect/R/test_trino_dialect @@ -50,11 +50,11 @@ None None -- !result select c2['not-existed'] from map_array_tbl order by c1; -- result: -E: (1064, "Key not present in map: 'not-existed'") +[REGEX].*Key not present in map: 'not-existed'.* -- !result select c3[100] from map_array_tbl order by c1; -- result: -E: (1064, 'Array subscript must be less than or equal to array length: 100 > 1') +[REGEX].*Array subscript must be less than or equal to array length: 100 > 1.* -- !result select element_at(c2, 'not-existed'), element_at(c3, 100) from map_array_tbl order by c1; -- result: diff --git a/test/sql/test_udf/R/test_jvm_udf b/test/sql/test_udf/R/test_jvm_udf index 53039e414b98bd..274e0241c3ebab 100644 --- a/test/sql/test_udf/R/test_jvm_udf +++ b/test/sql/test_udf/R/test_jvm_udf @@ -73,7 +73,7 @@ select count(udtfstring) from t0, udtfstring(c1); -- !result select count(udtfstring_wrong_match) from t0, udtfstring_wrong_match(c1); -- result: -E: (1064, 'Type not matched, expect class java.lang.Integer, but got class java.lang.String') +[REGEX].*Type not matched, expect class java.lang.Integer, but got class java.lang.String.* -- !result select count(udtfint) from t0, udtfint(c1); -- result: