diff --git a/README.md b/README.md index 4236edfd90aeb82838ff0016a53fcf76dccecfef..73d47eee16304722a5c128def3c8dde833b06663 100644 --- a/README.md +++ b/README.md @@ -34,6 +34,13 @@ Gitee 是 OSCHINA 推出的基于 Git 的代码托管平台(同时支持 SVN 2. 修改如涉及文档,需要同步在docs仓提交文档修改,插件相关文档入口: https://gitee.com/opengauss/docs/tree/master/content/zh/docs/Developerguide/dolphin-Extension.md 。注意添加SQL语法时,需要增加必要的示例。 3. 新增/修改的代码需要使用宏 DOLPHIN 进行控制,方便后续回合openGauss-server仓代码时,区分哪些是插件修改的代码,哪些是内核修改的代码。修改的代码通过宏的IF/ELSE分支保留原始代码。主要控制 ```.h/.cpp``` 文件, ```.y``` 文件不太好使用宏控制,可以不处理。 +### check用例编写规范 +1. check用例默认使用的数据库为contrib_regression数据库,B兼容类型。编写用例时无需自己手动创建B类型数据库。 +2. 建议通过schema的方式隔离不同用例间的结果影响。可参考现有用例的写法。 +3. 单个用例执行时间不宜太长,建议不超过10s,超过的应当考虑优化用例或进行拆分。 +4. 非必要不新增测试组,一个测试组可允许5~10个用例一起并行执行。 +5. 对于SELECT语句强烈建议增加order by子句,保证SELECT语句查询结果稳定。 + #### 特技 1. 使用 Readme\_XXX.md 来支持不同的语言,例如 Readme\_en.md, Readme\_zh.md diff --git a/contrib/dolphin/Makefile b/contrib/dolphin/Makefile index 2bd6f1571b79ac89296603496eeda7e1b49a315d..c31dd323b4a5bf103d3e17c9e2fab30a776c925d 100644 --- a/contrib/dolphin/Makefile +++ b/contrib/dolphin/Makefile @@ -132,7 +132,7 @@ ifdef MJDBC_TEST REGRESS+=b_proto_jdbc endif -REGRESS_OPTS = --dlpath=$(top_builddir)/src/test/regress -c 0 -d 1 --single_node -p ${p} --schedule=./parallel_schedule_dolphin${PART} --regconf=regress.conf -r 1 -n --keep_last_data=false --temp-config=./make_check_postgresql.conf +REGRESS_OPTS = --dlpath=$(top_builddir)/src/test/regress -c 0 -d 1 --single_node -p ${p} --schedule=./parallel_schedule_dolphin${PART} --regconf=regress.conf -r 1 -n --keep_last_data=false --temp-config=./make_check_postgresql.conf --dbcmpt=B export dp = $(shell expr $(p) + 3) export THIRD_PARTY_LIBS = $(with_3rd) diff --git a/contrib/dolphin/expected/alter_function_test/alter_function.out b/contrib/dolphin/expected/alter_function_test/alter_function.out old mode 100755 new mode 100644 index f1ebb93a876b1533d2a00a505f08ebcdd22f28f2..3a256079cd588a8935da2f050aa48635aee1d5a3 --- 
a/contrib/dolphin/expected/alter_function_test/alter_function.out +++ b/contrib/dolphin/expected/alter_function_test/alter_function.out @@ -1,7 +1,5 @@ -drop database if exists db_alter_func_1; -NOTICE: database "db_alter_func_1" does not exist, skipping -create database db_alter_func_1 dbcompatibility 'B'; -\c db_alter_func_1 +create schema db_alter_func_1; +set current_schema to 'db_alter_func_1'; CREATE FUNCTION f1 (s CHAR(20)) RETURNS int AS $$ select 1 $$ ; ALTER FUNCTION f1 (s char(20)) NO SQL; ALTER FUNCTION f1 (s char(20)) CONTAINS SQL; @@ -103,5 +101,12 @@ select * from t1; 1 (3 rows) -\c postgres -drop database db_alter_func_1; +drop schema db_alter_func_1 cascade; +NOTICE: drop cascades to 6 other objects +DETAIL: drop cascades to function f1(character) +drop cascades to function f3() +drop cascades to function f5(integer) +drop cascades to function f5(real) +drop cascades to function f5() +drop cascades to table t1 +reset current_schema; diff --git a/contrib/dolphin/expected/alter_function_test/alter_procedure.out b/contrib/dolphin/expected/alter_function_test/alter_procedure.out old mode 100755 new mode 100644 index 6fa9c59442251fc5efb8520631e743b017b10e8a..9b33face2cb8f7013b11d4de505cf6ebea51c8ed --- a/contrib/dolphin/expected/alter_function_test/alter_procedure.out +++ b/contrib/dolphin/expected/alter_function_test/alter_procedure.out @@ -1,7 +1,5 @@ -drop database if exists db_alter_func_2; -NOTICE: database "db_alter_func_2" does not exist, skipping -create database db_alter_func_2 dbcompatibility 'B'; -\c db_alter_func_2 +create schema db_alter_func_2; +set current_schema to 'db_alter_func_2'; CREATE OR REPLACE PROCEDURE proc1() AS BEGIN @@ -64,5 +62,6 @@ ERROR: function "proc1" already exists with same argument types -- 修改不存在的存储过程 ALTER PROCEDURE proc2 READS SQL DATA; ERROR: function proc2 does not exist -\c postgres -drop database db_alter_func_2; +drop schema db_alter_func_2 cascade; +NOTICE: drop cascades to function proc1() +reset 
current_schema; diff --git a/contrib/dolphin/expected/alter_function_test/language_sql.out b/contrib/dolphin/expected/alter_function_test/language_sql.out index ed911fc79b1400d1f025482448712ffa78eafcf0..99635ead62dd204d5cbe1b7ce6c00d7719d0f3fa 100644 --- a/contrib/dolphin/expected/alter_function_test/language_sql.out +++ b/contrib/dolphin/expected/alter_function_test/language_sql.out @@ -1,7 +1,5 @@ -drop database if exists db_alter_func_sql; -NOTICE: database "db_alter_func_sql" does not exist, skipping -create database db_alter_func_sql dbcompatibility 'B'; -\c db_alter_func_sql +create schema db_alter_func_sql; +set current_schema to 'db_alter_func_sql'; -- test func CREATE FUNCTION f1 (s CHAR(20)) RETURNS int AS $$ begin @@ -194,5 +192,16 @@ call pro_3(1,2,'a'); (1 row) -\c postgres -drop database db_alter_func_sql; +drop schema db_alter_func_sql cascade; +NOTICE: drop cascades to 10 other objects +DETAIL: drop cascades to function f1(character) +drop cascades to function func(integer) +drop cascades to function proc1(integer) +drop cascades to table language_1093039 +drop cascades to function fun_1(integer,integer,character varying) +drop cascades to function fun_2(integer,integer,character varying) +drop cascades to function fun_3(integer,integer,character varying) +drop cascades to function pro_1(integer,integer,character varying) +drop cascades to function pro_2(integer,integer,character varying) +drop cascades to function pro_3(integer,integer,character varying) +reset current_schema; diff --git a/contrib/dolphin/expected/ansi_quotes_start.out b/contrib/dolphin/expected/ansi_quotes_start.out index bde70e0aa5f70b2617758ed9f7707612764dc96e..8855fe711118c95566eb2e46618f981500d422ae 100644 --- a/contrib/dolphin/expected/ansi_quotes_start.out +++ b/contrib/dolphin/expected/ansi_quotes_start.out @@ -1,2 +1 @@ -CREATE DATABASE test_ansi_quotes DBCOMPATIBILITY 'B'; -\c test_ansi_quotes +create schema test_ansi_quotes; diff --git 
a/contrib/dolphin/expected/ansi_quotes_test.out b/contrib/dolphin/expected/ansi_quotes_test.out index 6aa888e21cdafbf9d5e532d23136dd54efbb1290..09f9a93764ecc6fc623b32fcf651bbc9b3703288 100644 --- a/contrib/dolphin/expected/ansi_quotes_test.out +++ b/contrib/dolphin/expected/ansi_quotes_test.out @@ -1,4 +1,4 @@ -\c test_ansi_quotes +set current_schema to 'test_ansi_quotes'; CREATE TABLE test_quotes (a text); show dolphin.sql_mode; dolphin.sql_mode @@ -67,27 +67,23 @@ desc test_quotes_2; -- test show show tables; - Tables_in_public -------------------------------- - index_statistic - pg_type_nonstrict_basic_value + Tables_in_test_ansi_quotes +---------------------------- test_quotes test_quotes_2 -(4 rows) +(2 rows) show full tables; - Tables_in_public | Table_type --------------------------------+------------ - index_statistic | VIEW - pg_type_nonstrict_basic_value | VIEW - test_quotes | BASE TABLE - test_quotes_2 | BASE TABLE -(4 rows) + Tables_in_test_ansi_quotes | Table_type +----------------------------+------------ + test_quotes | BASE TABLE + test_quotes_2 | BASE TABLE +(2 rows) show create table test_quotes_2; Table | Create Table ---------------+----------------------------------------- - test_quotes_2 | SET search_path = public; + + test_quotes_2 | SET search_path = test_ansi_quotes; + | CREATE TABLE test_quotes_2 ( + | a text + | ) + @@ -147,5 +143,8 @@ select * from test_quotes where a = "test1"; test1 (1 row) -\c postgres -DROP DATABASE test_ansi_quotes; +drop schema test_ansi_quotes cascade; +NOTICE: drop cascades to 2 other objects +DETAIL: drop cascades to table test_quotes +drop cascades to table test_quotes_2 +reset current_schema; diff --git a/contrib/dolphin/expected/any_value_test.out b/contrib/dolphin/expected/any_value_test.out index 4cf1549af8cdfe42c748fea50648ea57fde66158..33f3978b7fafd7f2c03c2647e2d670b702f56c77 100644 --- a/contrib/dolphin/expected/any_value_test.out +++ b/contrib/dolphin/expected/any_value_test.out @@ -1,7 +1,5 @@ -drop 
DATABASE if exists any_value_test; -NOTICE: database "any_value_test" does not exist, skipping -CREATE DATABASE any_value_test dbcompatibility 'B'; -\c any_value_test; +create schema any_value_test; +set current_schema to 'any_value_test'; --test int type create table test_int1(a tinyint, b int); create table test_int2(a smallint, b int); @@ -192,5 +190,26 @@ select any_value(c) from test_blob_bytea group by a; \xdabc (2 rows) -\c postgres; -drop DATABASE if exists any_value_test; +drop schema any_value_test cascade; +NOTICE: drop cascades to 20 other objects +DETAIL: drop cascades to table test_int1 +drop cascades to table test_int2 +drop cascades to table test_int4 +drop cascades to table test_int8 +drop cascades to table test_uint1 +drop cascades to table test_uint2 +drop cascades to table test_uint4 +drop cascades to table test_uint8 +drop cascades to table test_float +drop cascades to table test_double +drop cascades to table test_numeric +drop cascades to table test_char +drop cascades to table test_varchar +drop cascades to table test_text +drop cascades to table test_date +drop cascades to table test_bool +drop cascades to table test_year +drop cascades to type test_set_a_set +drop cascades to table test_set +drop cascades to table test_blob_bytea +reset current_schema; diff --git a/contrib/dolphin/expected/ast.out b/contrib/dolphin/expected/ast.out index 1cec80f946417fe668a3be4a910a87a57ffa412b..b14aa1cbb9580c9d1926ead06b7edadce849a227 100644 --- a/contrib/dolphin/expected/ast.out +++ b/contrib/dolphin/expected/ast.out @@ -1,7 +1,5 @@ -drop database if exists ast_test; -NOTICE: database "ast_test" does not exist, skipping -create database ast_test dbcompatibility 'b'; -\c ast_test +create schema ast_test; +set current_schema to 'ast_test'; ast select * from test; ast create table test(id int); ast create table test(id int(5)); @@ -13,5 +11,5 @@ ast select 1;select 1; (1 row) ast select 1;ast select 1; -\c postgres -drop database ast_test; +drop schema 
ast_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/b_comments.out b/contrib/dolphin/expected/b_comments.out index ed49c110fd59580aeb3cc7a1290925056e1e236e..24c21c171a80fa9e07721f008b403f048f50e981 100644 --- a/contrib/dolphin/expected/b_comments.out +++ b/contrib/dolphin/expected/b_comments.out @@ -1,4 +1,6 @@ /* unsupported */ +create database b_comments dbcompatibility 'A'; +\c b_comments create schema b_comments; set search_path to 'b_comments'; create table test_row(a int not null comment 'test_row.a'); @@ -51,8 +53,8 @@ NOTICE: drop cascades to 3 other objects DETAIL: drop cascades to table test_alter drop cascades to function test_alter_function(integer) drop cascades to function test_alter_procedure(integer,integer) -create database b_comments dbcompatibility 'B'; -\c b_comments +\c contrib_regression +drop database b_comments; create schema b_comments; set search_path to 'b_comments'; /* unsupported */ @@ -267,5 +269,3 @@ drop cascades to table fvt_distribute_query_tables_02 drop cascades to table t_comment_0032 drop cascades to table t_comment_0034 reset search_path; -\c postgres -drop database b_comments; diff --git a/contrib/dolphin/expected/b_do_statment.out b/contrib/dolphin/expected/b_do_statment.out index 1f805197fd009d54035fc8df2ce44bd767b26d02..932fcfb17bc5c4d85acfbcbc9c5e7df6289e25a8 100644 --- a/contrib/dolphin/expected/b_do_statment.out +++ b/contrib/dolphin/expected/b_do_statment.out @@ -1,5 +1,5 @@ -create database db_do_stmt dbcompatibility = 'B'; -\c db_do_stmt +create schema db_do_stmt; +set current_schema to 'db_do_stmt'; create table t1 (a int); insert into t1 values(1),(4),(7); select a from t1; @@ -57,5 +57,6 @@ do sin(a) from t1; ERROR: syntax error at or near "from" LINE 1: do sin(a) from t1; ^ -\c regress -\connect: FATAL: database "regress" does not exist +drop schema db_do_stmt cascade; +NOTICE: drop cascades to table t1 +reset current_schema; diff --git a/contrib/dolphin/expected/bit_count.out 
b/contrib/dolphin/expected/bit_count.out index 28bcb7c0e69ba9f3c55b1e0983f2d6427586ff4d..939e3a3f7d0cbedfa53ef61f0401d62855ae3cb4 100644 --- a/contrib/dolphin/expected/bit_count.out +++ b/contrib/dolphin/expected/bit_count.out @@ -1,7 +1,5 @@ -drop database if exists test_bit_count; -NOTICE: database "test_bit_count" does not exist, skipping -create database test_bit_count dbcompatibility 'b'; -\c test_bit_count +create schema test_bit_count; +set current_schema to 'test_bit_count'; -- 测试数字,字符串,二进制输入 SELECT bit_count(29); bit_count @@ -204,5 +202,5 @@ select bit_count(b'1000000011111111111111111111111111111111111111111111111111111 64 (1 row) -\c postgres -drop database test_bit_count; +drop schema test_bit_count cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/b_compatibility_time_funcs.out b/contrib/dolphin/expected/builtin_funcs/b_compatibility_time_funcs.out index f403d370fae98dc71100231ebbbc08f4b10e20b1..5432253af6585e957f8ba586b0aa2654cb2f9947 100644 --- a/contrib/dolphin/expected/builtin_funcs/b_compatibility_time_funcs.out +++ b/contrib/dolphin/expected/builtin_funcs/b_compatibility_time_funcs.out @@ -1,10 +1,5 @@ --- --- Test All Time function under 'b' compatibility --- -drop database if exists b_time_funcs; -NOTICE: database "b_time_funcs" does not exist, skipping -create database b_time_funcs dbcompatibility 'b'; -\c b_time_funcs +create schema b_time_funcs; +set current_schema to 'b_time_funcs'; create table func_test(functionName varchar(256),result varchar(256)); truncate table func_test; -- makedate() @@ -488,5 +483,6 @@ select * from func_test; subdate('2022-01-01 01:01:01', interval 1.999 second) | Sat Jan 01 01:00:59.001 2022 --?.* -\c postgres -drop database if exists b_time_funcs; +drop schema b_time_funcs cascade; +NOTICE: drop cascades to table func_test +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/b_compatibility_time_funcs2.out 
b/contrib/dolphin/expected/builtin_funcs/b_compatibility_time_funcs2.out index f3d1a682c6259c55adee78dfe54f5f18cae6784e..77e91a13d0299dd3bead19ecdb03bab01a02541b 100644 --- a/contrib/dolphin/expected/builtin_funcs/b_compatibility_time_funcs2.out +++ b/contrib/dolphin/expected/builtin_funcs/b_compatibility_time_funcs2.out @@ -2,10 +2,8 @@ -- Test Time functions(Stage 2) under 'b' compatibility -- Contains subtime()、timediff()、time()、time_format()、timestamp()、timestampadd() -- -drop database if exists b_time_funcs2; -NOTICE: database "b_time_funcs2" does not exist, skipping -create database b_time_funcs2 dbcompatibility 'b'; -\c b_time_funcs2 +create schema b_time_funcs2; +set current_schema to 'b_time_funcs2'; create table func_test2(functionName varchar(256),result varchar(256)); truncate table func_test2; -- SUBTIME() @@ -1039,5 +1037,6 @@ select * from func_test2; TIMESTAMPADD(SECOND,-0.001,'2022-07-27 00:00:00') | Tue Jul 26 23:59:59.999 2022 --?.* -\c postgres -drop database if exists b_time_funcs2; +drop schema b_time_funcs2 cascade; +NOTICE: drop cascades to table func_test2 +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/b_compatibility_time_funcs3.out b/contrib/dolphin/expected/builtin_funcs/b_compatibility_time_funcs3.out index 7c440d2ad84628a9f6c4edbc959f426cb2346cbe..041de9286bc8acc7a943ac6bdf615f63be830dc5 100644 --- a/contrib/dolphin/expected/builtin_funcs/b_compatibility_time_funcs3.out +++ b/contrib/dolphin/expected/builtin_funcs/b_compatibility_time_funcs3.out @@ -2,10 +2,8 @@ -- Test Time functions(Stage 3) under 'b' compatibility -- Contains to_days(), to_seconds(), unix_timestamp(), utc_date(), utc_time()、timestampadd() -- -drop database if exists b_time_funcs3; -NOTICE: database "b_time_funcs3" does not exist, skipping -create database b_time_funcs3 dbcompatibility 'b'; -\c b_time_funcs3 +create schema b_time_funcs3; +set current_schema to 'b_time_funcs3'; create table func_test3(functionName varchar(256),result 
varchar(256)); truncate table func_test3; -- TO_DAYS() @@ -385,5 +383,6 @@ select * from func_test3; --? UTC_TIMESTAMP(6) | .* (101 rows) -\c postgres -drop database if exists b_time_funcs3; +drop schema b_time_funcs3 cascade; +NOTICE: drop cascades to table func_test3 +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/between.out b/contrib/dolphin/expected/builtin_funcs/between.out index 3190cfcf4ee8cd2a7f400c1db1fd1e337a6016c8..1784827050e6dfa5689112f03af901ba6f778545 100644 --- a/contrib/dolphin/expected/builtin_funcs/between.out +++ b/contrib/dolphin/expected/builtin_funcs/between.out @@ -1,7 +1,5 @@ -drop database if exists db_between; -NOTICE: database "db_between" does not exist, skipping -create database db_between dbcompatibility 'B'; -\c db_between +create schema db_between; +set current_schema to 'db_between'; select 2 between 2 and 23; ?column? ---------- @@ -361,5 +359,6 @@ select distinct c_town from t_between_and_0023 where c_town between 'b' and 'nz' (5 rows) drop table t_between_and_0023; -\c postgres -drop database if exists db_between; +drop schema db_between cascade; +NOTICE: drop cascades to table t_between_and_0007 +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/bin.out b/contrib/dolphin/expected/builtin_funcs/bin.out index c6ee11dfe8b3ac1c72125960b988b0470a97397d..dc950ce456ec5992eafb07b600f5b8fad67e4472 100644 --- a/contrib/dolphin/expected/builtin_funcs/bin.out +++ b/contrib/dolphin/expected/builtin_funcs/bin.out @@ -1,7 +1,5 @@ -drop database if exists db_bin; -NOTICE: database "db_bin" does not exist, skipping -create database db_bin dbcompatibility 'B'; -\c db_bin +create schema db_bin; +set current_schema to 'db_bin'; select bin(1); bin ----- @@ -62,5 +60,5 @@ select bin('测试'); 0 (1 row) -\c postgres -drop database if exists db_bin; +drop schema db_bin cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/cast.out 
b/contrib/dolphin/expected/builtin_funcs/cast.out index 49731db017285fd71526314cd6b9f0bc69272269..2678b09e48e5805b270cb49290c77841c156f8e4 100644 --- a/contrib/dolphin/expected/builtin_funcs/cast.out +++ b/contrib/dolphin/expected/builtin_funcs/cast.out @@ -1,7 +1,5 @@ -drop database if exists db_cast; -NOTICE: database "db_cast" does not exist, skipping -create database db_cast dbcompatibility 'B'; -\c db_cast +create schema db_cast; +set current_schema to 'db_cast'; select cast('$2'::money as unsigned); uint8 ------- @@ -38,5 +36,5 @@ select cast(cast('2022-11-10 18:03:20'::timestamp as unsigned) as timestamp); Fri Nov 11 02:03:20 2022 PST (1 row) -\c postgres -drop database if exists db_cast; +drop schema db_cast cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/char.out b/contrib/dolphin/expected/builtin_funcs/char.out index a12e795983ec069d41b4d593a697f7ffaf80ded7..76ed18f04def61d617ff3bcfc3ea6a97f30f7dd7 100644 --- a/contrib/dolphin/expected/builtin_funcs/char.out +++ b/contrib/dolphin/expected/builtin_funcs/char.out @@ -1,7 +1,5 @@ -drop database if exists db_char; -NOTICE: database "db_char" does not exist, skipping -create database db_char dbcompatibility 'B'; -\c db_char +create schema db_char; +set current_schema to 'db_char'; select char(67,66,67); char ------ @@ -128,5 +126,5 @@ select char('hiu158','测试',125.99,146); ~ (1 row) -\c postgres -drop database if exists db_char; +drop schema db_char cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/char_length.out b/contrib/dolphin/expected/builtin_funcs/char_length.out index fba0da928ceb82742085dd00d360b57950a028c1..f31c7e201ced48a4ec5d5efdf0461c0579f500b0 100644 --- a/contrib/dolphin/expected/builtin_funcs/char_length.out +++ b/contrib/dolphin/expected/builtin_funcs/char_length.out @@ -1,7 +1,5 @@ -drop database if exists db_char_length; -NOTICE: database "db_char_length" does not exist, skipping -create database db_char_length 
dbcompatibility 'B'; -\c db_char_length +create schema db_char_length; +set current_schema to 'db_char_length'; select char_length(1234); char_length ------------- @@ -92,8 +90,8 @@ select char_length(true); 1 (1 row) -\c postgres -drop database if exists db_char_length; +drop schema db_char_length cascade; +reset current_schema; drop database if exists db_char_length_gbk; NOTICE: database "db_char_length_gbk" does not exist, skipping create database db_char_length_gbk dbcompatibility 'B' encoding 'GBK' LC_CTYPE 'zh_CN.gbk' lc_collate 'zh_CN.gbk'; diff --git a/contrib/dolphin/expected/builtin_funcs/character_length.out b/contrib/dolphin/expected/builtin_funcs/character_length.out index 718df4253940a90973d757c05d091fa40ec6f5b2..458d01ee0f9852a0daaaa5e81ae4b394eeaf261e 100644 --- a/contrib/dolphin/expected/builtin_funcs/character_length.out +++ b/contrib/dolphin/expected/builtin_funcs/character_length.out @@ -1,7 +1,5 @@ -drop database if exists db_character_length; -NOTICE: database "db_character_length" does not exist, skipping -create database db_character_length dbcompatibility 'B'; -\c db_character_length +create schema db_character_length; +set current_schema to 'db_character_length'; select character_length(1234); character_length ------------------ @@ -92,8 +90,8 @@ select character_length(true); 1 (1 row) -\c postgres -drop database if exists db_character_length; +drop schema db_character_length cascade; +reset current_schema; drop database if exists db_character_length_gbk; NOTICE: database "db_character_length_gbk" does not exist, skipping create database db_character_length_gbk dbcompatibility 'B' encoding 'GBK' LC_CTYPE 'zh_CN.gbk' lc_collate 'zh_CN.gbk'; diff --git a/contrib/dolphin/expected/builtin_funcs/conv.out b/contrib/dolphin/expected/builtin_funcs/conv.out index 50e1a6902c3df29e91b7b1ce55956dd1d4680246..ee937d5797c62e1aedf599f0eb17c9c8272f6b55 100644 --- a/contrib/dolphin/expected/builtin_funcs/conv.out +++ 
b/contrib/dolphin/expected/builtin_funcs/conv.out @@ -1,7 +1,5 @@ -drop database if exists db_conv; -NOTICE: database "db_conv" does not exist, skipping -create database db_conv dbcompatibility 'B'; -\c db_conv +create schema db_conv; +set current_schema to 'db_conv'; select conv('a',16,2); conv ------ @@ -504,5 +502,5 @@ select conv(-9544646155975628532428411,-10,-10); -9223372036854775808 (1 row) -\c postgres -drop database if exists db_conv; +drop schema db_conv cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/convert.out b/contrib/dolphin/expected/builtin_funcs/convert.out index 2354ea84a4df7228e96b5e171517d09ba019ddf1..29b4022f9fa224f93e1b4df27b6dce612dc60b95 100644 --- a/contrib/dolphin/expected/builtin_funcs/convert.out +++ b/contrib/dolphin/expected/builtin_funcs/convert.out @@ -1,7 +1,5 @@ -drop database if exists db_convert; -NOTICE: database "db_convert" does not exist, skipping -create database db_convert dbcompatibility 'B'; -\c db_convert +create schema db_convert; +set current_schema to 'db_convert'; select convert(1 using 'utf8'); convert --------- @@ -78,5 +76,5 @@ select convert(1 using decimal(10,3)); 1.000 (1 row) -\c postgres -drop database if exists db_convert; +drop schema db_convert cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/crc32.out b/contrib/dolphin/expected/builtin_funcs/crc32.out index b4467321145ef7a62ef3ac00e56eca9ec8788c8e..471dc2f59288b7f552c961195d33cd07996cb149 100644 --- a/contrib/dolphin/expected/builtin_funcs/crc32.out +++ b/contrib/dolphin/expected/builtin_funcs/crc32.out @@ -1,7 +1,5 @@ -drop database if exists db_crc32; -NOTICE: database "db_crc32" does not exist, skipping -create database db_crc32 dbcompatibility 'B'; -\c db_crc32 +create schema db_crc32; +set current_schema to 'db_crc32'; select crc32('abc'); crc32 ----------- @@ -26,5 +24,5 @@ select crc32(10),crc32(-3.1415926),crc32(1.339E5),crc32('ab57'),crc32('HAF47'); 2707236321 | 1632764266 | 
2833135858 | 4076943245 | 4203314247 (1 row) -\c postgres -drop database if exists db_crc32; +drop schema db_crc32 cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/db_b_format.out b/contrib/dolphin/expected/builtin_funcs/db_b_format.out index a47df3a34c8bc086056ac037a8a7a2af674f62ab..e4268d76df5c25e36c063906dc9cb4c3ebf1ffff 100644 --- a/contrib/dolphin/expected/builtin_funcs/db_b_format.out +++ b/contrib/dolphin/expected/builtin_funcs/db_b_format.out @@ -1,7 +1,5 @@ -drop database if exists db_db_b_format; -NOTICE: database "db_db_b_format" does not exist, skipping -create database db_db_b_format dbcompatibility 'B'; -\c db_db_b_format +create schema db_db_b_format; +set current_schema to 'db_db_b_format'; -- test for b_compatibility_mode = false select format(1234.456, 2); format @@ -567,8 +565,8 @@ select format('%2$s, %1$s', variadic array[1, 2]); 2, 1 (1 row) -\c postgres -drop database db_db_b_format; +drop schema db_db_b_format cascade; +reset current_schema; -- test for A compatibility to ensure the original functionality is good. 
create database db_db_b_format dbcompatibility 'A'; \c db_db_b_format diff --git a/contrib/dolphin/expected/builtin_funcs/db_b_hex.out b/contrib/dolphin/expected/builtin_funcs/db_b_hex.out index 86fb7dc39e7f0acaff5bb60b593d31ba42c241a2..0aa71e6af6427b78eaa1d8fb4e0d3e2dbe33fdd6 100644 --- a/contrib/dolphin/expected/builtin_funcs/db_b_hex.out +++ b/contrib/dolphin/expected/builtin_funcs/db_b_hex.out @@ -1,7 +1,5 @@ -drop database if exists db_b_hex; -NOTICE: database "db_b_hex" does not exist, skipping -create database db_b_hex dbcompatibility 'B'; -\c db_b_hex +create schema db_b_hex; +set current_schema to 'db_b_hex'; select hex(int1(255)); ERROR: tinyint out of range CONTEXT: referenced column: to_number @@ -203,5 +201,6 @@ select hex(c1) from bytea_to_hex_test; deadbeef (1 row) -\c postgres -drop database if exists db_b_hex; +drop schema db_b_hex cascade; +NOTICE: drop cascades to table bytea_to_hex_test +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/db_b_if.out b/contrib/dolphin/expected/builtin_funcs/db_b_if.out index 94e00fdcefb3b3315116a0b471992de54b35d2cd..7178b2cfeaaa591cd863af4417c302e371e3abe1 100644 --- a/contrib/dolphin/expected/builtin_funcs/db_b_if.out +++ b/contrib/dolphin/expected/builtin_funcs/db_b_if.out @@ -1,7 +1,5 @@ -drop database if exists db_b_if; -NOTICE: database "db_b_if" does not exist, skipping -create database db_b_if dbcompatibility 'B'; -\c db_b_if +create schema db_b_if; +set current_schema to 'db_b_if'; select if(TRUE, 1, 2); case ------ @@ -23,7 +21,7 @@ CONTEXT: referenced column: case -- '2022-01-30' is text, date '2022-01-30' is date CREATE VIEW test_view as select '2022-01-30' as text_type, date '2022-01-30' as date_type; \d+ test_view - View "public.test_view" + View "db_b_if.test_view" Column | Type | Modifiers | Storage | Description -----------+------+-----------+----------+------------- text_type | text | | extended | @@ -193,5 +191,6 @@ ERROR: CASE types boolean and numeric cannot be matched 
LINE 1: select if (true, 2.2::numeric(10, 2), true) as a, if (false,... ^ CONTEXT: referenced column: a -\c postgres -drop database if exists db_b_if; +drop schema db_b_if cascade; +NOTICE: drop cascades to view test_view +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/elt.out b/contrib/dolphin/expected/builtin_funcs/elt.out index 77570ed73e423351b4c7a7823fada7deceff001a..fbb397c849c3e5bf8ce5406fd832f6d8a4d7652f 100644 --- a/contrib/dolphin/expected/builtin_funcs/elt.out +++ b/contrib/dolphin/expected/builtin_funcs/elt.out @@ -1,7 +1,5 @@ -drop database if exists db_elt; -NOTICE: database "db_elt" does not exist, skipping -create database db_elt dbcompatibility 'B'; -\c db_elt +create schema db_elt; +set current_schema to 'db_elt'; select elt(1,1); elt ----- @@ -110,5 +108,5 @@ select elt(1,'a',2); a (1 row) -\c postgres -drop database if exists db_elt; +drop schema db_elt cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/field.out b/contrib/dolphin/expected/builtin_funcs/field.out index a98cfe41271f3d72dad70f23d8892dc125fafc24..d35dbbf9bbf35523b0bbb9c5c77c266ab75de8cf 100644 --- a/contrib/dolphin/expected/builtin_funcs/field.out +++ b/contrib/dolphin/expected/builtin_funcs/field.out @@ -1,7 +1,5 @@ -drop database if exists db_field; -NOTICE: database "db_field" does not exist, skipping -create database db_field dbcompatibility 'B'; -\c db_field +create schema db_field; +set current_schema to 'db_field'; select field(4,1,2,3,4); field ------- @@ -95,5 +93,5 @@ select field(' ','@',null,' ','',' '); 3 (1 row) -\c postgres -drop database if exists db_field; +drop schema db_field cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/find_in_set.out b/contrib/dolphin/expected/builtin_funcs/find_in_set.out index e0c760a5b72cf2f949adb43b6c5cfff8ddc78efa..15c4f2674c667d8fc3f66f29ae3a5135d1bd11bf 100644 --- a/contrib/dolphin/expected/builtin_funcs/find_in_set.out +++ 
b/contrib/dolphin/expected/builtin_funcs/find_in_set.out @@ -1,7 +1,5 @@ -drop database if exists db_find_in_set; -NOTICE: database "db_find_in_set" does not exist, skipping -create database db_find_in_set dbcompatibility 'B'; -\c db_find_in_set +create schema db_find_in_set; +set current_schema to 'db_find_in_set'; select find_in_set(1,'a,1,c'); find_in_set ------------- @@ -92,5 +90,5 @@ select find_in_set(1.1,'a,1.2,c,qwee,1212,1.1'); 6 (1 row) -\c postgres -drop database if exists db_find_in_set; +drop schema db_find_in_set cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/make_set.out b/contrib/dolphin/expected/builtin_funcs/make_set.out index 02fa26bc7e66fee4696467a470506303f21602f6..ba4d2467d8b7cea3e65b7359980689ff8d40bf30 100644 --- a/contrib/dolphin/expected/builtin_funcs/make_set.out +++ b/contrib/dolphin/expected/builtin_funcs/make_set.out @@ -1,7 +1,5 @@ -drop database if exists make_set; -NOTICE: database "make_set" does not exist, skipping -create database make_set dbcompatibility 'b'; -\c make_set +create schema make_set; +set current_schema to 'make_set'; set dolphin.sql_mode = ''; select make_set(3, 'a', 'b', 'c'); make_set @@ -141,5 +139,5 @@ select make_set(3,01/02/03, false, true, false); 0.166666666666667,0 (1 row) -\c postgres -drop database make_set +drop schema make_set cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/not_between.out b/contrib/dolphin/expected/builtin_funcs/not_between.out index ae5fc133b0d932b76285ec28521938cf1a16b7ac..2d75cdb460cec2b1eaf8b7ca97980de21a11d3e0 100644 --- a/contrib/dolphin/expected/builtin_funcs/not_between.out +++ b/contrib/dolphin/expected/builtin_funcs/not_between.out @@ -1,7 +1,5 @@ -drop database if exists db_not_between; -NOTICE: database "db_not_between" does not exist, skipping -create database db_not_between dbcompatibility 'B'; -\c db_not_between +create schema db_not_between; +set current_schema to 'db_not_between'; select 2 not 
between 2 and 23; ?column? ---------- @@ -176,5 +174,5 @@ select 1 not between '1测' and '1'; f (1 row) -\c postgres -drop database if exists db_not_between; +drop schema db_not_between cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/soundex.out b/contrib/dolphin/expected/builtin_funcs/soundex.out index 45d3583cdc79bd289204f5f1af21f3b2a3f95e46..2fba6491361d7bdb9859ec1f6285cf650cc1634a 100644 --- a/contrib/dolphin/expected/builtin_funcs/soundex.out +++ b/contrib/dolphin/expected/builtin_funcs/soundex.out @@ -1,7 +1,5 @@ -drop database if exists db_soundex; -NOTICE: database "db_soundex" does not exist, skipping -create database db_soundex dbcompatibility 'B'; -\c db_soundex +create schema db_soundex; +set current_schema to 'db_soundex'; select soundex('abc'); soundex --------- @@ -236,5 +234,5 @@ select soundex('测T测h测试o测masёння я такая шчасліваяhe 测3524 (1 row) -\c postgres -drop database if exists db_soundex; +drop schema db_soundex cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/space.out b/contrib/dolphin/expected/builtin_funcs/space.out index 27b5e46e22766da69a1636cc5d0082de1c1a8323..0fd01b32cb1ede7c02b1596e3227f37422ed5c43 100644 --- a/contrib/dolphin/expected/builtin_funcs/space.out +++ b/contrib/dolphin/expected/builtin_funcs/space.out @@ -1,7 +1,5 @@ -drop database if exists db_space; -NOTICE: database "db_space" does not exist, skipping -create database db_space dbcompatibility 'B'; -\c db_space +create schema db_space; +set current_schema to 'db_space'; select space('a'); space ------- @@ -68,5 +66,5 @@ select space(b'111'); (1 row) -\c postgres -drop database if exists db_space; +drop schema db_space cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/connection_id.out b/contrib/dolphin/expected/connection_id.out index b333ca23c5c2cea78a9dd45a40407dc79c1ee210..2ae1a10c3fce39cd834d3f7ecb7b6db2a4f0cea0 100644 --- a/contrib/dolphin/expected/connection_id.out +++ 
b/contrib/dolphin/expected/connection_id.out @@ -1,7 +1,5 @@ -drop database if exists test_connection_id; -NOTICE: database "test_connection_id" does not exist, skipping -create database test_connection_id dbcompatibility 'b'; -\c test_connection_id +create schema test_connection_id; +set current_schema to 'test_connection_id'; -- 测试返回连接的ID SELECT CONNECTION_ID(); connection_id @@ -9,5 +7,5 @@ SELECT CONNECTION_ID(); --? (1 row) -\c postgres -drop database test_connection_id; +drop schema test_connection_id cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/conv_cast_test.out b/contrib/dolphin/expected/conv_cast_test.out index 71c76f6027184b48473c672b42ec0ce93a94e1d9..7d9d3615b4929b61f1c58aa34c43cbaf94ff7734 100644 --- a/contrib/dolphin/expected/conv_cast_test.out +++ b/contrib/dolphin/expected/conv_cast_test.out @@ -1,9 +1,5 @@ --- b compatibility case -drop database if exists conv_cast_test; -NOTICE: database "conv_cast_test" does not exist, skipping --- create database conv_cast_test dbcompatibility 'b'; -create database conv_cast_test with DBCOMPATIBILITY = 'B'; -\c conv_cast_test +create schema conv_cast_test; +set current_schema to 'conv_cast_test'; select conv(-211111111111111111111111111111111111111111111111111111111177777,10,8); conv @@ -404,5 +400,5 @@ select cast(b'11111111111111111111111111111111111111111111111111111111111111111' (1 row) -\c postgres -drop database conv_cast_test; +drop schema conv_cast_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/create_function_test/call_function.out b/contrib/dolphin/expected/create_function_test/call_function.out old mode 100755 new mode 100644 index fd471e32eaeb5ddf9f7c293c8aa5251765476fd1..a879a18cfccacbcdcd896d1950e5313ab2d9333e --- a/contrib/dolphin/expected/create_function_test/call_function.out +++ b/contrib/dolphin/expected/create_function_test/call_function.out @@ -1,7 +1,5 @@ -drop database if exists db_func_call1; -NOTICE: database "db_func_call1" does not 
exist, skipping -create database db_func_call1 dbcompatibility 'B'; -\c db_func_call1 +create schema db_func_call1; +set current_schema to 'db_func_call1'; CREATE FUNCTION f1 (s CHAR(20)) RETURNS int CONTAINS SQL AS $$ select 1 $$ ; call f1('a'); f1 @@ -98,5 +96,11 @@ call f_3(); A (1 row) -\c postgres -drop database if exists db_func_call1; +drop schema db_func_call1 cascade; +NOTICE: drop cascades to 5 other objects +DETAIL: drop cascades to table tb_object0015 +drop cascades to table tb_object0015_01 +drop cascades to function tri_1() +drop cascades to function f_2() +drop cascades to function f_3() +reset current_schema; diff --git a/contrib/dolphin/expected/create_function_test/commentsharp.out b/contrib/dolphin/expected/create_function_test/commentsharp.out index 65fac3f68dfbf188bb3cef41f3a07e364ed89aab..2a55f3628779e7bc3d186af799fa587d9fbe49f2 100644 --- a/contrib/dolphin/expected/create_function_test/commentsharp.out +++ b/contrib/dolphin/expected/create_function_test/commentsharp.out @@ -1,7 +1,5 @@ -drop database if exists db_comment_sharp; -NOTICE: database "db_comment_sharp" does not exist, skipping -create database db_comment_sharp dbcompatibility 'B'; -\c db_comment_sharp +create schema db_comment_sharp; +set current_schema to 'db_comment_sharp'; create table t1(a int,b int); insert into t1 values(10,11); insert into t1 values(12,13); @@ -185,5 +183,14 @@ LINE 3: for each row# drop table t_test; drop table t_test2; drop table t_test3; -\c postgres -drop database if exists db_comment_sharp; +drop schema db_comment_sharp cascade; +NOTICE: drop cascades to 8 other objects +DETAIL: drop cascades to table t1 +drop cascades to table "t1#t2" +drop cascades to table t3 +drop cascades to table t4 +drop cascades to table test +drop cascades to function testfunc3() +drop cascades to function testfunc2() +drop cascades to function testfunc() +reset current_schema; diff --git a/contrib/dolphin/expected/create_function_test/deterministic.out 
b/contrib/dolphin/expected/create_function_test/deterministic.out old mode 100755 new mode 100644 index 3d4e27de72cc6c7694d7e93979ca9200284c32fd..f4b20d97148e7b763c6a4bffd1976adb92626f0b --- a/contrib/dolphin/expected/create_function_test/deterministic.out +++ b/contrib/dolphin/expected/create_function_test/deterministic.out @@ -1,7 +1,5 @@ -drop database if exists db_func_1; -NOTICE: database "db_func_1" does not exist, skipping -create database db_func_1 dbcompatibility 'B'; -\c db_func_1 +create schema db_func_1; +set current_schema to 'db_func_1'; CREATE FUNCTION f1 (s CHAR(20)) RETURNS int NOT DETERMINISTIC AS $$ select 1 $$ ; CREATE FUNCTION f2 (s int) RETURNS int DETERMINISTIC AS $$ select s; $$ ; CREATE FUNCTION f3 (s int) RETURNS int DETERMINISTIC LANGUAGE SQL AS $$ select s; $$ ; @@ -30,5 +28,10 @@ call f4(4); 4 (1 row) -\c postgres -drop database if exists db_func_1; +drop schema db_func_1 cascade; +NOTICE: drop cascades to 4 other objects +DETAIL: drop cascades to function f1(character) +drop cascades to function f2(integer) +drop cascades to function f3(integer) +drop cascades to function f4(integer) +reset current_schema; diff --git a/contrib/dolphin/expected/create_function_test/language_sql.out b/contrib/dolphin/expected/create_function_test/language_sql.out old mode 100755 new mode 100644 index ad4503004c21a180f872c5d9d6a90349e47aaef6..0b9d0dddfa5675be84bddc21fea71f6ff711a789 --- a/contrib/dolphin/expected/create_function_test/language_sql.out +++ b/contrib/dolphin/expected/create_function_test/language_sql.out @@ -1,7 +1,5 @@ -drop database if exists db_func_2; -NOTICE: database "db_func_2" does not exist, skipping -create database db_func_2 dbcompatibility 'B'; -\c db_func_2 +create schema db_func_2; +set current_schema to 'db_func_2'; CREATE FUNCTION f1 (s CHAR(20)) RETURNS int AS $$ select 1 $$ ; CREATE FUNCTION f2 (s int) RETURNS int AS $$ select s; $$ ; CREATE FUNCTION f3 (s int) RETURNS int LANGUAGE SQL AS $$ select s; $$ ; @@ -164,5 +162,17 
@@ call f4(4); 4 (1 row) -\c postgres -drop database if exists db_func_2; +drop schema db_func_2 cascade; +NOTICE: drop cascades to 11 other objects +DETAIL: drop cascades to function f1(character) +drop cascades to function f2(integer) +drop cascades to function f3(integer) +drop cascades to function f4(integer) +drop cascades to table language_1093039 +drop cascades to function fun_1(integer,integer,character varying) +drop cascades to function fun_2(integer,integer,character varying) +drop cascades to function fun_3(integer,integer,character varying) +drop cascades to function pro_1(integer,integer,character varying) +drop cascades to function pro_2(integer,integer,character varying) +drop cascades to function pro_3(integer,integer,character varying) +reset current_schema; diff --git a/contrib/dolphin/expected/create_function_test/m_type_create_proc.out b/contrib/dolphin/expected/create_function_test/m_type_create_proc.out index 52241daadc5889fc952f86c830531cbbb7cb70a1..9392bdfff51dc49d298f4b74489ab4b0458f1b6a 100644 --- a/contrib/dolphin/expected/create_function_test/m_type_create_proc.out +++ b/contrib/dolphin/expected/create_function_test/m_type_create_proc.out @@ -1,7 +1,5 @@ -drop database if exists m_create_proc_type; -NOTICE: database "m_create_proc_type" does not exist, skipping -create database m_create_proc_type dbcompatibility 'B'; -\c m_create_proc_type +create schema m_create_proc_type; +set current_schema to 'm_create_proc_type'; --test create procedure delimiter // CREATE PROCEDURE p() @@ -347,5 +345,24 @@ call doempty(); (1 row) -\c postgres -drop database m_create_proc_type; +drop schema m_create_proc_type cascade; +NOTICE: drop cascades to 18 other objects +DETAIL: drop cascades to function p() +drop cascades to function proc1() +drop cascades to table item +drop cascades to function procedure_1(integer,integer) +drop cascades to function procedure_1_m(integer,integer) +drop cascades to table test +drop cascades to function testpro(integer) 
+drop cascades to function testpro_m(integer) +drop cascades to function testpro_m6(integer) +drop cascades to function testpro_m7(integer) +drop cascades to function procedure_1_m_o(integer,integer) +drop cascades to function procedure_2_m_o() +drop cascades to table test9 +drop cascades to function test_proc() +drop cascades to function doiterate(integer) +drop cascades to function dorepeat(integer) +drop cascades to function docase(integer) +drop cascades to function doempty() +reset current_schema; diff --git a/contrib/dolphin/expected/create_function_test/single_line_proc.out b/contrib/dolphin/expected/create_function_test/single_line_proc.out index e50e0baa7e05db85070703c356b9fa56e672da7e..8eeb1c6f1687bc0b549b11c6784e640f71eb18ef 100644 --- a/contrib/dolphin/expected/create_function_test/single_line_proc.out +++ b/contrib/dolphin/expected/create_function_test/single_line_proc.out @@ -1,7 +1,5 @@ -drop database if exists db_func_call_2; -NOTICE: database "db_func_call_2" does not exist, skipping -create database db_func_call_2 dbcompatibility 'B'; -\c db_func_call_2 +create schema db_func_call_2; +set current_schema to 'db_func_call_2'; create table t1 (a int); create table t2 (a int); insert into t1 values(1),(2),(3); @@ -231,5 +229,22 @@ LINE 2: select z from tz; QUERY: select z from tz; -\c regress -\connect: FATAL: database "regress" does not exist +drop schema db_func_call_2 cascade; +NOTICE: drop cascades to 16 other objects +DETAIL: drop cascades to table t1 +drop cascades to table t2 +drop cascades to function proc1() +drop cascades to function proc2() +drop cascades to function proc3() +drop cascades to function proc4() +drop cascades to function proc5(integer) +drop cascades to function proc6() +drop cascades to function proc7() +drop cascades to function proc8() +drop cascades to function proc9() +drop cascades to table pbu_trade_collect_mbr_m +drop cascades to function proc10() +drop cascades to table base_info_pbu_trade +drop cascades to table 
base_info_pbu_org +drop cascades to function proc11() +reset current_schema; diff --git a/contrib/dolphin/expected/create_function_test/sql_options.out b/contrib/dolphin/expected/create_function_test/sql_options.out old mode 100755 new mode 100644 index f608d466cc8a0e477e9c6804c48118528462be36..ce45c4b6bd6df910c473615c8066ba1d67f3f904 --- a/contrib/dolphin/expected/create_function_test/sql_options.out +++ b/contrib/dolphin/expected/create_function_test/sql_options.out @@ -1,7 +1,5 @@ -drop database if exists db_func_3; -NOTICE: database "db_func_3" does not exist, skipping -create database db_func_3 dbcompatibility 'B'; -\c db_func_3 +create schema db_func_3; +set current_schema to 'db_func_3'; CREATE FUNCTION f1 (s CHAR(20)) RETURNS int CONTAINS SQL AS $$ select 1 $$ ; CREATE FUNCTION f2 (s int) RETURNS int CONTAINS SQL DETERMINISTIC AS $$ select s; $$ ; CREATE FUNCTION f3 (s int) RETURNS int CONTAINS SQL LANGUAGE SQL AS $$ select s; $$ ; @@ -94,5 +92,10 @@ call f4(4); 4 (1 row) -\c postgres -drop database if exists db_func_3; +drop schema db_func_3 cascade; +NOTICE: drop cascades to 4 other objects +DETAIL: drop cascades to function f1(character) +drop cascades to function f2(integer) +drop cascades to function f3(integer) +drop cascades to function f4(integer) +reset current_schema; diff --git a/contrib/dolphin/expected/create_function_test/sql_security.out b/contrib/dolphin/expected/create_function_test/sql_security.out old mode 100755 new mode 100644 index 2d2facce462cd7ae973fc1265842e2dc48906995..4cc467715d16de680f1e4ce73e495136a9fada8d --- a/contrib/dolphin/expected/create_function_test/sql_security.out +++ b/contrib/dolphin/expected/create_function_test/sql_security.out @@ -1,7 +1,5 @@ -drop database if exists db_func_4; -NOTICE: database "db_func_4" does not exist, skipping -create database db_func_4 dbcompatibility 'B'; -\c db_func_4 +create schema db_func_4; +set current_schema to 'db_func_4'; CREATE FUNCTION f1 (s CHAR(20)) RETURNS int SQL SECURITY 
DEFINER AS $$ select 1 $$ ; CREATE FUNCTION f2 (s int) RETURNS int NO SQL SQL SECURITY DEFINER AS $$ select s; $$ ; CREATE FUNCTION f3 (s int) RETURNS int SQL SECURITY INVOKER READS SQL DATA LANGUAGE SQL AS $$ select s; $$ ; @@ -30,5 +28,10 @@ call f4(4); 4 (1 row) -\c postgres -drop database if exists db_func_4; +drop schema db_func_4 cascade; +NOTICE: drop cascades to 4 other objects +DETAIL: drop cascades to function f1(character) +drop cascades to function f2(integer) +drop cascades to function f3(integer) +drop cascades to function f4(integer) +reset current_schema; diff --git a/contrib/dolphin/expected/db_b_date_time_functions.out b/contrib/dolphin/expected/db_b_date_time_functions.out index a3b466162de6cf5138c5005acd83e985913aad56..015bf8178ec1a8a9030ae9384c435521f115eaea 100644 --- a/contrib/dolphin/expected/db_b_date_time_functions.out +++ b/contrib/dolphin/expected/db_b_date_time_functions.out @@ -1,8 +1,5 @@ ----- b compatibility case -drop database if exists b_datetime_func_test; -NOTICE: database "b_datetime_func_test" does not exist, skipping -create database b_datetime_func_test dbcompatibility 'b'; -\c b_datetime_func_test +create schema b_datetime_func_test1; +set current_schema to 'b_datetime_func_test1'; set datestyle = 'ISO,ymd'; set time zone "Asia/Shanghai"; -- test part-one function @@ -578,5 +575,5 @@ select * from test_datetime; (3 rows) drop table test_datetime; -\c contrib_regression -DROP DATABASE b_datetime_func_test; +drop schema b_datetime_func_test1 cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/db_b_date_time_functions2.out b/contrib/dolphin/expected/db_b_date_time_functions2.out index 1d8aa3cef06080941d924f6d0446b43474d7ef8c..e360638fd73f4564bcf94ac3430adda3d28e7947 100644 --- a/contrib/dolphin/expected/db_b_date_time_functions2.out +++ b/contrib/dolphin/expected/db_b_date_time_functions2.out @@ -1,8 +1,5 @@ ----- b compatibility case -drop database if exists b_datetime_func_test; -NOTICE: database 
"b_datetime_func_test" does not exist, skipping -create database b_datetime_func_test dbcompatibility 'b'; -\c b_datetime_func_test +create schema b_datetime_func_test2; +set current_schema to 'b_datetime_func_test2'; set datestyle = 'ISO,ymd'; set time zone "Asia/Shanghai"; create table test(funcname text, result text); @@ -1411,5 +1408,5 @@ select * from test order by funcname; (334 rows) drop table test; -\c contrib_regression -DROP DATABASE b_datetime_func_test; +drop schema b_datetime_func_test2 cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/db_b_date_time_functions3.out b/contrib/dolphin/expected/db_b_date_time_functions3.out index ea87b846394d85c412d7d6f01ba84e681e47a78f..fbd34ef0d4c183f8b9f4c193d470f262c58fec00 100644 --- a/contrib/dolphin/expected/db_b_date_time_functions3.out +++ b/contrib/dolphin/expected/db_b_date_time_functions3.out @@ -1,8 +1,5 @@ ----- b compatibility case -drop database if exists b_datetime_func_test; -NOTICE: database "b_datetime_func_test" does not exist, skipping -create database b_datetime_func_test dbcompatibility 'b'; -\c b_datetime_func_test +create schema b_datetime_func_test3; +set current_schema to 'b_datetime_func_test3'; set datestyle = 'ISO,ymd'; set time zone "Asia/Shanghai"; create table test(funcname text, result text); @@ -977,5 +974,5 @@ select * from test order by funcname; (309 rows) drop table test; -\c contrib_regression -DROP DATABASE b_datetime_func_test; +drop schema b_datetime_func_test3 cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/db_b_date_time_functions4.out b/contrib/dolphin/expected/db_b_date_time_functions4.out index 9c006254500a7288f087d77f234db46ab1247c0d..77e1064060d317098749a9a00a37a2ef3c5cb575 100644 --- a/contrib/dolphin/expected/db_b_date_time_functions4.out +++ b/contrib/dolphin/expected/db_b_date_time_functions4.out @@ -1,8 +1,5 @@ ----- b compatibility case -drop database if exists b_datetime_func_test; -NOTICE: database "b_datetime_func_test" 
does not exist, skipping -create database b_datetime_func_test dbcompatibility 'b'; -\c b_datetime_func_test +create schema b_datetime_func_test4; +set current_schema to 'b_datetime_func_test4'; set datestyle = 'ISO,ymd'; set time zone "Asia/Shanghai"; create table test(funcname text, result text); @@ -799,5 +796,5 @@ select * from test order by funcname; (269 rows) drop table test; -\c contrib_regression -DROP DATABASE b_datetime_func_test; +drop schema b_datetime_func_test4 cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/db_b_new_gram_test.out b/contrib/dolphin/expected/db_b_new_gram_test.out index c7c401bc830b7ed1f60e01791a63e19627e2d8ec..249400d29bc06bad73a17016b2a0c6b20c6f90d2 100644 --- a/contrib/dolphin/expected/db_b_new_gram_test.out +++ b/contrib/dolphin/expected/db_b_new_gram_test.out @@ -1,7 +1,5 @@ -drop database if exists db_b_new_gram_test; -NOTICE: database "db_b_new_gram_test" does not exist, skipping -create database db_b_new_gram_test dbcompatibility 'B'; -\c db_b_new_gram_test +create schema db_b_new_gram_test; +set current_schema to 'db_b_new_gram_test'; -- CREATE TABLE engine test CREATE TABLE test_engine_1 (a int) engine = InnoDB; CREATE TABLE IF NOT EXISTS test_engine_2 (a int) engine = InnoDB; @@ -19,7 +17,7 @@ CREATE TABLE test_engine_as engine = InnoDB as select a from test_engine_1; -- CREATE TABLE COMPRESSION test CREATE TABLE test_compression_1_pglz (a int) COMPRESSION = pglz; \d+ test_compression_1_pglz - Table "public.test_compression_1_pglz" + Table "db_b_new_gram_test.test_compression_1_pglz" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -28,7 +26,7 @@ Options: orientation=row, compresstype=1 CREATE TABLE IF NOT EXISTS test_compression_create_2_pglz (a int) COMPRESSION = pglz; \d+ test_compression_create_2_pglz - Table "public.test_compression_create_2_pglz" + Table 
"db_b_new_gram_test.test_compression_create_2_pglz" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -37,7 +35,7 @@ Options: orientation=row, compresstype=1 CREATE TABLE test_compression_type_table_1_pglz OF test_engine_type1 COMPRESSION = pglz; \d+ test_compression_type_table_1_pglz - Table "public.test_compression_type_table_1_pglz" + Table "db_b_new_gram_test.test_compression_type_table_1_pglz" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+----------+--------------+------------- a | integer | | plain | | @@ -48,7 +46,7 @@ Options: orientation=row, compresstype=1 CREATE TABLE IF NOT EXISTS test_compression_create_type_table_2_pglz OF test_engine_type2 COMPRESSION = pglz; \d+ test_compression_create_type_table_2_pglz - Table "public.test_compression_create_type_table_2_pglz" + Table "db_b_new_gram_test.test_compression_create_type_table_2_pglz" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+----------+--------------+------------- a | integer | | plain | | @@ -59,7 +57,7 @@ Options: orientation=row, compresstype=1 CREATE TABLE test_compression_as_pglz COMPRESSION = pglz as select a from test_compression_1_pglz; \d+ test_compression_as_pglz - Table "public.test_compression_as_pglz" + Table "db_b_new_gram_test.test_compression_as_pglz" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -68,7 +66,7 @@ Options: orientation=row, compresstype=1 CREATE TABLE test_compression_1_zstd (a int) COMPRESSION = zstd; \d+ test_compression_1_zstd - Table "public.test_compression_1_zstd" + Table "db_b_new_gram_test.test_compression_1_zstd" Column | Type | Modifiers | Storage | Stats target | Description 
--------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -77,7 +75,7 @@ Options: orientation=row, compresstype=2 CREATE TABLE IF NOT EXISTS test_compression_create_2_zstd (a int) COMPRESSION = zstd; \d+ test_compression_create_2_zstd - Table "public.test_compression_create_2_zstd" + Table "db_b_new_gram_test.test_compression_create_2_zstd" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -86,7 +84,7 @@ Options: orientation=row, compresstype=2 CREATE TABLE test_compression_type_table_1_zstd OF test_engine_type1 COMPRESSION = zstd; \d+ test_compression_type_table_1_zstd - Table "public.test_compression_type_table_1_zstd" + Table "db_b_new_gram_test.test_compression_type_table_1_zstd" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+----------+--------------+------------- a | integer | | plain | | @@ -97,7 +95,7 @@ Options: orientation=row, compresstype=2 CREATE TABLE IF NOT EXISTS test_compression_create_type_table_2_zstd OF test_engine_type2 COMPRESSION = zstd; \d+ test_compression_create_type_table_2_zstd - Table "public.test_compression_create_type_table_2_zstd" + Table "db_b_new_gram_test.test_compression_create_type_table_2_zstd" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+----------+--------------+------------- a | integer | | plain | | @@ -108,7 +106,7 @@ Options: orientation=row, compresstype=2 CREATE TABLE test_compression_as_zstd COMPRESSION = zstd as select a from test_compression_1_zstd; \d+ test_compression_as_zstd - Table "public.test_compression_as_zstd" + Table "db_b_new_gram_test.test_compression_as_zstd" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -117,7 +115,7 @@ Options: 
orientation=row, compresstype=2 CREATE TABLE test_compression_1_none (a int) COMPRESSION = 'none'; \d+ test_compression_1_none - Table "public.test_compression_1_none" + Table "db_b_new_gram_test.test_compression_1_none" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -126,7 +124,7 @@ Options: orientation=row, compresstype=0, compression=no CREATE TABLE IF NOT EXISTS test_compression_create_2_none (a int) COMPRESSION = 'none'; \d+ test_compression_create_2_none - Table "public.test_compression_create_2_none" + Table "db_b_new_gram_test.test_compression_create_2_none" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -135,7 +133,7 @@ Options: orientation=row, compresstype=0, compression=no CREATE TABLE test_compression_type_table_1_none OF test_engine_type1 COMPRESSION = 'none'; \d+ test_compression_type_table_1_none - Table "public.test_compression_type_table_1_none" + Table "db_b_new_gram_test.test_compression_type_table_1_none" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+----------+--------------+------------- a | integer | | plain | | @@ -146,7 +144,7 @@ Options: orientation=row, compresstype=0, compression=no CREATE TABLE IF NOT EXISTS test_compression_create_type_table_2_none OF test_engine_type2 COMPRESSION = 'none'; \d+ test_compression_create_type_table_2_none - Table "public.test_compression_create_type_table_2_none" + Table "db_b_new_gram_test.test_compression_create_type_table_2_none" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+----------+--------------+------------- a | integer | | plain | | @@ -157,7 +155,7 @@ Options: orientation=row, compresstype=0, compression=no CREATE TABLE test_compression_as_none COMPRESSION = none as 
select a from test_compression_1_none; \d+ test_compression_as_none - Table "public.test_compression_as_none" + Table "db_b_new_gram_test.test_compression_as_none" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -196,7 +194,7 @@ PARTITION p3 VALUES (4000), PARTITION p4 VALUES (5000) ); \d+ test_list1 - Table "public.test_list1" + Table "db_b_new_gram_test.test_list1" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- col1 | integer | | plain | | @@ -215,7 +213,7 @@ PARTITION p3 VALUES IN (4000), PARTITION p4 VALUES IN (5000) ); \d+ test_list2 - Table "public.test_list2" + Table "db_b_new_gram_test.test_list2" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- col1 | integer | | plain | | @@ -294,29 +292,29 @@ DROP TEMPORARY TABLE test_engine_1 CASCADE; -- new grammar test for analyze table CREATE TABLE t_new_analyze(c1 int, c2 text); ANALYZE NO_WRITE_TO_BINLOG TABLE t_new_analyze; - Table | Op | Msg_type | Msg_text -----------------------+---------+----------+---------- - public.t_new_analyze | analyze | status | OK + Table | Op | Msg_type | Msg_text +----------------------------------+---------+----------+---------- + db_b_new_gram_test.t_new_analyze | analyze | status | OK (1 row) ANALYZE LOCAL TABLE t_new_analyze; - Table | Op | Msg_type | Msg_text -----------------------+---------+----------+---------- - public.t_new_analyze | analyze | status | OK + Table | Op | Msg_type | Msg_text +----------------------------------+---------+----------+---------- + db_b_new_gram_test.t_new_analyze | analyze | status | OK (1 row) ANALYZE TABLE t_new_analyze; - Table | Op | Msg_type | Msg_text -----------------------+---------+----------+---------- - public.t_new_analyze | analyze | status | OK + 
Table | Op | Msg_type | Msg_text +----------------------------------+---------+----------+---------- + db_b_new_gram_test.t_new_analyze | analyze | status | OK (1 row) ANALYZE TABLE t_not_exist, t_new_analyze; - Table | Op | Msg_type | Msg_text -----------------------+---------+----------+---------------------------------------------- - public.t_not_exist | analyze | Error | relation "public.t_not_exist" does not exist - public.t_not_exist | analyze | status | Operation failed - public.t_new_analyze | analyze | status | OK + Table | Op | Msg_type | Msg_text +----------------------------------+---------+----------+---------------------------------------------------------- + db_b_new_gram_test.t_not_exist | analyze | Error | relation "db_b_new_gram_test.t_not_exist" does not exist + db_b_new_gram_test.t_not_exist | analyze | status | Operation failed + db_b_new_gram_test.t_new_analyze | analyze | status | OK (3 rows) -- new grammar test for CREATE TABLESPACE @@ -535,7 +533,7 @@ drop user u_test_tbspace2; CREATE TABLESPACE test_tbspace ADD DATAFILE 'test_tbspace1'; CREATE TABLE t_tbspace(num int) TABLESPACE test_tbspace; \d t_tbspace - Table "public.t_tbspace" +Table "db_b_new_gram_test.t_tbspace" Column | Type | Modifiers --------+---------+----------- num | integer | @@ -547,7 +545,7 @@ CREATE TABLESPACE test_tbspace_ibd ADD DATAFILE 'test_tbspace1.ibd'; WARNING: Suffix ".ibd" of datafile path detected. The actual path will be renamed as "test_tbspace1_ibd" CREATE TABLE t_tbspace(num int) TABLESPACE test_tbspace_ibd; \d t_tbspace - Table "public.t_tbspace" +Table "db_b_new_gram_test.t_tbspace" Column | Type | Modifiers --------+---------+----------- num | integer | @@ -564,21 +562,21 @@ ERROR: Tablespace "test_tbspace_ibd_2" does not exist. 
-- new grammar test for RENAME [TO | AS] CREATE TABLE t_rename(c int); \d t_rename - Table "public.t_rename" +Table "db_b_new_gram_test.t_rename" Column | Type | Modifiers --------+---------+----------- c | integer | ALTER TABLE t_rename RENAME TO t_rename_to; \d t_rename_to - Table "public.t_rename_to" +Table "db_b_new_gram_test.t_rename_to" Column | Type | Modifiers --------+---------+----------- c | integer | ALTER TABLE t_rename_to RENAME AS t_rename_as; \d t_rename_as - Table "public.t_rename_as" +Table "db_b_new_gram_test.t_rename_as" Column | Type | Modifiers --------+---------+----------- c | integer | @@ -591,7 +589,7 @@ CREATE TABLE t_index_new_grammar(c1 int, c2 int); CREATE INDEX test_index_btree_1 ON t_index_new_grammar USING btree(c1); CREATE INDEX test_index_btree_2 USING btree ON t_index_new_grammar(c2); \d t_index_new_grammar -Table "public.t_index_new_grammar" +Table "db_b_new_gram_test.t_index_new_grammar" Column | Type | Modifiers --------+---------+----------- c1 | integer | @@ -935,7 +933,7 @@ SELECT COUNT(*) FROM t_ctas_new; (1 row) \d t_ctas_new - Table "public.t_ctas_new" +Table "db_b_new_gram_test.t_ctas_new" Column | Type | Modifiers ---------+---------+----------- new_c_a | integer | @@ -950,7 +948,7 @@ SELECT COUNT(*) FROM t_ctas_new; (1 row) \d t_ctas_new - Table "public.t_ctas_new" +Table "db_b_new_gram_test.t_ctas_new" Column | Type | Modifiers ---------+---------+----------- new_c_a | integer | @@ -966,7 +964,7 @@ SELECT COUNT(*) FROM t_like; (1 row) \d t_like - Table "public.t_like" +Table "db_b_new_gram_test.t_like" Column | Type | Modifiers --------+---------+----------- a | integer | @@ -999,7 +997,7 @@ SELECT COUNT(*) FROM t_like2; (1 row) \d t_like2 - Table "public.t_like2" +Table "db_b_new_gram_test.t_like2" Column | Type | Modifiers --------+---------+----------- a | integer | @@ -1029,10 +1027,8 @@ SELECT COUNT(*) FROM t_ctas_new; DROP TABLE t_ctas_new; DROP TABLE t_ctas; -drop database if exists test_m; -NOTICE: database 
"test_m" does not exist, skipping -create database test_m dbcompatibility 'b'; -\c test_m +create schema test_m; +set current_schema to 'test_m'; create table test_unique( f1 int, f2 int, @@ -1044,7 +1040,7 @@ create table test_unique( NOTICE: CREATE TABLE / UNIQUE will create implicit index "u_idx_name" for table "test_unique" NOTICE: CREATE TABLE / UNIQUE will create implicit index "u_key_name" for table "test_unique" \d+ test_unique - Table "public.test_unique" + Table "test_m.test_unique" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -1207,6 +1203,8 @@ select * from ignore_range_range partition (p_201901, p_201905_b); (2 rows) drop table ignore_range_range; -\c postgres -drop database if exists test_m; -drop database db_b_new_gram_test; +drop schema test_m cascade; +NOTICE: drop cascades to table test_unique +drop schema db_b_new_gram_test cascade; +--?.* +reset current_schema; diff --git a/contrib/dolphin/expected/db_b_parser1.out b/contrib/dolphin/expected/db_b_parser1.out index d657801f7cc9f729956636436c77bb99cc5a260b..ba66c9033ed66762d08e47e3bb7a98afab811943 100644 --- a/contrib/dolphin/expected/db_b_parser1.out +++ b/contrib/dolphin/expected/db_b_parser1.out @@ -1,7 +1,5 @@ -drop database if exists db_b_parser1; -NOTICE: database "db_b_parser1" does not exist, skipping -create database db_b_parser1 dbcompatibility 'b'; -\c db_b_parser1 +create schema db_b_parser1; +set current_schema to 'db_b_parser1'; select 'bbbbb' regexp '^([bc])\1*$' as t, 'bbbbb' not regexp '^([bc])\1*$' as t2, 'bbbbb' rlike '^([bc])\1*$' as t; t | t2 | t ---+----+--- @@ -356,5 +354,5 @@ select '-12.3abc' rlike 'null'; 0 (1 row) -\c postgres -drop database if exists db_b_parser1; +drop schema db_b_parser1 cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/db_b_parser2.out b/contrib/dolphin/expected/db_b_parser2.out index 
a04872cca44ad0bad4369278909ba0a94505a4c5..fbb001fb077ebcf95b46c1d66b25fc0380cd6e54 100644 --- a/contrib/dolphin/expected/db_b_parser2.out +++ b/contrib/dolphin/expected/db_b_parser2.out @@ -1,7 +1,5 @@ -drop database if exists db_b_parser2; -NOTICE: database "db_b_parser2" does not exist, skipping -create database db_b_parser2 dbcompatibility 'b'; -\c db_b_parser2 +create schema db_b_parser2; +set current_schema to 'db_b_parser2'; --验证DAYOFMONTH() DAYOFWEEK() DAYOFYEAR() HOUR() MICROSECOND() MINUTE() QUARTER() SECOND() WEEKDAY() WEEKOFYEAR() YEAR() select DAYOFMONTH(datetime '2021-11-4 16:30:44.341191'); dayofmonth @@ -648,5 +646,5 @@ select fchar,length(fchar) from fchar_test order by 1,2; (1 row) drop table fchar_test; -\c postgres -drop database if exists db_b_parser2; +drop schema db_b_parser2 cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/db_b_parser3.out b/contrib/dolphin/expected/db_b_parser3.out index ba7bd86035625c20e62a5347612a76b04924569b..b6d3cc5853f8640a2d8777406d7c76408448392b 100644 --- a/contrib/dolphin/expected/db_b_parser3.out +++ b/contrib/dolphin/expected/db_b_parser3.out @@ -1,7 +1,5 @@ -drop database if exists db_b_parser3; -NOTICE: database "db_b_parser3" does not exist, skipping -create database db_b_parser3 dbcompatibility 'b'; -\c db_b_parser3 +create schema db_b_parser3; +set current_schema to 'db_b_parser3'; --测试点一:验证lcase函数 select lcase('ABc'), lcase('哈哈'), lcase('123456'),lcase('哈市&%%¥#'),lcase(null); lcase | lcase | lcase | lcase | lcase @@ -416,5 +414,8 @@ select acos(-1.000001); (1 row) -\c postgres -drop database if exists db_b_parser3; +drop schema db_b_parser3 cascade; +NOTICE: drop cascades to 2 other objects +DETAIL: drop cascades to table tb_db_b_parser0003 +drop cascades to table t1 +reset current_schema; diff --git a/contrib/dolphin/expected/db_b_parser4.out b/contrib/dolphin/expected/db_b_parser4.out index f4942158c20c4fc4d4200c15d0329d1302b4f0d1..af8153daaed27fd873ea8222d0e6204716488268 100644 --- 
a/contrib/dolphin/expected/db_b_parser4.out +++ b/contrib/dolphin/expected/db_b_parser4.out @@ -1,7 +1,5 @@ -drop database if exists db_b_parser4; -NOTICE: database "db_b_parser4" does not exist, skipping -create database db_b_parser4 dbcompatibility 'b'; -\c db_b_parser4 +create schema db_b_parser4; +set current_schema to 'db_b_parser4'; --验证text类型 drop table if exists tb_db_b_parser_0001; NOTICE: table "tb_db_b_parser_0001" does not exist, skipping @@ -11,7 +9,7 @@ drop table if exists tb_db_b_parser_0002; NOTICE: table "tb_db_b_parser_0002" does not exist, skipping create table tb_db_b_parser_0002(a decimal, b number, c dec, d numeric, e fixed); \d tb_db_b_parser_0002 - Table "public.tb_db_b_parser_0002" +Table "db_b_parser4.tb_db_b_parser_0002" Column | Type | Modifiers --------+---------------+----------- a | numeric(10,0) | @@ -25,7 +23,7 @@ drop table if exists tb_default_float; NOTICE: table "tb_default_float" does not exist, skipping create table tb_default_float(a float4(10)); \d tb_default_float -Table "public.tb_default_float" +Table "db_b_parser4.tb_default_float" Column | Type | Modifiers --------+------+----------- a | real | @@ -34,7 +32,7 @@ drop table if exists tb_default_double; NOTICE: table "tb_default_double" does not exist, skipping create table tb_default_double(a double); \d tb_default_double - Table "public.tb_default_double" +Table "db_b_parser4.tb_default_double" Column | Type | Modifiers --------+------------------+----------- a | double precision | @@ -44,7 +42,7 @@ drop table if exists tb_real_float; NOTICE: table "tb_real_float" does not exist, skipping create table tb_real_float(a real, b float); \d tb_real_float -Table "public.tb_real_float" +Table "db_b_parser4.tb_real_float" Column | Type | Modifiers --------+------+----------- a | real | @@ -54,5 +52,6 @@ drop table if exists tb_db_b_parser_0002; drop table if exists tb_default_float; drop table if exists tb_default_double; drop table if exists tb_real_float; -\c postgres -drop 
database if exists db_b_parser4; +drop schema db_b_parser4 cascade; +NOTICE: drop cascades to table tb_db_b_parser_0001 +reset current_schema; diff --git a/contrib/dolphin/expected/db_b_plpgsql_test.out b/contrib/dolphin/expected/db_b_plpgsql_test.out index a8b28cfa094a9dc2f2a8c0b6e17fa8307a3e09b1..b8b4603a2ee8749e4da04d3dc53dc3eed8d23988 100644 --- a/contrib/dolphin/expected/db_b_plpgsql_test.out +++ b/contrib/dolphin/expected/db_b_plpgsql_test.out @@ -1,7 +1,5 @@ -drop database if exists db_b_plpgsql_test; -NOTICE: database "db_b_plpgsql_test" does not exist, skipping -create database db_b_plpgsql_test dbcompatibility 'b'; -\c db_b_plpgsql_test +create schema db_b_plpgsql_test; +set current_schema to 'db_b_plpgsql_test'; create table tb_b_grammar_0038(a text(10)) engine = 表1; create or replace procedure proc_01() as @@ -47,5 +45,11 @@ SELECT * from tb_b_grammar_0038; tom (1 row) -\c postgres -drop database if exists db_b_plpgsql_test; +drop schema db_b_plpgsql_test cascade; +NOTICE: drop cascades to 5 other objects +DETAIL: drop cascades to table tb_b_grammar_0038 +drop cascades to function proc_01() +drop cascades to table j1_tbl +drop cascades to table j2_tbl +drop cascades to function peoc_165() +reset current_schema; diff --git a/contrib/dolphin/expected/db_b_rename_user_test.out b/contrib/dolphin/expected/db_b_rename_user_test.out index 35d8b9fc720b6e06da5cff23f5c34a6e8eebb660..d627e6b1b5917aef3c0732017f36096d80c5078c 100644 --- a/contrib/dolphin/expected/db_b_rename_user_test.out +++ b/contrib/dolphin/expected/db_b_rename_user_test.out @@ -1,7 +1,5 @@ -drop database if exists db_b_rename_user_test; -NOTICE: database "db_b_rename_user_test" does not exist, skipping -create database db_b_rename_user_test dbcompatibility 'b'; -\c db_b_rename_user_test +create schema db_b_rename_user_test; +set current_schema to 'db_b_rename_user_test'; CREATE USER user1 WITH ENCRYPTED PASSWORD 'user1@1234'; NOTICE: The iteration value of password is not recommended.Setting the 
iteration value too small reduces the security of the password, and setting it too large results in performance degradation. CREATE USER user2 WITH ENCRYPTED PASSWORD 'user2@1234'; @@ -55,5 +53,5 @@ select usename from pg_catalog.pg_user drop user user4; drop user user5; drop user user6; -\c postgres -drop database if exists db_b_rename_user_test; +drop schema db_b_rename_user_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/default_guc.out b/contrib/dolphin/expected/default_guc.out index b178a6eb3b100ebc33a2cdb06497b1b3ea9c8250..54b3934259c195ed2eca54a233562dc9918fc893 100644 --- a/contrib/dolphin/expected/default_guc.out +++ b/contrib/dolphin/expected/default_guc.out @@ -1,7 +1,5 @@ -drop database if exists default_guc; -NOTICE: database "default_guc" does not exist, skipping -create database default_guc dbcompatibility 'b'; -\c default_guc +create schema default_guc; +set current_schema to 'default_guc'; show behavior_compat_options; behavior_compat_options ------------------------- @@ -51,5 +49,5 @@ select md5('0.123'); 677738b969d6037efce2c328c6814580 (1 row) -\c postgres -drop database if exists default_guc; +drop schema default_guc cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/describe.out b/contrib/dolphin/expected/describe.out index 1b57dec16404da51803fbad9f5d4e9de4e22267a..d9ec2283fb729b065e80c41d8ee04395eb1649fc 100644 --- a/contrib/dolphin/expected/describe.out +++ b/contrib/dolphin/expected/describe.out @@ -1,7 +1,5 @@ -drop database if exists db_describe; -NOTICE: database "db_describe" does not exist, skipping -create database db_describe dbcompatibility 'b'; -\c db_describe +create schema db_describe; +set current_schema to 'db_describe'; CREATE TABLE test2 ( id int PRIMARY KEY @@ -141,7 +139,7 @@ describe test; c | character varying(10) | NO | | NULL | (3 rows) -desc public.test; +desc db_describe.test; Field | Type | Null | Key | Default | Extra 
-------+-----------------------+------+-----+---------------------------------+------- a | integer | NO | PRI | nextval('test_a_seq'::regclass) | @@ -177,5 +175,9 @@ desc sc.test4; -------+------+------+-----+---------+------- (0 rows) -\c postgres -drop database if exists db_describe; +drop schema db_describe cascade; +NOTICE: drop cascades to 3 other objects +DETAIL: drop cascades to table db_describe.test2 +drop cascades to table db_describe.test3 +drop cascades to table db_describe.test +reset current_schema; diff --git a/contrib/dolphin/expected/empty_value_lists.out b/contrib/dolphin/expected/empty_value_lists.out index 282b0639dd17be613c53ee8494eda26a94f40b9f..c0affa4a8ca4a47d7d40e778a152564ef328dcfa 100644 --- a/contrib/dolphin/expected/empty_value_lists.out +++ b/contrib/dolphin/expected/empty_value_lists.out @@ -1,8 +1,5 @@ --- b compatibility case -drop database if exists empty_value_lists; -NOTICE: database "empty_value_lists" does not exist, skipping -create database empty_value_lists dbcompatibility 'b'; -\c empty_value_lists +create schema empty_value_lists; +set current_schema to 'empty_value_lists'; create table test1(num int); create table test2(num int default 3); create table test3(num int not null); @@ -551,5 +548,51 @@ select * from m4; | | abc | bcd (2 rows) -\c postgres -drop database if exists empty_value_lists; +drop schema empty_value_lists cascade; +NOTICE: drop cascades to 45 other objects +DETAIL: drop cascades to table test1 +drop cascades to table test2 +drop cascades to table test3 +drop cascades to table test4 +drop cascades to table test5 +drop cascades to table test6 +drop cascades to table test7 +drop cascades to table test8 +drop cascades to table test9 +drop cascades to table test10 +drop cascades to table test11 +drop cascades to table test12 +drop cascades to table test13 +drop cascades to table test14 +drop cascades to table test15 +drop cascades to table test16 +drop cascades to table test17 +drop cascades to table test18 
+drop cascades to table test19 +drop cascades to table test20 +drop cascades to table test21 +drop cascades to table test22 +drop cascades to table test23 +drop cascades to table test24 +drop cascades to table test25 +drop cascades to table test26 +drop cascades to table test27 +drop cascades to table test28 +drop cascades to table test29 +drop cascades to table test30 +drop cascades to table test31 +drop cascades to table test32 +drop cascades to table test33 +drop cascades to table test34 +drop cascades to table test35 +drop cascades to table test36 +drop cascades to table test37 +drop cascades to table test38 +drop cascades to table test39 +drop cascades to table test40 +drop cascades to table test41 +drop cascades to table m1 +drop cascades to table m2 +drop cascades to table m3 +drop cascades to table m4 +reset current_schema; diff --git a/contrib/dolphin/expected/empty_value_support_value.out b/contrib/dolphin/expected/empty_value_support_value.out index ff1eb4700b70e6be4ca11b53f11d8f4ba22bfd68..1c92f4b04e29202df08a6c3a7714522bbe84c2cb 100644 --- a/contrib/dolphin/expected/empty_value_support_value.out +++ b/contrib/dolphin/expected/empty_value_support_value.out @@ -1,7 +1,5 @@ -drop database if exists empty_value_support_value; -NOTICE: database "empty_value_support_value" does not exist, skipping -create database empty_value_support_value dbcompatibility 'b'; -\c empty_value_support_value +create schema empty_value_support_value; +set current_schema to 'empty_value_support_value'; create table test1(num int not null); insert into test1 value(); ERROR: null value in column "num" violates not-null constraint @@ -26,5 +24,6 @@ select * from test1; 0 (3 rows) -\c postgres -drop database if exists empty_value_support_value; +drop schema empty_value_support_value cascade; +NOTICE: drop cascades to table test1 +reset current_schema; diff --git a/contrib/dolphin/expected/explain_desc.out b/contrib/dolphin/expected/explain_desc.out index 
79edf4da90a603bfa875730f2eac691308fcd9cb..96e980e5bd934a15bc9b24245e9e423458863da0 100644 --- a/contrib/dolphin/expected/explain_desc.out +++ b/contrib/dolphin/expected/explain_desc.out @@ -1,5 +1,5 @@ -create database db_explain_desc with dbcompatibility 'B'; -\c db_explain_desc +create schema db_explain_desc; +set current_schema to 'db_explain_desc'; create table ed_t(c1 int, c2 varchar(100), c3 int default 10); insert into ed_t values(generate_series(1, 10), 'hello', 100); -- 1.use explain to query table's info @@ -11,7 +11,7 @@ explain ed_t; c3 | integer | YES | | 10 | (3 rows) -explain public.ed_t; +explain db_explain_desc.ed_t; Field | Type | Null | Key | Default | Extra -------+------------------------+------+-----+---------+------- c1 | integer | YES | | NULL | @@ -424,5 +424,5 @@ explain format='TraDitional' delete from ed_t where c1 < 5; (3 rows) drop table ed_t; -\c postgres -drop database db_explain_desc; +drop schema db_explain_desc cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/export_set.out b/contrib/dolphin/expected/export_set.out index 45374c2a532841fd37347006d7bbf096737d5b96..4b27aa8432ca4ab460b3a4eaaae852b8b5716594 100644 --- a/contrib/dolphin/expected/export_set.out +++ b/contrib/dolphin/expected/export_set.out @@ -1,7 +1,5 @@ -drop database if exists export_set; -NOTICE: database "export_set" does not exist, skipping -create database export_set dbcompatibility 'b'; -\c export_set +create schema export_set; +set current_schema to 'export_set'; -- 测试缺省值 SELECT EXPORT_SET(5,'Y','N',',',5); export_set @@ -122,5 +120,5 @@ SELECT EXPORT_SET(5,'Y','N',',,,,,,,,,,,,',5); Y,,,,,,,,,,,,N,,,,,,,,,,,,Y,,,,,,,,,,,,N,,,,,,,,,,,,N (1 row) -\c postgres -drop database if exists export_set; +drop schema export_set cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/float_numeric_test/db_b_log_test.out b/contrib/dolphin/expected/float_numeric_test/db_b_log_test.out index 
9565c3ad2c7f183992c39850bed529fbb1206afa..2225b51c50914a61fc5f7169673a19d1c6b199f9 100644 --- a/contrib/dolphin/expected/float_numeric_test/db_b_log_test.out +++ b/contrib/dolphin/expected/float_numeric_test/db_b_log_test.out @@ -1,13 +1,17 @@ +drop database if exists db_b_log_test; +NOTICE: database "db_b_log_test" does not exist, skipping +create database db_b_log_test dbcompatibility 'A'; +\c db_b_log_test SELECT LOG(10); log ----- 1 (1 row) -drop database if exists db_b_log_test; -NOTICE: database "db_b_log_test" does not exist, skipping -create database db_b_log_test dbcompatibility 'B'; -\c db_b_log_test +\c contrib_regression +drop database db_b_log_test; +create schema db_b_log_test; +set current_schema to 'db_b_log_test'; SELECT LOG(10); log ------------------ @@ -218,5 +222,5 @@ select log10(b'111'::int); 0.845098040014257 (1 row) -\c postgres -drop database db_b_log_test; +drop schema db_b_log_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/float_numeric_test/db_b_sqrt_test.out b/contrib/dolphin/expected/float_numeric_test/db_b_sqrt_test.out index d2f59e5e0ed627e5a5a31b17b8215f5fdb369143..4278a612ce1b335b6dbbff1e37ff66a56e1c360d 100644 --- a/contrib/dolphin/expected/float_numeric_test/db_b_sqrt_test.out +++ b/contrib/dolphin/expected/float_numeric_test/db_b_sqrt_test.out @@ -1,3 +1,7 @@ +drop database if exists db_b_sqrt_test; +NOTICE: database "db_b_sqrt_test" does not exist, skipping +create database db_b_sqrt_test dbcompatibility 'A'; +\c db_b_sqrt_test SELECT SQRT(64); sqrt ------ @@ -7,10 +11,10 @@ SELECT SQRT(64); SELECT SQRT(-64); ERROR: cannot take square root of a negative number CONTEXT: referenced column: sqrt -drop database if exists db_b_sqrt_test; -NOTICE: database "db_b_sqrt_test" does not exist, skipping -create database db_b_sqrt_test dbcompatibility 'B'; -\c db_b_sqrt_test +\c contrib_regression +drop database db_b_sqrt_test; +create schema db_b_sqrt_test; +set current_schema to 'db_b_sqrt_test'; SELECT 
SQRT(64); sqrt ------ @@ -59,5 +63,5 @@ select sqrt(b'111'::int); 2.64575131106459 (1 row) -\c postgres -drop database db_b_sqrt_test; +drop schema db_b_sqrt_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/flush.out b/contrib/dolphin/expected/flush.out index 949b641cd0c8780a9a8f89ed75a216b61dd3a1f2..1903c3f75dfbb9e2b14fa463aff6485c48209150 100644 --- a/contrib/dolphin/expected/flush.out +++ b/contrib/dolphin/expected/flush.out @@ -1,12 +1,10 @@ -drop database if exists db_flush; -NOTICE: database "db_flush" does not exist, skipping -create database db_flush dbcompatibility 'b'; -\c db_flush +create schema db_flush; +set current_schema to 'db_flush'; FLUSH BINARY LOGS; --? pg_switch_xlog --?.* --?.* (1 row) -\c postgres -drop database if exists db_flush; +drop schema db_flush cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/get_b_database.out b/contrib/dolphin/expected/get_b_database.out index 3d08b6036c1e4981069babfe81c0d3a2f1d80fbd..ce4a1caae3cf6c67ee9b1c7b57a5fed634205c12 100644 --- a/contrib/dolphin/expected/get_b_database.out +++ b/contrib/dolphin/expected/get_b_database.out @@ -1,11 +1,9 @@ -drop database if exists get_db; -NOTICE: database "get_db" does not exist, skipping -create database get_db dbcompatibility 'b'; -\c get_db +create schema get_db; +set current_schema to 'get_db'; select database(); database ---------- - public + get_db (1 row) create schema testdb; @@ -30,5 +28,5 @@ select database(); testdb1 (1 row) -\c postgres -drop database if exists get_db; +drop schema get_db cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/greatest_least.out b/contrib/dolphin/expected/greatest_least.out index 799466e15495bcbf4ff1a22ad820df2e6ba82580..9ea11b371929cbbc80b38f129badf08180f81b6d 100644 --- a/contrib/dolphin/expected/greatest_least.out +++ b/contrib/dolphin/expected/greatest_least.out @@ -1,7 +1,5 @@ -drop database if exists greatest_least; -NOTICE: database "greatest_least" does not 
exist, skipping -create database greatest_least dbcompatibility 'b'; -\c greatest_least +create schema greatest_least; +set current_schema to 'greatest_least'; --return null if input include null select GREATEST(null,1,2), GREATEST(null,1,2) is null; greatest | ?column? @@ -28,5 +26,5 @@ select LEAST(1,2); 1 (1 row) -\c postgres -drop database if exists greatest_least; +drop schema greatest_least cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/group_concat_test.out b/contrib/dolphin/expected/group_concat_test.out index 7387479cb23829be6b6db960fc4c4d473d3fd519..0c84da59a36c998cf3f29929317a8f1e97731076 100644 --- a/contrib/dolphin/expected/group_concat_test.out +++ b/contrib/dolphin/expected/group_concat_test.out @@ -1,5 +1,5 @@ -create database t dbcompatibility 'B'; -\c t; +create schema t; +set current_schema to 't'; create table t(id text, v text); insert into t(id, v) values('1','a'),('2','b'),('1','c'),('2','d'); select group_concat(id,v separator ';') into tmp_table from t; @@ -19,9 +19,12 @@ explain verbose select id, group_concat(VARIADIC ARRAY[id,':',v] order by id) as -> Sort (cost=61.11..63.28 rows=869 width=64) Output: id, v Sort Key: t.id - -> Seq Scan on public.t (cost=0.00..18.69 rows=869 width=64) + -> Seq Scan on t.t (cost=0.00..18.69 rows=869 width=64) Output: id, v (8 rows) -\c postgres -drop database t; +drop schema t cascade; +NOTICE: drop cascades to 2 other objects +DETAIL: drop cascades to table t +drop cascades to table tmp_table +reset current_schema; diff --git a/contrib/dolphin/expected/if_not_exists_test.out b/contrib/dolphin/expected/if_not_exists_test.out index f31a01c07346e23f35638f3ea31c74192d11121b..5a527022b2f9c16fe16ed02dc01d8b991cf03251 100644 --- a/contrib/dolphin/expected/if_not_exists_test.out +++ b/contrib/dolphin/expected/if_not_exists_test.out @@ -1,7 +1,5 @@ -drop database if exists test_if_not_exists; -NOTICE: database "test_if_not_exists" does not exist, skipping -create database test_if_not_exists 
dbcompatibility 'B'; -\c test_if_not_exists +create schema test_if_not_exists; +set current_schema to 'test_if_not_exists'; CREATE USER ZZZ WITH PASSWORD 'openGauss@123'; NOTICE: The iteration value of password is not recommended.Setting the iteration value too small reduces the security of the password, and setting it too large results in performance degradation. CREATE USER ZZZ WITH PASSWORD 'openGauss@123'; @@ -11,5 +9,5 @@ NOTICE: role "zzz" already exists DROP USER ZZZ; CREATE USER IF NOT EXISTS ZZZ WITH PASSWORD 'openGauss@123'; NOTICE: The iteration value of password is not recommended.Setting the iteration value too small reduces the security of the password, and setting it too large results in performance degradation. -\c postgres -drop database test_if_not_exists; +drop schema test_if_not_exists cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/implicit_cast.out b/contrib/dolphin/expected/implicit_cast.out index 40034d726b1a39bdb5125cafee3bfe7b2581a3f9..f6a1cfad6fdfa7924cc5b9baf621a9de260a35bb 100644 --- a/contrib/dolphin/expected/implicit_cast.out +++ b/contrib/dolphin/expected/implicit_cast.out @@ -1,7 +1,5 @@ -drop database if exists implicit_cast; -NOTICE: database "implicit_cast" does not exist, skipping -create database implicit_cast dbcompatibility 'b'; -\c implicit_cast +create schema implicit_cast; +set current_schema to 'implicit_cast'; select 1::int1 % 1::float4; ?column? 
---------- @@ -290,5 +288,5 @@ select 1::int8 | 1::text; 1 (1 row) -\c postgres -drop database if exists implicit_cast; +drop schema implicit_cast cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/insert_set.out b/contrib/dolphin/expected/insert_set.out index 737f33b816507d1dfccf8ef2a8774f94a657d427..e391cab2433f7903a5bf97f97c83f454e9ee75bb 100644 --- a/contrib/dolphin/expected/insert_set.out +++ b/contrib/dolphin/expected/insert_set.out @@ -1,7 +1,5 @@ -drop database if exists insert_set; -NOTICE: database "insert_set" does not exist, skipping -create database insert_set dbcompatibility 'B'; -\c insert_set +create schema insert_set; +set current_schema to 'insert_set'; create table test_figure(tinyint tinyint, smallint smallint, integer integer, binary_integer binary_integer, bigint bigint); insert into test_figure set bigint = 7234134, binary_integer = 1011101, integer = 10000, smallint = 1, tinyint = 3; select * from test_figure; @@ -95,5 +93,14 @@ select * from test_error; | 23 (1 row) -\c postgres -drop database insert_set; +drop schema insert_set cascade; +NOTICE: drop cascades to 8 other objects +DETAIL: drop cascades to table test_figure +drop cascades to table test_money +drop cascades to table test_boolean +drop cascades to table test_char +drop cascades to table test_binary +drop cascades to table test_time +drop cascades to table test_netid +drop cascades to table test_error +reset current_schema; diff --git a/contrib/dolphin/expected/join_without_on.out b/contrib/dolphin/expected/join_without_on.out index 0e69cd9c610404264f11bcc100a23583e1714dcc..5be7016d98dbd668da7e67dd80567ff1def3e1dc 100644 --- a/contrib/dolphin/expected/join_without_on.out +++ b/contrib/dolphin/expected/join_without_on.out @@ -1,7 +1,5 @@ -drop database if exists join_without_on; -NOTICE: database "join_without_on" does not exist, skipping -create database join_without_on dbcompatibility 'b'; -\c join_without_on +create schema join_without_on; +set 
current_schema to 'join_without_on'; CREATE TABLE J1_TBL ( i integer, j integer, @@ -136,5 +134,10 @@ SELECT * FROM J1_TBL JOIN J2_TBL INNER JOIN J3_TBL INNER JOIN J4_TBL ON J1_TBL 1 | 4 | one | 1 | -1 | 2 | 2 | 1 | -1 (4 rows) -\c postgres -drop database if exists join_without_on; +drop schema join_without_on cascade; +NOTICE: drop cascades to 4 other objects +DETAIL: drop cascades to table j1_tbl +drop cascades to table j2_tbl +drop cascades to table j3_tbl +drop cascades to table j4_tbl +reset current_schema; diff --git a/contrib/dolphin/expected/json_array.out b/contrib/dolphin/expected/json_array.out index 21ce52181fdd20c3d9a3fe3b3f9d97d8783d65c4..4be0dd043699b6e21d2652f391225e659c391bce 100644 --- a/contrib/dolphin/expected/json_array.out +++ b/contrib/dolphin/expected/json_array.out @@ -1,7 +1,5 @@ -drop database if exists test_json_array; -NOTICE: database "test_json_array" does not exist, skipping -create database test_json_array dbcompatibility 'B'; -\c test_json_array +create schema test_json_array; +set current_schema to 'test_json_array'; select json_array(1,2,3,4); json_array -------------- @@ -77,5 +75,5 @@ select name from dataa; (1 row) drop table dataa; -\c postgres -drop database if exists test_json_array; +drop schema test_json_array cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/json_array_append.out b/contrib/dolphin/expected/json_array_append.out index 840ff387d2569c555128c5f3e4e097ef8fd95549..05f21f96d2505d8205bc316163bfc1f295478857 100644 --- a/contrib/dolphin/expected/json_array_append.out +++ b/contrib/dolphin/expected/json_array_append.out @@ -1,7 +1,5 @@ -drop database if exists test_json_array_append; -NOTICE: database "test_json_array_append" does not exist, skipping -create database test_json_array_append dbcompatibility'B'; -\c test_json_array_append +create schema test_json_array_append; +set current_schema to 'test_json_array_append'; select JSON_ARRAY_APPEND('[1, [2, 3]]', '$[1]', 4); json_array_append 
------------------- @@ -184,5 +182,5 @@ CONTEXT: referenced column: json_array_append select JSON_ARRAY_APPEND('[1, [2, 3]]', ' ', 4); ERROR: Invalid JSON path expression. The error is around argument 1. CONTEXT: referenced column: json_array_append -\c postgres -drop database if exists test_json_array_append; +drop schema test_json_array_append cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/json_array_insert.out b/contrib/dolphin/expected/json_array_insert.out index a2cf343886d7ce72b204aff11dfc0f4e36eeb246..d81b94b91177f795d5297d5ab335ca15eec39524 100644 --- a/contrib/dolphin/expected/json_array_insert.out +++ b/contrib/dolphin/expected/json_array_insert.out @@ -1,7 +1,5 @@ -drop database if exists test_json_array_insert; -NOTICE: database "test_json_array_insert" does not exist, skipping -create database test_json_array_insert dbcompatibility 'B'; -\c test_json_array_insert +create schema test_json_array_insert; +set current_schema to 'test_json_array_insert'; SELECT JSON_ARRAY_INSERT('[1, [2, 3], {"a": [4, 5]}]', '$[0]', 0); json_array_insert ------------------------------- @@ -204,5 +202,5 @@ SELECT JSON_ARRAY_INSERT('[1, [2, 3]]', ' ', 4); ERROR: Invalid JSON path expression. The error is around argument 1. 
CONTEXT: referenced column: json_array_insert -\c postgres -drop database if exists test_json_array_insert; +drop schema test_json_array_insert cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/json_arrayagg.out b/contrib/dolphin/expected/json_arrayagg.out index 96dcc7e598590fda06987664594924e0d108db24..24a28e6a0a97bfa18e3ffbe89c8197b228137aac 100644 --- a/contrib/dolphin/expected/json_arrayagg.out +++ b/contrib/dolphin/expected/json_arrayagg.out @@ -1,7 +1,5 @@ -drop database if exists json_arrayagg_test; -NOTICE: database "json_arrayagg_test" does not exist, skipping -create database json_arrayagg_test dbcompatibility 'B'; -\c json_arrayagg_test +create schema json_arrayagg_test; +set current_schema to 'json_arrayagg_test'; -- create table for test create table City(District varchar(30), Name varchar(30), Population int); insert into City values ('Capital Region','Canberra',322723); @@ -85,5 +83,6 @@ select json_arrayagg(a) from time_table; ["08-22-2020", "10-01-2021", "12-04-2022"] (1 row) -\c postgres -drop database json_arrayagg_test; +drop schema json_arrayagg_test cascade; +NOTICE: drop cascades to table city +reset current_schema; diff --git a/contrib/dolphin/expected/json_contains.out b/contrib/dolphin/expected/json_contains.out index 2641cf48c28dff2a2fcdc6534e948847aa37c0ae..d435a263719d5b961c50c8f13bf66a152e550745 100644 --- a/contrib/dolphin/expected/json_contains.out +++ b/contrib/dolphin/expected/json_contains.out @@ -1,7 +1,5 @@ -drop database if exists test_json_contains; -NOTICE: database "test_json_contains" does not exist, skipping -create database test_json_contains dbcompatibility 'b'; -\c test_json_contains +create schema test_json_contains; +set current_schema to 'test_json_contains'; select json_contains('1',null); json_contains --------------- @@ -369,5 +367,5 @@ select *, json_contains(target, candidate, path) from json_contains_test; (8 rows) drop
table json_contains_test; -\c postgres; -drop database if exists test_json_contains; +drop schema test_json_contains cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/json_contains_path.out b/contrib/dolphin/expected/json_contains_path.out index 191aa069052ef806653bdf69bfbb0f6e61e5760b..c303c7c19fb003d820e1303e47b04f8e5d0d3d24 100644 --- a/contrib/dolphin/expected/json_contains_path.out +++ b/contrib/dolphin/expected/json_contains_path.out @@ -1,7 +1,5 @@ -drop database if exists test_json_contains_path; -NOTICE: database "test_json_contains_path" does not exist, skipping -create database test_json_contains_path dbcompatibility 'b'; -\c test_json_contains_path +create schema test_json_contains_path; +set current_schema to 'test_json_contains_path'; select json_contains_path(null,'one','$[0]'); json_contains_path -------------------- @@ -178,5 +176,5 @@ select *, json_contains_path(target, mode, '$.a.d', '$.c.d') from json_contains_ (2 rows) drop table json_contains_path_test; -\c postgres; -drop database if exists test_json_contains_path; +drop schema test_json_contains_path cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/json_depth.out b/contrib/dolphin/expected/json_depth.out index cb57354e50b2dc0eabd8ee90cb19eb7db8e47f2d..8a96092c642e0a1d9b6462ed46e0f9ab2c826c12 100644 --- a/contrib/dolphin/expected/json_depth.out +++ b/contrib/dolphin/expected/json_depth.out @@ -1,7 +1,5 @@ -drop database if exists test_json_depth; -NOTICE: database "test_json_depth" does not exist, skipping -create database test_json_depth dbcompatibility 'B'; -\c test_json_depth +create schema test_json_depth; +set current_schema to 'test_json_depth'; select json_depth('{}'); json_depth ------------ @@ -142,5 +140,5 @@ select json_depth(data) from test1; (3 rows) drop table test1; -\c postgres -drop database if exists test_json_depth; +drop schema test_json_depth cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/json_extract.out 
b/contrib/dolphin/expected/json_extract.out index e3163be1ef0e42d81195589a0af8c60d486b0949..787927314954778475331a50913a2032773f71e8 100644 --- a/contrib/dolphin/expected/json_extract.out +++ b/contrib/dolphin/expected/json_extract.out @@ -1,7 +1,5 @@ -drop database if exists test_json_extract; -NOTICE: database "test_json_extract" does not exist, skipping -create database test_json_extract dbcompatibility'B'; -\c test_json_extract +create schema test_json_extract; +set current_schema to 'test_json_extract'; select json_extract('{"a": "lihua"}', '$.a'); json_extract -------------- @@ -109,5 +107,5 @@ select * from test; {"c": true} (5 rows) -\c postgres -drop database if exists test_json_extract; +drop schema test_json_extract cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/json_insert.out b/contrib/dolphin/expected/json_insert.out index 83549365cf34aab3c59a6f8786e6b9e47e2f61b7..81046931ef651c01b76bbf49719669afa3623633 100644 --- a/contrib/dolphin/expected/json_insert.out +++ b/contrib/dolphin/expected/json_insert.out @@ -1,7 +1,5 @@ -drop database if exists test_json_insert; -NOTICE: database "test_json_insert" does not exist, skipping -create database test_json_insert dbcompatibility'B'; -\c test_json_insert +create schema test_json_insert; +set current_schema to 'test_json_insert'; -- test for basic functionality of json_replace select JSON_INSERT('{"a": 43}', '$.b', 55); json_insert @@ -206,5 +204,5 @@ select * from test; {"a": 43, "b": [{"c": true}, "Test"]} (5 rows) -\c postgres -drop database if exists test_json_insert; +drop schema test_json_insert cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/json_keys.out b/contrib/dolphin/expected/json_keys.out index 4bdb620abf92f206f36ed7f775e5c62c37cef073..7209576b498ea9e037ab7a28d6e3ac27d6cb07cc 100644 --- a/contrib/dolphin/expected/json_keys.out +++ b/contrib/dolphin/expected/json_keys.out @@ -1,7 +1,5 @@ -drop database if exists test_json_keys; -NOTICE: database 
"test_json_keys" does not exist, skipping -create database test_json_keys dbcompatibility'B'; -\c test_json_keys +create schema test_json_keys; +set current_schema to 'test_json_keys'; SELECT JSON_KEYS('{"a":"t1"}'); json_keys ----------- @@ -139,5 +137,6 @@ select name from student; ["a", "b"] (1 row) -\c postgres -drop database if exists test_json_keys; +drop schema test_json_keys cascade; +NOTICE: drop cascades to table student +reset current_schema; diff --git a/contrib/dolphin/expected/json_length.out b/contrib/dolphin/expected/json_length.out index 4c31455c7853ca75da55bffd302b2118aa2537a0..05cec8dbb4b4cc0af0ba5b73b60ef2c54d58542e 100644 --- a/contrib/dolphin/expected/json_length.out +++ b/contrib/dolphin/expected/json_length.out @@ -1,7 +1,5 @@ -drop database if exists test_json_length; -NOTICE: database "test_json_length" does not exist, skipping -create database test_json_length dbcompatibility 'B'; -\c test_json_length +create schema test_json_length; +set current_schema to 'test_json_length'; select json_length(NULL); json_length ------------- @@ -177,5 +175,5 @@ insert into test values (2 rows) drop table test; -\c postgres; -drop database if exists test_json_length; +drop schema test_json_length cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/json_merge_patch.out b/contrib/dolphin/expected/json_merge_patch.out index 1da85515f48aee2258efb26bbbf1297617555a44..a94d75b24aba4286a8550abb4e417cf2d4f3b4ff 100644 --- a/contrib/dolphin/expected/json_merge_patch.out +++ b/contrib/dolphin/expected/json_merge_patch.out @@ -1,7 +1,5 @@ -drop database if exists test_json_merge_patch; -NOTICE: database "test_json_merge_patch" does not exist, skipping -create database test_json_merge_patch dbcompatibility 'B'; -\c test_json_merge_patch +create schema test_json_merge_patch; +set current_schema to 'test_json_merge_patch'; select json_merge_patch(NULL); ERROR: Incorrect parameter count CONTEXT: referenced column: json_merge_patch @@ -340,5 +338,8 @@ 
insert into test1 values {"a": [1, 2], "colin": "huawei", "colinew": "handsome"} | {"1": "jks"} (1 row) -\c postgres; -drop database if exists test_json_merge_patch; +drop schema test_json_merge_patch cascade; +NOTICE: drop cascades to 2 other objects +DETAIL: drop cascades to table test +drop cascades to table test1 +reset current_schema; diff --git a/contrib/dolphin/expected/json_merge_preserve.out b/contrib/dolphin/expected/json_merge_preserve.out index ed903d51deaaf5d63aa37a5d86c8c97225c2f0bf..3bc9e63fefa3dc339c27346952bc991c65c2ac7e 100644 --- a/contrib/dolphin/expected/json_merge_preserve.out +++ b/contrib/dolphin/expected/json_merge_preserve.out @@ -1,7 +1,5 @@ -drop database if exists test_json_merge_preserve; -NOTICE: database "test_json_merge_preserve" does not exist, skipping -create database test_json_merge_preserve dbcompatibility 'B'; -\c test_json_merge_preserve +create schema test_json_merge_preserve; +set current_schema to 'test_json_merge_preserve'; select json_merge_preserve(NULL); ERROR: Incorrect parameter count CONTEXT: referenced column: json_merge_preserve @@ -340,5 +338,8 @@ insert into test1 values {"a": [1, 2], "colin": [{"a": "abc"}, "bcd", "huawei"], "colinew": "handsome"} | [{"a": "abc"}, "bcd", {"1": "jks"}] (1 row) -\c postgres; -drop database if exists test_json_merge_preserve; +drop schema test_json_merge_preserve cascade; +NOTICE: drop cascades to 2 other objects +DETAIL: drop cascades to table test +drop cascades to table test1 +reset current_schema; diff --git a/contrib/dolphin/expected/json_object.out b/contrib/dolphin/expected/json_object.out index 302a4acc51d143a05b9ef226e4b780a2fea007c4..0927e24dc367654e5f554cf27bc583c32cc7cc9b 100644 --- a/contrib/dolphin/expected/json_object.out +++ b/contrib/dolphin/expected/json_object.out @@ -1,7 +1,5 @@ -drop database if exists test_json_object; -NOTICE: database "test_json_object" does not exist, skipping -create database test_json_object dbcompatibility 'B'; -\c test_json_object 
+create schema test_json_object; +set current_schema to 'test_json_object'; -- test for b_compatibility_mode = false select json_object('{a,1,b,2,3,NULL,"d e f","a b c"}'); json_object @@ -364,5 +362,8 @@ select json_object('{a,b,"a b c"}', '{a,1,1}'); {"a" : "a", "b" : "1", "a b c" : "1"} (1 row) -\c postgres -drop database if exists test_json_object; +drop schema test_json_object cascade; +NOTICE: drop cascades to 2 other objects +DETAIL: drop cascades to table tab_json1 +drop cascades to table info1 +reset current_schema; diff --git a/contrib/dolphin/expected/json_objectagg.out b/contrib/dolphin/expected/json_objectagg.out index b676b21fcc7eda782a8560343fc865c3d58dc549..c070f9c303768ee403627bd0771b93dfada23b0c 100644 --- a/contrib/dolphin/expected/json_objectagg.out +++ b/contrib/dolphin/expected/json_objectagg.out @@ -1,7 +1,5 @@ -drop database if exists json_objectagg_test; -NOTICE: database "json_objectagg_test" does not exist, skipping -create database json_objectagg_test dbcompatibility 'B'; -\c json_objectagg_test +create schema json_objectagg_test; +set current_schema to 'json_objectagg_test'; -- create table for test create table City(District varchar(30), Name varchar(30), Population int); insert into City values ('Capital Region','Canberra',322723); @@ -64,5 +62,6 @@ select json_objectagg(b, a) from time_table; {"1": "08-22-2020", "2": "10-01-2021", "3": "12-04-2022"} (1 row) -\c postgres -drop database json_objectagg_test; +drop schema json_objectagg_test cascade; +NOTICE: drop cascades to table city +reset current_schema; diff --git a/contrib/dolphin/expected/json_operator.out b/contrib/dolphin/expected/json_operator.out index dbd6fe019a95d2f8607ea9f203c94f25f0ee4e4c..b69300b04f34a5fceb8520e44380fbe7d72adb51 100644 --- a/contrib/dolphin/expected/json_operator.out +++ b/contrib/dolphin/expected/json_operator.out @@ -1,7 +1,5 @@ -drop database if exists test_operator; -NOTICE: database "test_operator" does not exist, skipping -create database 
test_operator dbcompatibility 'B'; -\c test_operator +create schema test_operator; +set current_schema to 'test_operator'; drop table if exists test1; NOTICE: table "test1" does not exist, skipping create table test1(data json); @@ -152,5 +150,8 @@ select data->>'c' from test2; susan (3 rows) -\c postgres -drop database test_operator; +drop schema test_operator cascade; +NOTICE: drop cascades to 2 other objects +DETAIL: drop cascades to table test1 +drop cascades to table test2 +reset current_schema; diff --git a/contrib/dolphin/expected/json_pretty.out b/contrib/dolphin/expected/json_pretty.out index f65d4db78113046ce672b5727fa77a5814ffd892..41f48dbea70de6350e069cc2e27e47caf79f1318 100644 --- a/contrib/dolphin/expected/json_pretty.out +++ b/contrib/dolphin/expected/json_pretty.out @@ -1,7 +1,5 @@ -drop database if exists test_json_pretty; -NOTICE: database "test_json_pretty" does not exist, skipping -create database test_json_pretty dbcompatibility'B'; -\c test_json_pretty +create schema test_json_pretty; +set current_schema to 'test_json_pretty'; -- test for basic functionality of json_replace select JSON_PRETTY('{"a": 43}'); json_pretty @@ -342,5 +340,5 @@ select * from test; } (5 rows) -\c postgres -drop database test_json_pretty; +drop schema test_json_pretty cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/json_quote.out b/contrib/dolphin/expected/json_quote.out index 390a6146f7cc640f81dbfd4e13b5c08871922fa7..30302c643b0d0c503ca789ac48dc571aed4312bb 100644 --- a/contrib/dolphin/expected/json_quote.out +++ b/contrib/dolphin/expected/json_quote.out @@ -1,7 +1,5 @@ -drop database if exists test_json_quote; -NOTICE: database "test_json_quote" does not exist, skipping -create database test_json_quote dbcompatibility'B'; -\c test_json_quote +create schema test_json_quote; +set current_schema to 'test_json_quote'; select json_quote(E'a\tb'); json_quote ------------ @@ -82,5 +80,5 @@ select name from student; (1 row) drop table student; -\c 
postgres -drop database test_json_quote; +drop schema test_json_quote cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/json_remove.out b/contrib/dolphin/expected/json_remove.out index 5230c75746aa4e7810be70bee8737ac3d0387205..eed2e5b5a51a0451b528136d9340117de216179d 100644 --- a/contrib/dolphin/expected/json_remove.out +++ b/contrib/dolphin/expected/json_remove.out @@ -1,7 +1,5 @@ -drop database if exists test_json_remove; -NOTICE: database "test_json_remove" does not exist, skipping -create database test_json_remove dbcompatibility'B'; -\c test_json_remove +create schema test_json_remove; +set current_schema to 'test_json_remove'; SELECT JSON_REMOVE('[0, 1, 2, [3, 4]]', '$[0]', '$[2]'); json_remove ------------- @@ -114,5 +112,8 @@ select * from tab_json1; {"x": {"a": 3}, "y": 2} (1 row) -\c postgres -drop database if exists test_json_remove; +drop schema test_json_remove cascade; +NOTICE: drop cascades to 2 other objects +DETAIL: drop cascades to table tab_json1 +drop cascades to table info1 +reset current_schema; diff --git a/contrib/dolphin/expected/json_replace.out b/contrib/dolphin/expected/json_replace.out index b6dc21076ed200bb66036491dcd0d31fe88bb896..6aaba5f09fe0ee4b40e2424be94c338d91e7ff82 100644 --- a/contrib/dolphin/expected/json_replace.out +++ b/contrib/dolphin/expected/json_replace.out @@ -1,7 +1,5 @@ -drop database if exists test_json_replace; -NOTICE: database "test_json_replace" does not exist, skipping -create database test_json_replace dbcompatibility 'B'; -\c test_json_replace +create schema test_json_replace; +set current_schema to 'test_json_replace'; -- test for basic functionality of json_replace SELECT JSON_REPLACE('{"a": 1, "b": 2, "c": 3}', '$.b', 9); json_replace @@ -243,5 +241,5 @@ CONTEXT: referenced column: json_replace SELECT JSON_REPLACE('x',2,2); ERROR: Invalid JSON text in argument 1 to function json_replace. 
CONTEXT: referenced column: json_replace -\c postgres -drop database if exists test_json_replace; +drop schema test_json_replace cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/json_search.out b/contrib/dolphin/expected/json_search.out index 365cb8463e26b6ee33c7305f78a1e5e173cbcae7..6eab60d10d1ca7367cadc4f3d7c80828c065bd5d 100644 --- a/contrib/dolphin/expected/json_search.out +++ b/contrib/dolphin/expected/json_search.out @@ -1,7 +1,5 @@ -drop database if exists test_json_search; -NOTICE: database "test_json_search" does not exist, skipping -create database test_json_search dbcompatibility'B'; -\c test_json_search +create schema test_json_search; +set current_schema to 'test_json_search'; select json_search('null','one','null','&','$'); json_search ------------- @@ -810,5 +808,6 @@ select * from json_search_test; (6 rows) drop table json_search_test; -\c postgres; -drop database if exists test_json_search; +drop schema test_json_search cascade; +NOTICE: drop cascades to table c +reset current_schema; diff --git a/contrib/dolphin/expected/json_set.out b/contrib/dolphin/expected/json_set.out index 325d194c111d1f89fbd56d184c4fdd6983b50c45..497aa4ba5dc9fd11f8f8d827e96e043fcc5b09e9 100644 --- a/contrib/dolphin/expected/json_set.out +++ b/contrib/dolphin/expected/json_set.out @@ -1,7 +1,5 @@ -drop database if exists test_json_set; -NOTICE: database "test_json_set" does not exist, skipping -create database test_json_set dbcompatibility 'B'; -\c test_json_set +create schema test_json_set; +set current_schema to 'test_json_set'; select json_set('{"1":2}','$."1"',6); json_set ---------- @@ -108,5 +106,5 @@ select name from dataa; (1 row) drop table dataa; -\c postgres -drop database if exists test_json_set; +drop schema test_json_set cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/json_storage_size.out b/contrib/dolphin/expected/json_storage_size.out index 
05a7e01a282ae12c0b167ea3fab2124bdeb206ef..4206f4d65a39e9276fdeba95b796323337716bfb 100644 --- a/contrib/dolphin/expected/json_storage_size.out +++ b/contrib/dolphin/expected/json_storage_size.out @@ -1,7 +1,5 @@ -drop database if exists test_json_storage_size; -NOTICE: database "test_json_storage_size" does not exist, skipping -create database test_json_storage_size dbcompatibility'B'; -\c test_json_storage_size +create schema test_json_storage_size; +set current_schema to 'test_json_storage_size'; set enable_set_variable_b_format to on; SELECT JSON_STORAGE_SIZE('0'); json_storage_size @@ -141,5 +139,6 @@ FROM SELECT JSON_STORAGE_SIZE('{0,1}'); ERROR: Invalid JSON text in argument 1 to function json_storage_size. CONTEXT: referenced column: json_storage_size -\c postgres -drop database test_json_storage_size +drop schema test_json_storage_size cascade; +NOTICE: drop cascades to table test_json_storage_size +reset current_schema; diff --git a/contrib/dolphin/expected/json_type.out b/contrib/dolphin/expected/json_type.out index 139b025c7bebe42bb11f9e928f4fd5dc2bc40f13..8b4c9cd127b32ab1c8d7a7946b6aab7827a9c67f 100644 --- a/contrib/dolphin/expected/json_type.out +++ b/contrib/dolphin/expected/json_type.out @@ -1,7 +1,5 @@ -drop database if exists test_json_type; -NOTICE: database "test_json_type" does not exist, skipping -create database test_json_type dbcompatibility'B'; -\c test_json_type +create schema test_json_type; +set current_schema to 'test_json_type'; ---string select json_type('"aa"'); json_type @@ -182,5 +180,6 @@ select json_type(t1) from test_type; object (1 row) -\c postgres -drop database test_json_type; +drop schema test_json_type cascade; +NOTICE: drop cascades to table test_type +reset current_schema; diff --git a/contrib/dolphin/expected/json_unquote.out b/contrib/dolphin/expected/json_unquote.out index 50f6a8ae0105abc8bf23dda404138dc31f2a5030..06a0058b979d14e2a60b79a759db17354deada62 100644 --- a/contrib/dolphin/expected/json_unquote.out +++ 
b/contrib/dolphin/expected/json_unquote.out @@ -1,7 +1,5 @@ -drop database if exists test_json_unquote; -NOTICE: database "test_json_unquote" does not exist, skipping -create database test_json_unquote dbcompatibility 'B'; -\c test_json_unquote +create schema test_json_unquote; +set current_schema to 'test_json_unquote'; select json_unquote('"abc"'); json_unquote -------------- @@ -150,5 +148,5 @@ select name from data; (1 row) drop table data; -\c postgres -drop database if exists test_json_unquote; +drop schema test_json_unquote cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/json_valid.out b/contrib/dolphin/expected/json_valid.out index d4bfb757db87364616309d04fb7ed22802ba441f..bc5d6d0385e9e2c7a9aee8285aec8c7e217ad2d9 100644 --- a/contrib/dolphin/expected/json_valid.out +++ b/contrib/dolphin/expected/json_valid.out @@ -1,7 +1,5 @@ -drop database if exists test_json_valid; -NOTICE: database "test_json_valid" does not exist, skipping -create database test_json_valid dbcompatibility 'b'; -\c test_json_valid +create schema test_json_valid; +set current_schema to 'test_json_valid'; select json_valid(NULL); json_valid ------------ @@ -663,5 +661,5 @@ select target, json_valid(target) from json_valid_test; (3 rows) drop table json_valid_test; -\c postgres -drop database if exists test_json_valid; +drop schema test_json_valid cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/keyword_ignore_test/ignore_invalid_input.out b/contrib/dolphin/expected/keyword_ignore_test/ignore_invalid_input.out index 5102eb672ab27caf5fdb3ba3d99460be36156f10..46cbb6e965e348d90db1fa90f0fda896e834bdac 100644 --- a/contrib/dolphin/expected/keyword_ignore_test/ignore_invalid_input.out +++ b/contrib/dolphin/expected/keyword_ignore_test/ignore_invalid_input.out @@ -1,6 +1,6 @@ -- test for insert/update ignore. 
-create database sql_ignore_invalid_input_test dbcompatibility 'B'; -\c sql_ignore_invalid_input_test; +create schema sql_ignore_invalid_input_test; +set current_schema to 'sql_ignore_invalid_input_test'; set timezone to 'PRC'; -- type: tinyint drop table if exists t_tinyint; @@ -1143,5 +1143,35 @@ show timezone; PST8PDT (1 row) -\c postgres -drop database if exists sql_ignore_invalid_input_test; +drop schema sql_ignore_invalid_input_test cascade; +NOTICE: drop cascades to 29 other objects +DETAIL: drop cascades to table t_tinyint +drop cascades to table t_tinyint_unsigned +drop cascades to table t_smallint +drop cascades to table t_smallint_unsigned +drop cascades to table t_int +drop cascades to table t_int_unsigned +drop cascades to table t_bigint +drop cascades to table t_bigint_unsigned +drop cascades to table t_float4 +drop cascades to table t_float8 +drop cascades to table t_numeric +drop cascades to table t_date +drop cascades to table t_time +drop cascades to table t_timestamp +drop cascades to table t_timestamptz +drop cascades to table t_timetz +drop cascades to table t_interval +drop cascades to table t_tinterval +drop cascades to table t_smalldatetime +drop cascades to table t_uuid +drop cascades to table t_point +drop cascades to table t_path +drop cascades to table t_polygon +drop cascades to table t_circle +drop cascades to table t_lseg +drop cascades to table t_box +drop cascades to table t_json +drop cascades to table t_jsonb +drop cascades to table t_bit +reset current_schema; diff --git a/contrib/dolphin/expected/keyword_ignore_test/ignore_no_matched_partition.out b/contrib/dolphin/expected/keyword_ignore_test/ignore_no_matched_partition.out index 85fb47809b2302b706878615a5318649d6a40f3d..19cea54a3c86a37bf0d227a6d78555d5b6e2daec 100644 --- a/contrib/dolphin/expected/keyword_ignore_test/ignore_no_matched_partition.out +++ b/contrib/dolphin/expected/keyword_ignore_test/ignore_no_matched_partition.out @@ -1,6 +1,6 @@ -- test for ignore error of no 
partition matched -create database sql_ignore_no_matched_partition_test dbcompatibility 'B'; -\c sql_ignore_no_matched_partition_test; +create schema sql_ignore_no_matched_partition_test; +set current_schema to 'sql_ignore_no_matched_partition_test'; -- sqlbypass set enable_opfusion = on; set enable_partition_opfusion = on; @@ -308,5 +308,6 @@ set enable_opfusion = on; set enable_partition_opfusion = off; drop table t_ignore; drop table t_from; -\c postgres -drop database if exists sql_ignore_no_matched_partition_test; +drop schema sql_ignore_no_matched_partition_test cascade; +NOTICE: drop cascades to table ignore_range_range +reset current_schema; diff --git a/contrib/dolphin/expected/keyword_ignore_test/ignore_not_null_constraints.out b/contrib/dolphin/expected/keyword_ignore_test/ignore_not_null_constraints.out index 9a9b3f84bc8a8413b71463e1303461744cdc983f..d9006d0173f28e6eea9fd88aa0e1bb2fb66c3ea0 100644 --- a/contrib/dolphin/expected/keyword_ignore_test/ignore_not_null_constraints.out +++ b/contrib/dolphin/expected/keyword_ignore_test/ignore_not_null_constraints.out @@ -1,6 +1,6 @@ -- test for insert/update ignore. 
-create database sql_ignore_not_null_test dbcompatibility 'B'; -\c sql_ignore_not_null_test; +create schema sql_ignore_not_null_test; +set current_schema to 'sql_ignore_not_null_test'; drop table if exists t_ignore; NOTICE: table "t_ignore" does not exist, skipping create table t_ignore(col1 int, col2 int not null, col3 varchar not null); @@ -1482,5 +1482,39 @@ select * from t_ignore; (2 rows) -- restore context -\c postgres -drop database if exists sql_ignore_not_null_test; \ No newline at end of file +drop schema sql_ignore_not_null_test cascade; +NOTICE: drop cascades to 33 other objects +DETAIL: drop cascades to table t_from +drop cascades to table t_timestamp +drop cascades to table t_timetz +drop cascades to table t_time +drop cascades to table t_interval +drop cascades to table t_tinterval +drop cascades to table t_smalldatetime +drop cascades to table t_date +drop cascades to table t_uuid +drop cascades to table t_name +drop cascades to table t_point +drop cascades to table t_path +drop cascades to table t_polygon +drop cascades to table t_circle +drop cascades to table t_box +drop cascades to table t_json +drop cascades to table t_jsonb +drop cascades to table t_bit +drop cascades to table t_tinyint +drop cascades to table t_smallint +drop cascades to table t_int +drop cascades to table t_bigint +drop cascades to table t_float +drop cascades to table t_float8 +drop cascades to table t_numeric +drop cascades to table t_serial +drop cascades to table t_bool +drop cascades to table t_charn +drop cascades to table t_varcharn +drop cascades to table t_text +drop cascades to table t_not_null_key_partition +drop cascades to table ignore_range_range +drop cascades to table t_ignore +reset current_schema; diff --git a/contrib/dolphin/expected/keyword_ignore_test/ignore_type_transform.out b/contrib/dolphin/expected/keyword_ignore_test/ignore_type_transform.out index 23e35c35c7bfbaf4130d2e00b32a67fe0936dca8..8863bf8fec43f7ee3bc5a294e6b294845c315101 100644 --- 
a/contrib/dolphin/expected/keyword_ignore_test/ignore_type_transform.out +++ b/contrib/dolphin/expected/keyword_ignore_test/ignore_type_transform.out @@ -1,5 +1,5 @@ -create database sql_ignore_type_transform_test dbcompatibility 'B'; -\c sql_ignore_type_transform_test; +create schema sql_ignore_type_transform_test; +set current_schema to 'sql_ignore_type_transform_test'; -- test for tinyint drop table if exists t; NOTICE: table "t" does not exist, skipping @@ -788,5 +788,20 @@ select * from t_nvarchar2; 123456789123456789 (3 rows) -\c postgres -drop database if exists sql_ignore_type_transform_test; +drop schema sql_ignore_type_transform_test cascade; +NOTICE: drop cascades to 14 other objects +DETAIL: drop cascades to table t +drop cascades to table t_tinyint +drop cascades to table t_smallint +drop cascades to table t_int +drop cascades to table t_bigint +drop cascades to table t_numeric +drop cascades to table t_float4 +drop cascades to table t_char +drop cascades to table t_varchar +drop cascades to table t_nchar +drop cascades to table t_character +drop cascades to table t_varchar2 +drop cascades to table t_nvarchar2 +drop cascades to table t_text +reset current_schema; diff --git a/contrib/dolphin/expected/keyword_ignore_test/ignore_unique_constraints.out b/contrib/dolphin/expected/keyword_ignore_test/ignore_unique_constraints.out index 276cec58d1683fd9be0bad2b8551702769bb59cb..e0cad03a68ba55fba3959346046cf4c931a756b5 100644 --- a/contrib/dolphin/expected/keyword_ignore_test/ignore_unique_constraints.out +++ b/contrib/dolphin/expected/keyword_ignore_test/ignore_unique_constraints.out @@ -1,5 +1,5 @@ -create database sql_ignore_unique_test dbcompatibility 'B'; -\c sql_ignore_unique_test; +create schema sql_ignore_unique_test; +set current_schema to 'sql_ignore_unique_test'; drop table if exists t_ignore; NOTICE: table "t_ignore" does not exist, skipping create table t_ignore(col1 int, col2 int unique, col3 int unique); @@ -365,5 +365,10 @@ select * from 
t_ignore; 2 (2 rows) -\c postgres -drop database if exists sql_ignore_unique_test; +drop schema sql_ignore_unique_test cascade; +NOTICE: drop cascades to 4 other objects +DETAIL: drop cascades to table t_unique_upsert +drop cascades to table t_unique_key_partition +drop cascades to table ignore_range_range +drop cascades to table t_ignore +reset current_schema; diff --git a/contrib/dolphin/expected/kill.out b/contrib/dolphin/expected/kill.out index e389eb038b8370a9a118a7306abe13e037e1a935..c41bcae03b870d792cea41baeff2c3eb3334c6a8 100644 --- a/contrib/dolphin/expected/kill.out +++ b/contrib/dolphin/expected/kill.out @@ -1,7 +1,5 @@ -drop database if exists test_kill; -NOTICE: database "test_kill" does not exist, skipping -create database test_kill dbcompatibility 'b'; -\c test_kill +create schema test_kill; +set current_schema to 'test_kill'; kill query (select sessionid from pg_stat_activity where application_name = 'JobScheduler'); result -------- @@ -20,5 +18,5 @@ kill (select sessionid from pg_stat_activity where application_name = 'Percentil t (1 row) -\c postgres -drop database if exists test_kill; +drop schema test_kill cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/like_default_test.out b/contrib/dolphin/expected/like_default_test.out index b29149f919c7cebfb01c8515402dd72fdb5c88a6..432739093b41c8a0d0c76edf42d30cbfa5347238 100644 --- a/contrib/dolphin/expected/like_default_test.out +++ b/contrib/dolphin/expected/like_default_test.out @@ -1,13 +1,9 @@ --- b compatibility case -drop database if exists like_default_test; -NOTICE: database "like_default_test" does not exist, skipping --- create database like_default_test dbcompatibility 'b'; -create database like_default_test with DBCOMPATIBILITY = 'B'; -\c like_default_test +create schema like_default_test; +set current_schema to 'like_default_test'; create table test_nv (name national varchar(10)); \d test_nv - Table "public.test_nv" + Table "like_default_test.test_nv" Column | Type | 
Modifiers --------+---------------+----------- name | nvarchar2(10) | @@ -15,7 +11,7 @@ create table test_nv (name national varchar(10)); drop table if exists test_nv; create table test_nv (id int, name national varchar(10)); \d test_nv - Table "public.test_nv" + Table "like_default_test.test_nv" Column | Type | Modifiers --------+---------------+----------- id | integer | @@ -24,7 +20,7 @@ create table test_nv (id int, name national varchar(10)); drop table if exists test_nv; create table test_nv (id int, name nvarchar(10)); \d test_nv - Table "public.test_nv" + Table "like_default_test.test_nv" Column | Type | Modifiers --------+---------------+----------- id | integer | @@ -45,7 +41,7 @@ partition by range(id) ); create table test_non_like1 (like test_non); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -53,7 +49,7 @@ create table test_non_like1 (like test_non); create table test_non_like2 like test_non; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -61,7 +57,7 @@ create table test_non_like2 like test_non; create table test_part_like1 (like test_part); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -69,7 +65,7 @@ create table test_part_like1 (like test_part); create table test_part_like2 like test_part; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -81,7 +77,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table if not exists test_non_like1 (like test_non); \d test_non_like1 - 
Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -89,7 +85,7 @@ create table if not exists test_non_like1 (like test_non); create table if not exists test_non_like2 like test_non; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -97,7 +93,7 @@ create table if not exists test_non_like2 like test_non; create table if not exists test_part_like1 (like test_part); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -105,7 +101,7 @@ create table if not exists test_part_like1 (like test_part); create table if not exists test_part_like2 like test_part; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -117,7 +113,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table test_non_like1 (like test_non including indexes); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -125,7 +121,7 @@ create table test_non_like1 (like test_non including indexes); create table test_non_like2 like test_non including indexes; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -133,7 +129,7 @@ create table test_non_like2 like test_non including indexes; create table test_part_like1 (like test_part including indexes); \d test_part_like1 - Table "public.test_part_like1" + Table 
"like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -141,7 +137,7 @@ create table test_part_like1 (like test_part including indexes); create table test_part_like2 like test_part including indexes; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -159,7 +155,7 @@ ERROR: could not specify "INCLUDING PARTITION" for non-partitioned-table relati \d test_non_like2 create table test_part_like1 (like test_part including partition); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -169,7 +165,7 @@ Number of partitions: 5 (View pg_partition to check each partition range.) create table test_part_like2 like test_part including partition; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -189,7 +185,7 @@ ERROR: could not specify "INCLUDING PARTITION" for non-partitioned-table relati \d test_non_like2 create table test_part_like1 (like test_part including partition including indexes); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -199,7 +195,7 @@ Number of partitions: 5 (View pg_partition to check each partition range.) 
create table test_part_like2 like test_part including partition including indexes; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -213,7 +209,7 @@ NOTICE: table "test_non_like2" does not exist, skipping create table test_non_like1 (like test_non including all); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -221,7 +217,7 @@ create table test_non_like1 (like test_non including all); create table test_non_like2 like test_non including all; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -229,7 +225,7 @@ create table test_non_like2 like test_non including all; create table test_part_like1 (like test_part including all); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -239,7 +235,7 @@ Number of partitions: 5 (View pg_partition to check each partition range.) 
create table test_part_like2 like test_part including all; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -251,7 +247,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table test_non_like1 (like test_non including all excluding indexes); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -259,7 +255,7 @@ create table test_non_like1 (like test_non including all excluding indexes); create table test_non_like2 like test_non including all excluding indexes; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -267,7 +263,7 @@ create table test_non_like2 like test_non including all excluding indexes; create table test_part_like1 (like test_part including all excluding indexes); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -277,7 +273,7 @@ Number of partitions: 5 (View pg_partition to check each partition range.) 
create table test_part_like2 like test_part including all excluding indexes; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -289,7 +285,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table test_non_like1 (like test_non including all excluding partition); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -297,7 +293,7 @@ create table test_non_like1 (like test_non including all excluding partition); create table test_non_like2 like test_non including all excluding partition; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -305,7 +301,7 @@ create table test_non_like2 like test_non including all excluding partition; create table test_part_like1 (like test_part including all excluding partition); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -313,7 +309,7 @@ create table test_part_like1 (like test_part including all excluding partition); create table test_part_like2 like test_part including all excluding partition; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -323,7 +319,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table test_non_like1 (like test_non including all excluding partition excluding indexes); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type 
| Modifiers --------+-----------------------+----------- id | integer | @@ -331,7 +327,7 @@ create table test_non_like1 (like test_non including all excluding partition exc create table test_non_like2 like test_non including all excluding partition excluding indexes; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -339,7 +335,7 @@ create table test_non_like2 like test_non including all excluding partition excl create table test_part_like1 (like test_part including all excluding partition excluding indexes); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -347,7 +343,7 @@ create table test_part_like1 (like test_part including all excluding partition e create table test_part_like2 like test_part including all excluding partition excluding indexes; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -357,7 +353,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table test_non_like1 (like test_non excluding indexes); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -365,7 +361,7 @@ create table test_non_like1 (like test_non excluding indexes); create table test_non_like2 like test_non excluding indexes; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -373,7 +369,7 @@ create table test_non_like2 like test_non excluding indexes; create table test_part_like1 (like 
test_part excluding indexes); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -381,7 +377,7 @@ create table test_part_like1 (like test_part excluding indexes); create table test_part_like2 like test_part excluding indexes; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -393,7 +389,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table test_non_like1 (like test_non excluding partition); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -401,7 +397,7 @@ create table test_non_like1 (like test_non excluding partition); create table test_non_like2 like test_non excluding partition; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -409,7 +405,7 @@ create table test_non_like2 like test_non excluding partition; create table test_part_like1 (like test_part excluding partition); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -417,7 +413,7 @@ create table test_part_like1 (like test_part excluding partition); create table test_part_like2 like test_part excluding partition; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -427,7 +423,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table 
test_non_like1 (like test_non excluding reloptions); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -435,7 +431,7 @@ create table test_non_like1 (like test_non excluding reloptions); create table test_non_like2 like test_non excluding reloptions; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -443,7 +439,7 @@ create table test_non_like2 like test_non excluding reloptions; create table test_part_like1 (like test_part excluding reloptions); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -451,7 +447,7 @@ create table test_part_like1 (like test_part excluding reloptions); create table test_part_like2 like test_part excluding reloptions; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -463,7 +459,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table test_non_like1 (like test_non excluding partition excluding indexes); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -471,7 +467,7 @@ create table test_non_like1 (like test_non excluding partition excluding indexes create table test_non_like2 like test_non excluding partition excluding indexes; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -479,7 +475,7 @@ create table 
test_non_like2 like test_non excluding partition excluding indexes; create table test_part_like1 (like test_part excluding partition excluding indexes); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -487,7 +483,7 @@ create table test_part_like1 (like test_part excluding partition excluding index create table test_part_like2 like test_part excluding partition excluding indexes; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -497,7 +493,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table test_non_like1 (like test_non excluding all); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -505,7 +501,7 @@ create table test_non_like1 (like test_non excluding all); create table test_non_like2 like test_non excluding all; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -513,7 +509,7 @@ create table test_non_like2 like test_non excluding all; create table test_part_like1 (like test_part excluding all); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -521,7 +517,7 @@ create table test_part_like1 (like test_part excluding all); create table test_part_like2 like test_part excluding all; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ 
-537,7 +533,7 @@ ERROR: could not specify "INCLUDING PARTITION" for non-partitioned-table relati \d test_non_like2 create table test_part_like1 (like test_part excluding all including partition); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -547,7 +543,7 @@ Number of partitions: 5 (View pg_partition to check each partition range.) create table test_part_like2 like test_part excluding all including partition; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -561,7 +557,7 @@ NOTICE: table "test_non_like2" does not exist, skipping create table test_non_like1 (like test_non excluding all including indexes); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -569,7 +565,7 @@ create table test_non_like1 (like test_non excluding all including indexes); create table test_non_like2 like test_non excluding all including indexes; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -577,7 +573,7 @@ create table test_non_like2 like test_non excluding all including indexes; create table test_part_like1 (like test_part excluding all including indexes); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -585,7 +581,7 @@ create table test_part_like1 (like test_part excluding all including indexes); create table test_part_like2 like test_part excluding all including indexes; \d test_part_like2 - Table 
"public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -601,7 +597,7 @@ ERROR: could not specify "INCLUDING PARTITION" for non-partitioned-table relati \d test_non_like2 create table test_part_like1 (like test_part excluding all including partition including indexes); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -611,7 +607,7 @@ Number of partitions: 5 (View pg_partition to check each partition range.) create table test_part_like2 like test_part excluding all including partition including indexes; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -628,7 +624,7 @@ create index test_part_id_idx on test_part(id); create table test_non_like1 (like test_non); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -636,7 +632,7 @@ create table test_non_like1 (like test_non); create table test_non_like2 like test_non; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -646,7 +642,7 @@ Indexes: create table test_part_like1 (like test_part); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -654,7 +650,7 @@ create table test_part_like1 (like test_part); create table test_part_like2 like test_part; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers 
--------+-----------------------+----------- id | integer | @@ -668,7 +664,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table if not exists test_non_like1 (like test_non); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -676,7 +672,7 @@ create table if not exists test_non_like1 (like test_non); create table if not exists test_non_like2 like test_non; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -686,7 +682,7 @@ Indexes: create table if not exists test_part_like1 (like test_part); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -694,7 +690,7 @@ create table if not exists test_part_like1 (like test_part); create table if not exists test_part_like2 like test_part; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -708,7 +704,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table test_non_like1 (like test_non including indexes); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -718,7 +714,7 @@ Indexes: create table test_non_like2 like test_non including indexes; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -731,7 +727,7 @@ ERROR: non-partitioned table does not support global 
partitioned indexes \d test_part_like1 create table test_part_like2 like test_part including indexes; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -752,7 +748,7 @@ ERROR: could not specify "INCLUDING PARTITION" for non-partitioned-table relati \d test_non_like2 create table test_part_like1 (like test_part including partition); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -762,7 +758,7 @@ Number of partitions: 5 (View pg_partition to check each partition range.) create table test_part_like2 like test_part including partition; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -784,7 +780,7 @@ ERROR: could not specify "INCLUDING PARTITION" for non-partitioned-table relati \d test_non_like2 create table test_part_like1 (like test_part including partition including indexes); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -796,7 +792,7 @@ Number of partitions: 5 (View pg_partition to check each partition range.) 
create table test_part_like2 like test_part including partition including indexes; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -812,7 +808,7 @@ NOTICE: table "test_non_like2" does not exist, skipping create table test_non_like1 (like test_non including all); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -822,7 +818,7 @@ Indexes: create table test_non_like2 like test_non including all; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -832,7 +828,7 @@ Indexes: create table test_part_like1 (like test_part including all); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -844,7 +840,7 @@ Number of partitions: 5 (View pg_partition to check each partition range.) 
create table test_part_like2 like test_part including all; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -858,7 +854,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table test_non_like1 (like test_non including all excluding indexes); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -866,7 +862,7 @@ create table test_non_like1 (like test_non including all excluding indexes); create table test_non_like2 like test_non including all excluding indexes; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -874,7 +870,7 @@ create table test_non_like2 like test_non including all excluding indexes; create table test_part_like1 (like test_part including all excluding indexes); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -884,7 +880,7 @@ Number of partitions: 5 (View pg_partition to check each partition range.) 
create table test_part_like2 like test_part including all excluding indexes; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -896,7 +892,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table test_non_like1 (like test_non including all excluding partition); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -906,7 +902,7 @@ Indexes: create table test_non_like2 like test_non including all excluding partition; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -926,7 +922,7 @@ NOTICE: table "test_part_like2" does not exist, skipping create table test_non_like1 (like test_non including all excluding partition excluding indexes); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -934,7 +930,7 @@ create table test_non_like1 (like test_non including all excluding partition exc create table test_non_like2 like test_non including all excluding partition excluding indexes; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -942,7 +938,7 @@ create table test_non_like2 like test_non including all excluding partition excl create table test_part_like1 (like test_part including all excluding partition excluding indexes); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- 
id | integer | @@ -950,7 +946,7 @@ create table test_part_like1 (like test_part including all excluding partition e create table test_part_like2 like test_part including all excluding partition excluding indexes; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -960,7 +956,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table test_non_like1 (like test_non excluding indexes); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -968,7 +964,7 @@ create table test_non_like1 (like test_non excluding indexes); create table test_non_like2 like test_non excluding indexes; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -976,7 +972,7 @@ create table test_non_like2 like test_non excluding indexes; create table test_part_like1 (like test_part excluding indexes); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -984,7 +980,7 @@ create table test_part_like1 (like test_part excluding indexes); create table test_part_like2 like test_part excluding indexes; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -996,7 +992,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table test_non_like1 (like test_non excluding partition); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | 
Modifiers --------+-----------------------+----------- id | integer | @@ -1004,7 +1000,7 @@ create table test_non_like1 (like test_non excluding partition); create table test_non_like2 like test_non excluding partition; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1014,7 +1010,7 @@ Indexes: create table test_part_like1 (like test_part excluding partition); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1028,7 +1024,7 @@ NOTICE: table "test_part_like2" does not exist, skipping create table test_non_like1 (like test_non excluding partition excluding indexes); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1036,7 +1032,7 @@ create table test_non_like1 (like test_non excluding partition excluding indexes create table test_non_like2 like test_non excluding partition excluding indexes; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1044,7 +1040,7 @@ create table test_non_like2 like test_non excluding partition excluding indexes; create table test_part_like1 (like test_part excluding partition excluding indexes); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1052,7 +1048,7 @@ create table test_part_like1 (like test_part excluding partition excluding index create table test_part_like2 like test_part excluding partition excluding indexes; \d test_part_like2 - Table "public.test_part_like2" + Table 
"like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1062,7 +1058,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table test_non_like1 (like test_non excluding all); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1070,7 +1066,7 @@ create table test_non_like1 (like test_non excluding all); create table test_non_like2 like test_non excluding all; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1078,7 +1074,7 @@ create table test_non_like2 like test_non excluding all; create table test_part_like1 (like test_part excluding all); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1086,7 +1082,7 @@ create table test_part_like1 (like test_part excluding all); create table test_part_like2 like test_part excluding all; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1102,7 +1098,7 @@ ERROR: could not specify "INCLUDING PARTITION" for non-partitioned-table relati \d test_non_like2 create table test_part_like1 (like test_part excluding all including partition); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1112,7 +1108,7 @@ Number of partitions: 5 (View pg_partition to check each partition range.) 
create table test_part_like2 like test_part excluding all including partition; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1126,7 +1122,7 @@ NOTICE: table "test_non_like2" does not exist, skipping create table test_non_like1 (like test_non excluding all including indexes); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1136,7 +1132,7 @@ Indexes: create table test_non_like2 like test_non excluding all including indexes; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1162,7 +1158,7 @@ ERROR: could not specify "INCLUDING PARTITION" for non-partitioned-table relati \d test_non_like2 create table test_part_like1 (like test_part excluding all including partition including indexes); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1174,7 +1170,7 @@ Number of partitions: 5 (View pg_partition to check each partition range.) 
create table test_part_like2 like test_part excluding all including partition including indexes; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1262,11 +1258,6 @@ select * from test_insert; (15 rows) -\c postgres -drop database if exists like_default_test; - - - - - - +drop schema like_default_test cascade; +NOTICE: drop cascades to table test_insert +reset current_schema; diff --git a/contrib/dolphin/expected/mysqlmode_fullgroup.out b/contrib/dolphin/expected/mysqlmode_fullgroup.out index e611c2559eedbde783a088871132776046cd969d..85024355bd880ecfe232bf0ed77fb5924876ff0b 100644 --- a/contrib/dolphin/expected/mysqlmode_fullgroup.out +++ b/contrib/dolphin/expected/mysqlmode_fullgroup.out @@ -1,5 +1,5 @@ -CREATE DATABASE sql_mode_full_group dbcompatibility 'B'; -\c sql_mode_full_group; +create schema sql_mode_full_group; +set current_schema to 'sql_mode_full_group'; create table test_group(a int, b int, c int, d int); create table test_group1(a int, b int, c int, d int); insert into test_group values(1,2,3,4); @@ -48,5 +48,8 @@ select t.a, (select sum(b) from test_group i where i.b = t.b ) from test_group t 2 | 8 (3 rows) -\c contrib_regression; -drop DATABASE if exists sql_mode_full_group; +drop schema sql_mode_full_group cascade; +NOTICE: drop cascades to 2 other objects +DETAIL: drop cascades to table test_group +drop cascades to table test_group1 +reset current_schema; diff --git a/contrib/dolphin/expected/mysqlmode_strict.out b/contrib/dolphin/expected/mysqlmode_strict.out index 0daf8e196fe46307797f92dfc3ae244de931cbb6..1dfc6c51f8bb0bb6cdb8ef8f55fc57499d72af42 100644 --- a/contrib/dolphin/expected/mysqlmode_strict.out +++ b/contrib/dolphin/expected/mysqlmode_strict.out @@ -1,7 +1,5 @@ -drop DATABASE if exists sql_mode_strict; -NOTICE: database "sql_mode_strict" does not exist, skipping -CREATE DATABASE sql_mode_strict 
dbcompatibility 'B'; -\c sql_mode_strict; +create schema sql_mode_strict; +set current_schema to 'sql_mode_strict'; set dolphin.sql_mode = ''; create table test_tint(a tinyint); create table test_sint(a smallint); @@ -2183,5 +2181,57 @@ DETAIL: Failing row contains (null, null). insert into test_notnull_numeric_strict(b) values(null); ERROR: null value in column "b" violates not-null constraint DETAIL: Failing row contains (null, null). -\c contrib_regression; -drop DATABASE if exists sql_mode_strict; +drop schema sql_mode_strict cascade; +NOTICE: drop cascades to 51 other objects +DETAIL: drop cascades to table test_tint +drop cascades to table test_sint +drop cascades to table test_int +drop cascades to table test_bint +drop cascades to table test_float4 +drop cascades to table test_float8 +drop cascades to table test_char +drop cascades to table test_varchar7 +drop cascades to table test_tint1 +drop cascades to table test_sint1 +drop cascades to table test_int1 +drop cascades to table test_bint1 +drop cascades to table test_char1 +drop cascades to table test_varchar7_1 +drop cascades to table test_notnull_tint +drop cascades to table test_notnull_sint +drop cascades to table test_notnull_int +drop cascades to table test_notnull_bint +drop cascades to table test_notnull_char +drop cascades to table test_notnull_varchar +drop cascades to table test_notnull_clob +drop cascades to table test_notnull_text +drop cascades to table test_notnull_real +drop cascades to table test_notnull_double +drop cascades to table test_notnull_numeric +drop cascades to table test_multi_default +drop cascades to table test_tint_strict +drop cascades to table test_sint_strict +drop cascades to table test_int_strict +drop cascades to table test_bint_strict +drop cascades to table test_float4_strict +drop cascades to table test_float8_strict +drop cascades to table test_char_strict +drop cascades to table test_varchar7_strict +drop cascades to table test_tint_strict1 +drop cascades to 
table test_sint_strict1 +drop cascades to table test_int_strict1 +drop cascades to table test_bint_strict1 +drop cascades to table test_char_strict1 +drop cascades to table test_varchar7_strict_1 +drop cascades to table test_notnull_tint_strict +drop cascades to table test_notnull_sint_strict +drop cascades to table test_notnull_int_strict +drop cascades to table test_notnull_bint_strict +drop cascades to table test_notnull_char_strict +drop cascades to table test_notnull_varchar_strict +drop cascades to table test_notnull_clob_strict +drop cascades to table test_notnull_text_strict +drop cascades to table test_notnull_real_strict +drop cascades to table test_notnull_double_strict +drop cascades to table test_notnull_numeric_strict +reset current_schema; diff --git a/contrib/dolphin/expected/mysqlmode_strict2.out b/contrib/dolphin/expected/mysqlmode_strict2.out index f94314099a2a261ff3912fb0766f4c8b858e4bf1..ff8f9d6d66055f5f7943468a328b82a039c9c0b3 100644 --- a/contrib/dolphin/expected/mysqlmode_strict2.out +++ b/contrib/dolphin/expected/mysqlmode_strict2.out @@ -1,7 +1,5 @@ -drop DATABASE if exists sql_mode_strict2; -NOTICE: database "sql_mode_strict2" does not exist, skipping -CREATE DATABASE sql_mode_strict2 dbcompatibility 'B'; -\c sql_mode_strict2; +create schema sql_mode_strict2; +set current_schema to 'sql_mode_strict2'; set dolphin.sql_mode = ''; create table test_tint(a tinyint unsigned); create table test_sint(a smallint unsigned); @@ -2174,5 +2172,57 @@ DETAIL: Failing row contains (null, null). insert into test_notnull_numeric_strict(b) values(null); ERROR: null value in column "b" violates not-null constraint DETAIL: Failing row contains (null, null). 
-\c contrib_regression; -drop DATABASE if exists sql_mode_strict2; +drop schema sql_mode_strict2 cascade; +NOTICE: drop cascades to 51 other objects +DETAIL: drop cascades to table test_tint +drop cascades to table test_sint +drop cascades to table test_int +drop cascades to table test_bint +drop cascades to table test_float4 +drop cascades to table test_float8 +drop cascades to table test_char +drop cascades to table test_varchar7 +drop cascades to table test_tint1 +drop cascades to table test_sint1 +drop cascades to table test_int1 +drop cascades to table test_bint1 +drop cascades to table test_char1 +drop cascades to table test_varchar7_1 +drop cascades to table test_notnull_tint +drop cascades to table test_notnull_sint +drop cascades to table test_notnull_int +drop cascades to table test_notnull_bint +drop cascades to table test_notnull_char +drop cascades to table test_notnull_varchar +drop cascades to table test_notnull_clob +drop cascades to table test_notnull_text +drop cascades to table test_notnull_real +drop cascades to table test_notnull_double +drop cascades to table test_notnull_numeric +drop cascades to table test_multi_default +drop cascades to table test_tint_strict +drop cascades to table test_sint_strict +drop cascades to table test_int_strict +drop cascades to table test_bint_strict +drop cascades to table test_float4_strict +drop cascades to table test_float8_strict +drop cascades to table test_char_strict +drop cascades to table test_varchar7_strict +drop cascades to table test_tint_strict1 +drop cascades to table test_sint_strict1 +drop cascades to table test_int_strict1 +drop cascades to table test_bint_strict1 +drop cascades to table test_char_strict1 +drop cascades to table test_varchar7_strict_1 +drop cascades to table test_notnull_tint_strict +drop cascades to table test_notnull_sint_strict +drop cascades to table test_notnull_int_strict +drop cascades to table test_notnull_bint_strict +drop cascades to table test_notnull_char_strict 
+drop cascades to table test_notnull_varchar_strict +drop cascades to table test_notnull_clob_strict +drop cascades to table test_notnull_text_strict +drop cascades to table test_notnull_real_strict +drop cascades to table test_notnull_double_strict +drop cascades to table test_notnull_numeric_strict +reset current_schema; diff --git a/contrib/dolphin/expected/network.out b/contrib/dolphin/expected/network.out index 5de44220fd29340646fc4a07b2c17311912b597f..36f950817f1c30761abcc8aae464de3e65b0fe22 100644 --- a/contrib/dolphin/expected/network.out +++ b/contrib/dolphin/expected/network.out @@ -1,7 +1,5 @@ -drop database if exists test_network; -NOTICE: database "test_network" does not exist, skipping -create database test_network dbcompatibility 'b'; -\c test_network +create schema test_network; +set current_schema to 'test_network'; create table test (ip1 varchar(20),ip2 char(20),ip3 nvarchar2(20),ip4 text,ip5 clob); insert into test (ip1,ip2,ip3,ip4,ip5) values ('192.168.1.1','127.0.0.1','10.0.0.10','172.0.0.1','0.0.0.0'),('fe80::1','a::f','a::c','a::d','a::e'),('192.168.1.256','192.168.1','256.168.1.1','192.256.1.1','192.168.1.-1'); select is_ipv4(ip1),is_ipv4(ip2),is_ipv4(ip3),is_ipv4(ip4),is_ipv4(ip5) from test; @@ -140,5 +138,6 @@ select is_ipv6(NULL); 0 (1 row) -\c postgres -drop database if exists test_network; +drop schema test_network cascade; +NOTICE: drop cascades to table test +reset current_schema; diff --git a/contrib/dolphin/expected/network2.out b/contrib/dolphin/expected/network2.out index cf61f204d9a3724796bfceb3cd3831f9905dfe16..61390761eda27fd6a031197772d13bc7476da9eb 100644 --- a/contrib/dolphin/expected/network2.out +++ b/contrib/dolphin/expected/network2.out @@ -1,7 +1,5 @@ -drop database if exists network2; -NOTICE: database "network2" does not exist, skipping -create database network2 dbcompatibility 'b'; -\c network2 +create schema network2; +set current_schema to 'network2'; set dolphin.sql_mode = ''; select 
inet_ntoa(inet_aton('255.255.255.255.255.255.255.255')); inet_ntoa @@ -850,5 +848,5 @@ SELECT IS_IPV4_MAPPED(NULL),IS_IPV4_COMPAT(NULL); (1 row) reset dolphin.sql_mode; -\c postgres -drop database if exists network2; +drop schema network2 cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/none_strict_warning.out b/contrib/dolphin/expected/none_strict_warning.out index 4547e51cd7f30e90743d1c76681fcf4600a7faff..75db18606a67d49c6464cdcacef1af80def158e2 100644 --- a/contrib/dolphin/expected/none_strict_warning.out +++ b/contrib/dolphin/expected/none_strict_warning.out @@ -1,8 +1,5 @@ ----- b compatibility case -drop database if exists none_strict_warning_test; -NOTICE: database "none_strict_warning_test" does not exist, skipping -create database none_strict_warning_test dbcompatibility 'b'; -\c none_strict_warning_test +create schema none_strict_warning_test; +set current_schema to 'none_strict_warning_test'; reset dolphin.sql_mode; create table test_int1(c1 int1); create table test_int8(c1 int8); @@ -2571,6 +2568,14 @@ select * from test_uint8; 0 (26 rows) ----- drop database -\c contrib_regression -DROP DATABASE none_strict_warning_test; +drop schema none_strict_warning_test cascade; +NOTICE: drop cascades to 8 other objects +DETAIL: drop cascades to table test_int1 +drop cascades to table test_int8 +drop cascades to table test_int4 +drop cascades to table test_int2 +drop cascades to table test_uint1 +drop cascades to table test_uint2 +drop cascades to table test_uint4 +drop cascades to table test_uint8 +reset current_schema; diff --git a/contrib/dolphin/expected/nvarchar.out b/contrib/dolphin/expected/nvarchar.out index ee026d565e48d34c0e54abdba09db64af8700be1..850163d3b627ff5142b0446435955c8a79f3f5de 100644 --- a/contrib/dolphin/expected/nvarchar.out +++ b/contrib/dolphin/expected/nvarchar.out @@ -1,8 +1,5 @@ --- b compatibility case -drop database if exists db_nvarchar; -NOTICE: database "db_nvarchar" does not exist, skipping -create database 
db_nvarchar dbcompatibility 'b'; -\c db_nvarchar +create schema db_nvarchar; +set current_schema to 'db_nvarchar'; -- -- VARCHAR -- @@ -116,5 +113,6 @@ SELECT '' AS four, * FROM NVARCHAR_TBL; | abcd (4 rows) -\c postgres -drop database if exists db_nvarchar; +drop schema db_nvarchar cascade; +NOTICE: drop cascades to table nvarchar_tbl +reset current_schema; diff --git a/contrib/dolphin/expected/oct.out b/contrib/dolphin/expected/oct.out index 032857dd1d98d3e23f49dfd060d5290a17cbb4ee..caa7a6503d223099515d1fd062d9368c1b54e0a0 100644 --- a/contrib/dolphin/expected/oct.out +++ b/contrib/dolphin/expected/oct.out @@ -1,7 +1,5 @@ -drop database if exists db_oct; -NOTICE: database "db_oct" does not exist, skipping -create database db_oct dbcompatibility 'b'; -\c db_oct +create schema db_oct; +set current_schema to 'db_oct'; -- 测试正常数字十进制转八进制 SELECT OCT(10); oct @@ -116,5 +114,5 @@ select oct(name) from test_oct; (2 rows) drop table if exists test_oct; -\c postgres -drop database if exists db_oct; +drop schema db_oct cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/option.out b/contrib/dolphin/expected/option.out index 03ff272ce68ad73a7ef8881b82b968a56c596439..bc4b424d6775a3e7c222d5cb8fd93473705a56bb 100644 --- a/contrib/dolphin/expected/option.out +++ b/contrib/dolphin/expected/option.out @@ -1,7 +1,5 @@ -drop database if exists option; -NOTICE: database "option" does not exist, skipping -create database option dbcompatibility = 'b'; -\c option +create schema option; +set current_schema to 'option'; create global temp table test1(a int primary key, b text) on commit delete rows engine = InnoDB with(STORAGE_TYPE = ASTORE); NOTICE: CREATE TABLE / PRIMARY KEY will create implicit index "test1_pkey" for table "test1" create global temp table test2(id int,vname varchar(48),remark text) engine = InnoDB on commit PRESERVE rows ; @@ -123,5 +121,8 @@ drop table test7; drop table test8; drop table test9; drop table test10; -\c postgres -drop database option; 
+drop schema option cascade; +NOTICE: drop cascades to 2 other objects +DETAIL: drop cascades to table test11 +drop cascades to table test13 +reset current_schema; diff --git a/contrib/dolphin/expected/partition_maxvalue_test.out b/contrib/dolphin/expected/partition_maxvalue_test.out index a94f291acc1abdc6a57ef2188ca626052ac0bfaa..dbb5a2832c4d5cab1d2b9e56ce50196bf9c3d71b 100644 --- a/contrib/dolphin/expected/partition_maxvalue_test.out +++ b/contrib/dolphin/expected/partition_maxvalue_test.out @@ -1,7 +1,5 @@ -drop DATABASE if exists partition_maxvalue_test; -NOTICE: database "partition_maxvalue_test" does not exist, skipping -CREATE DATABASE partition_maxvalue_test dbcompatibility 'B'; -\c partition_maxvalue_test; +create schema partition_maxvalue_test; +set current_schema to 'partition_maxvalue_test'; --test MAXVALUE syntax CREATE TABLE IF NOT EXISTS testsubpart ( @@ -84,5 +82,9 @@ DETAIL: The distributed capability is not supported currently. create table testpart5(a int) DISTRIBUTE by range(a) (SLICE p0 start MAXVALUE end (200), SLICE p1 end(300)); ERROR: Un-support feature DETAIL: The distributed capability is not supported currently. 
-\c postgres; -drop DATABASE if exists partition_maxvalue_test; +drop schema partition_maxvalue_test cascade; +NOTICE: drop cascades to 3 other objects +DETAIL: drop cascades to table testsubpart +drop cascades to table testpart +drop cascades to table testpart1 +reset current_schema; diff --git a/contrib/dolphin/expected/partition_test1.out b/contrib/dolphin/expected/partition_test1.out index 353645f4a59d5a4dca41437df79ea1b3694a1621..69ae2cd04c5b1c55ee63dbbb48b4acc5a6e58450 100644 --- a/contrib/dolphin/expected/partition_test1.out +++ b/contrib/dolphin/expected/partition_test1.out @@ -1,7 +1,5 @@ -drop DATABASE if exists partition_test1; -NOTICE: database "partition_test1" does not exist, skipping -CREATE DATABASE partition_test1 dbcompatibility 'B'; -\c partition_test1; +create schema partition_test1; +set current_schema to 'partition_test1'; -------test range partition tables ----test partition table CREATE TABLE IF NOT EXISTS test_part @@ -714,7 +712,7 @@ NOTICE: ALTER TABLE / ADD UNIQUE will create implicit index "uidx_d" for table alter table test_part_list add constraint uidx_c unique using index idx_c; NOTICE: ALTER TABLE / ADD CONSTRAINT USING INDEX will rename index "idx_c" to "uidx_c" insert into test_part_list values(2000,1,2,3),(3000,2,3,4),(4000,3,4,5),(5000,4,5,6); -select * from test_part_list; +select * from test_part_list order by a desc; a | b | c | d ------+---+---+--- 5000 | 4 | 5 | 6 @@ -735,7 +733,7 @@ ALTER TABLE test_part_list REBUILD PARTITION p1, p2; test_part_list (1 row) -select * from test_part_list; +select * from test_part_list order by a desc; a | b | c | d ------+---+---+--- 5000 | 4 | 5 | 6 @@ -758,7 +756,7 @@ ALTER TABLE test_part_list REBUILD PARTITION all; test_part_list (1 row) -select * from test_part_list; +select * from test_part_list order by a desc; a | b | c | d ------+---+---+--- 5000 | 4 | 5 | 6 @@ -1510,5 +1508,11 @@ select * from test_part_segment where ((980 < b and b < 1000) or (2180 < b and b alter table 
test_part_segment remove partitioning; ERROR: The segment table test_part_segment is not supported CONTEXT: referenced column: remove_partitioning -\c postgres; -drop DATABASE if exists partition_test1; +drop schema partition_test1 cascade; +NOTICE: drop cascades to 5 other objects +DETAIL: drop cascades to table test_part_list +drop cascades to table test_part_hash +drop cascades to table test_nopart +drop cascades to table test_part_ustore +drop cascades to table test_part_segment +reset current_schema; diff --git a/contrib/dolphin/expected/partition_test2.out b/contrib/dolphin/expected/partition_test2.out index 19ac3fc01d9fb068e7790299c9c2f0ee98d47134..ae6ec1c0c558a069dc46f8c110bfcc8e791c2758 100644 --- a/contrib/dolphin/expected/partition_test2.out +++ b/contrib/dolphin/expected/partition_test2.out @@ -1,7 +1,5 @@ -drop DATABASE if exists partition_test2; -NOTICE: database "partition_test2" does not exist, skipping -CREATE DATABASE partition_test2 dbcompatibility 'B'; -\c partition_test2; +create schema partition_test2; +set current_schema to 'partition_test2'; CREATE TABLE IF NOT EXISTS test_part1 ( a int, @@ -565,5 +563,14 @@ alter table test_part_hash analyze partition p0,p1; (1 row) alter table test_part_hash analyze partition all; -\c postgres; -drop DATABASE if exists partition_test2; +drop schema partition_test2 cascade; +NOTICE: drop cascades to 8 other objects +DETAIL: drop cascades to table test_part1 +drop cascades to table test_subpart +drop cascades to table test_no_part1 +drop cascades to table test_part_ustore +drop cascades to table test_no_part2 +drop cascades to table test_part_segment +drop cascades to table test_part_list +drop cascades to table test_part_hash +reset current_schema; diff --git a/contrib/dolphin/expected/partition_test3.out b/contrib/dolphin/expected/partition_test3.out index 55f50a9d9c475b9878400ecea5c6d7b99f7f06a9..64945799ba87ab8714c4b55fbeaaff6bec0349e3 100644 --- a/contrib/dolphin/expected/partition_test3.out +++ 
b/contrib/dolphin/expected/partition_test3.out @@ -1,7 +1,5 @@ -drop DATABASE if exists partition_test3; -NOTICE: database "partition_test3" does not exist, skipping -CREATE DATABASE partition_test3 dbcompatibility 'B'; -\c partition_test3; +create schema partition_test3; +set current_schema to 'partition_test3'; --test add and drop CREATE TABLE IF NOT EXISTS test_part2 ( @@ -213,5 +211,10 @@ LINE 1: ALTER TABLE test_part2_1 add PARTITION p2 VALUES (add(600,10... HINT: No function matches the given name and argument types. You might need to add explicit type casts. ALTER TABLE test_part2_1 add PARTITION p3 VALUES (DEFAULT) (SUBPARTITION p3_0 VALUES LESS THAN (100)); ERROR: can not add none-range partition to range partition table -\c postgres; -drop DATABASE if exists partition_test3; +drop schema partition_test3 cascade; +NOTICE: drop cascades to 4 other objects +DETAIL: drop cascades to table test_part2 +drop cascades to table test_subpart2 +drop cascades to table test_part2_1 +drop cascades to table test_subpart2_1 +reset current_schema; diff --git a/contrib/dolphin/expected/partition_test4.out b/contrib/dolphin/expected/partition_test4.out index ab7c79f1aea6a35663eaca0f7d5a1fbbab58965e..d190c90939d4f00b7cd853e9b8e874c82ebf5df5 100644 --- a/contrib/dolphin/expected/partition_test4.out +++ b/contrib/dolphin/expected/partition_test4.out @@ -1,7 +1,5 @@ -drop DATABASE if exists partition_test4; -NOTICE: database "partition_test4" does not exist, skipping -CREATE DATABASE partition_test4 dbcompatibility 'B'; -\c partition_test4; +create schema partition_test4; +set current_schema to 'partition_test4'; CREATE TABLE test_range_subpart ( a INT4 PRIMARY KEY, @@ -70,7 +68,7 @@ alter table test_range_subpart reorganize partition p1,p2 into (partition m1 val select pg_get_tabledef('test_range_subpart'); pg_get_tabledef ------------------------------------------------------------------------------------------------------------------------ - SET search_path = public; + + SET 
search_path = partition_test4; + CREATE TABLE test_range_subpart ( + a integer NOT NULL, + b integer + @@ -142,7 +140,7 @@ alter table test_range_part reorganize partition p1,p2 into (partition m1 values select pg_get_tabledef('test_range_part'); pg_get_tabledef ----------------------------------------------------------------------------------------------- - SET search_path = public; + + SET search_path = partition_test4; + CREATE TABLE test_range_part ( + a integer NOT NULL, + b integer + @@ -201,7 +199,7 @@ alter table test_list_part reorganize partition p1,p2 into (partition m1 values( select pg_get_tabledef('test_list_part'); pg_get_tabledef --------------------------------------------------------------------------------------------- - SET search_path = public; + + SET search_path = partition_test4; + CREATE TABLE test_list_part ( + a integer NOT NULL, + b integer + @@ -258,7 +256,7 @@ alter table test_list_subpart reorganize partition p1,p2 into (partition m1 valu select pg_get_tabledef('test_list_subpart'); pg_get_tabledef ---------------------------------------------------------------------------------------------------------------------- - SET search_path = public; + + SET search_path = partition_test4; + CREATE TABLE test_list_subpart ( + a integer NOT NULL, + b integer + @@ -349,7 +347,7 @@ alter table test_part_ustore reorganize partition p1,p2 into (partition m1 value select pg_get_tabledef('test_part_ustore'); pg_get_tabledef ----------------------------------------------------------------------------------------------------------------------------- - SET search_path = public; + + SET search_path = partition_test4; + CREATE TABLE test_part_ustore ( + a integer NOT NULL, + b integer + @@ -420,7 +418,7 @@ alter table test_part_segment reorganize partition p1,p2 into (partition m1 valu select pg_get_tabledef('test_part_segment'); pg_get_tabledef --------------------------------------------------------------------------------------------------- - SET 
search_path = public; + + SET search_path = partition_test4; + CREATE TABLE test_part_segment ( + a integer NOT NULL, + b integer + @@ -497,7 +495,7 @@ insert into b_range_hash_t01 values(1,2,3),(51,3,4); select pg_get_tabledef('b_range_hash_t01'); pg_get_tabledef --------------------------------------------------------------------------------------------------------------------- - SET search_path = public; + + SET search_path = partition_test4; + CREATE TABLE b_range_hash_t01 ( + c1 integer NOT NULL, + c2 integer, + @@ -545,7 +543,7 @@ alter table b_range_hash_t01 reorganize partition p1 into (partition m1 values l select pg_get_tabledef('b_range_hash_t01'); pg_get_tabledef --------------------------------------------------------------------------------------------------------------------- - SET search_path = public; + + SET search_path = partition_test4; + CREATE TABLE b_range_hash_t01 ( + c1 integer NOT NULL, + c2 integer, + @@ -717,5 +715,19 @@ partition p2 values less than(300), partition p3 values less than (maxvalue) ); NOTICE: CREATE TABLE / PRIMARY KEY will create implicit index "b_range_mt2_pkey" for table "b_range_mt2" -\c postgres; -drop DATABASE if exists partition_test4; +drop schema partition_test4 cascade; +NOTICE: drop cascades to 13 other objects +DETAIL: drop cascades to table test_range_subpart +drop cascades to table test_range_part +drop cascades to table test_list_part +drop cascades to table test_list_subpart +drop cascades to table test_no_part +drop cascades to table test_part_ustore +drop cascades to table test_part_segment +drop cascades to table b_range_hash_t01 +drop cascades to table b_range_hash_t05 +drop cascades to table b_interval_t1 +drop cascades to table b_range_range_t01 +drop cascades to table b_range_mt1 +drop cascades to table b_range_mt2 +reset current_schema; diff --git a/contrib/dolphin/expected/pl_debugger_client.out b/contrib/dolphin/expected/pl_debugger_client.out index 
1297ab301a2b5c2b31be24840d2ec07a7659f575..e9784afed16b66d0e0a75212e1726d474b591ab4 100644 --- a/contrib/dolphin/expected/pl_debugger_client.out +++ b/contrib/dolphin/expected/pl_debugger_client.out @@ -1,4 +1,4 @@ -\c test_ansi_quotes +set current_schema to 'test_ansi_quotes'; SET dolphin.sql_mode TO 'sql_mode_strict,sql_mode_full_group,pipes_as_concat'; -- wait for server establishment select pg_sleep(3); diff --git a/contrib/dolphin/expected/pl_debugger_server.out b/contrib/dolphin/expected/pl_debugger_server.out index 1f4746d6c94fea0b3d06ffff3b4c0fd0f80cb676..1355d2d3514d91417a106e8b8f5e4a9e08b4801a 100644 --- a/contrib/dolphin/expected/pl_debugger_server.out +++ b/contrib/dolphin/expected/pl_debugger_server.out @@ -1,4 +1,4 @@ -\c test_ansi_quotes +set current_schema to 'test_ansi_quotes'; SET dolphin.sql_mode TO 'sql_mode_strict,sql_mode_full_group,pipes_as_concat'; -- setups drop schema if exists pl_debugger cascade; diff --git a/contrib/dolphin/expected/read_only_guc_test.out b/contrib/dolphin/expected/read_only_guc_test.out old mode 100755 new mode 100644 index 6c76369a32cb1cc71e3a538b115c075bab2881a3..e7fb2ca6dceedf73be199b90555e14652eb5600a --- a/contrib/dolphin/expected/read_only_guc_test.out +++ b/contrib/dolphin/expected/read_only_guc_test.out @@ -1,9 +1,5 @@ --- b compatibility case -drop database if exists read_only_guc_test; -NOTICE: database "read_only_guc_test" does not exist, skipping --- create database read_only_guc_test dbcompatibility 'b'; -create database read_only_guc_test with DBCOMPATIBILITY = 'B'; -\c read_only_guc_test +create schema read_only_guc_test; +set current_schema to 'read_only_guc_test'; show version_comment; version_comment @@ -235,5 +231,5 @@ SELECT * FROM pg_settings WHERE NAME='wait_timeout'; (1 row) -\c postgres -drop database if exists read_only_guc_test; +drop schema read_only_guc_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/regexp.out b/contrib/dolphin/expected/regexp.out index 
6824f001c93863e6c393c39edc4310a73929ef64..300f26305776c53b5026589157d497c10ed87fcb 100644 --- a/contrib/dolphin/expected/regexp.out +++ b/contrib/dolphin/expected/regexp.out @@ -1,12 +1,10 @@ -drop database if exists db_regexp; -NOTICE: database "db_regexp" does not exist, skipping -create database db_regexp dbcompatibility 'b'; -\c db_regexp +create schema db_regexp; +set current_schema to 'db_regexp'; select regexp('a', true); regexp -------- 1 (1 row) -\c postgres -drop database if exists db_regexp; +drop schema db_regexp cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/replace_test/replace.out b/contrib/dolphin/expected/replace_test/replace.out old mode 100755 new mode 100644 index 98e56aca3d97b47e157a7bcfef8a7ae5d6e9623a..cec863883fe69c0bb0a3279f877e8c053f1e8b1b --- a/contrib/dolphin/expected/replace_test/replace.out +++ b/contrib/dolphin/expected/replace_test/replace.out @@ -1,7 +1,5 @@ -drop database if exists db_replace; -NOTICE: database "db_replace" does not exist, skipping -create database db_replace dbcompatibility 'B'; -\c db_replace; +create schema db_replace; +set current_schema to 'db_replace'; create table t1 (a int); create table t2 (a int); insert into t1 values(1); @@ -51,5 +49,12 @@ replace into Parts partition(p1) table T2; replace DELAYED into Parts partition(p1) values(4); replace DELAYED into Parts partition(p1) values(4); replace DELAYED into Parts partition(p1) select A from T2 where A >=2 ; -\c postgres -drop database db_replace; +drop schema db_replace cascade; +NOTICE: drop cascades to 6 other objects +DETAIL: drop cascades to table t1 +drop cascades to table t2 +drop cascades to table parts +drop cascades to table "T1" +drop cascades to table "T2" +drop cascades to table "Parts" +reset current_schema; diff --git a/contrib/dolphin/expected/second_microsecond.out b/contrib/dolphin/expected/second_microsecond.out index d865a97ac1675f9897eec327ffaec5c8b9adad41..4786adc83f485a6d9145a21e5491a6ef6735e8ae 100644 --- 
a/contrib/dolphin/expected/second_microsecond.out +++ b/contrib/dolphin/expected/second_microsecond.out @@ -1,5 +1,5 @@ -create database second_microsecond dbcompatibility = 'b'; -\c second_microsecond +create schema second_microsecond; +set current_schema to 'second_microsecond'; select microsecond(timestamp '2021-11-4 16:30:44.3411'); microsecond ------------- @@ -60,5 +60,5 @@ select second(timetz(6) '2021-11-4 16:30:44.3411'); 44 (1 row) -\c postgres -drop database second_microsecond; +drop schema second_microsecond cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/set_password.out b/contrib/dolphin/expected/set_password.out index 1e0352ec2563b102fa6098c69006dc182e8299c2..592c6d63d1167289972ebc4afe6113ab3535f2fc 100644 --- a/contrib/dolphin/expected/set_password.out +++ b/contrib/dolphin/expected/set_password.out @@ -1,7 +1,5 @@ -drop database if exists test_set_password; -NOTICE: database "test_set_password" does not exist, skipping -create database test_set_password dbcompatibility 'b'; -\c test_set_password +create schema test_set_password; +set current_schema to 'test_set_password'; set password = 'abc@1234'; NOTICE: The iteration value of password is not recommended.Setting the iteration value too small reduces the security of the password, and setting it too large results in performance degradation. set password for current_user() = 'abc@2345'; @@ -17,6 +15,7 @@ set session authorization user1 password 'abc@3456'; set password for 'user1'@'%' = PASSWORD('abc@4567') replace 'abc@3456'; NOTICE: The iteration value of password is not recommended.Setting the iteration value too small reduces the security of the password, and setting it too large results in performance degradation. 
set session authorization user1 password 'abc@4567'; -\c postgres -drop database if exists test_set_password; +\c contrib_regression drop user user1; +drop schema test_set_password cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/show.out b/contrib/dolphin/expected/show.out index 26383f3ce1fe3fea97f16cf2c011b07f271079bb..8340aeb7ca97b72a8f9f4c3f721f923307e557aa 100644 --- a/contrib/dolphin/expected/show.out +++ b/contrib/dolphin/expected/show.out @@ -1,5 +1,5 @@ -create database show_test dbcompatibility 'b'; -\c show_test +create schema show_test; +set current_schema to 'show_test'; create user grant_test identified by 'H&*#^DH85@#(J'; NOTICE: The iteration value of password is not recommended.Setting the iteration value too small reduces the security of the password, and setting it too large results in performance degradation. set search_path = 'grant_test'; @@ -270,5 +270,5 @@ drop cascades to function grant_test.tri_insert_func() drop cascades to function grant_test.tri_update_func() drop cascades to function grant_test.tri_delete_func() drop cascades to function grant_test.tri_truncate_func0010() -\c postgres -drop database show_test; \ No newline at end of file +drop schema show_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/show_create.out b/contrib/dolphin/expected/show_create.out index 3511d099f1809372c552baa36f3e6c8f729dcd37..e3c55daa3b7d3ec956105aa035c521e865617cad 100644 --- a/contrib/dolphin/expected/show_create.out +++ b/contrib/dolphin/expected/show_create.out @@ -1,7 +1,5 @@ -drop database if exists show_create; -NOTICE: database "show_create" does not exist, skipping -create database show_create dbcompatibility 'b'; -\c show_create +create schema show_create; +set current_schema to 'show_create'; CREATE USER test_showcreate WITH PASSWORD 'openGauss@123'; NOTICE: The iteration value of password is not recommended.Setting the iteration value too small reduces the security of the password, and setting 
it too large results in performance degradation. GRANT ALL PRIVILEGES TO test_showcreate; @@ -1226,5 +1224,5 @@ reset current_schema; drop schema test_get_def cascade; RESET ROLE; DROP USER test_showcreate; -\c postgres -drop database if exists show_create; +drop schema show_create cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/show_create_database.out b/contrib/dolphin/expected/show_create_database.out index 10daeea806e8f2d01fe2395ab6b28def29f14753..81262b8b39fb68180fd7d211cfc3ca87c2b320f2 100644 --- a/contrib/dolphin/expected/show_create_database.out +++ b/contrib/dolphin/expected/show_create_database.out @@ -1,7 +1,5 @@ -drop database if exists show_createdatabase; -NOTICE: database "show_createdatabase" does not exist, skipping -create database show_createdatabase dbcompatibility 'b'; -\c show_createdatabase +create schema show_createdatabase; +set current_schema to 'show_createdatabase'; CREATE USER test_showcreate_database WITH PASSWORD 'openGauss@123'; NOTICE: The iteration value of password is not recommended.Setting the iteration value too small reduces the security of the password, and setting it too large results in performance degradation. 
GRANT ALL PRIVILEGES TO test_showcreate_database; @@ -64,5 +62,5 @@ CONTEXT: referenced column: Create Database drop schema test_get_database cascade; RESET ROLE; DROP USER test_showcreate_database; -\c postgres -drop database if exists show_createdatabase; +drop schema show_createdatabase cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/show_variables.out b/contrib/dolphin/expected/show_variables.out index ba2a875f03e26ea1ad7afe331542448cd3b42d9e..1551256b14b572cbde748634e330fe777f579d32 100644 --- a/contrib/dolphin/expected/show_variables.out +++ b/contrib/dolphin/expected/show_variables.out @@ -1,7 +1,5 @@ -drop database if exists show_variables; -NOTICE: database "show_variables" does not exist, skipping -create database show_variables dbcompatibility 'b'; -\c show_variables +create schema show_variables; +set current_schema to 'show_variables'; SET datestyle TO postgres, dmy; show variables like 'DateSty%'; Variable_name | Value @@ -71,5 +69,5 @@ show global variables where variable_name = 'DateStyle'; (1 row) RESET datestyle; -\c postgres -drop database if exists show_variables; +drop schema show_variables cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/signed_unsigned_cast.out b/contrib/dolphin/expected/signed_unsigned_cast.out index 3506472cc76586b010cfe7c1f036de6059c24f05..492d3373eca7c069af423f2cfa7d0a20e21724a4 100644 --- a/contrib/dolphin/expected/signed_unsigned_cast.out +++ b/contrib/dolphin/expected/signed_unsigned_cast.out @@ -1,7 +1,5 @@ -drop database if exists signed_unsigned_cast; -NOTICE: database "signed_unsigned_cast" does not exist, skipping -create database signed_unsigned_cast dbcompatibility 'b'; -\c signed_unsigned_cast +create schema signed_unsigned_cast; +set current_schema to 'signed_unsigned_cast'; select cast(1-2 as unsigned); uint8 ---------------------- @@ -26,5 +24,5 @@ select cast(cast(1 + 5 as unsigned) as signed); 6 (1 row) -\c postgres -drop database signed_unsigned_cast; +drop 
schema signed_unsigned_cast cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/single_line_trigger.out b/contrib/dolphin/expected/single_line_trigger.out index f1fcc789f9853733b6e38a79ad681f741448cd15..9d30c3cea220a96997573b10ff3540a6fca5dc82 100644 --- a/contrib/dolphin/expected/single_line_trigger.out +++ b/contrib/dolphin/expected/single_line_trigger.out @@ -1,9 +1,7 @@ --create trigger -- test mysql compatibility trigger -drop database if exists db_mysql; -NOTICE: database "db_mysql" does not exist, skipping -create database db_mysql dbcompatibility 'B'; -\c db_mysql +create schema db_mysql; +set current_schema to 'db_mysql'; create table t (id int); create table t1 (id int); create table animals (id int, name char(30)); @@ -309,5 +307,5 @@ NOTICE: trigger "ins_call100" already exists, skipping drop table t, t1; drop procedure proc; reset enable_set_variable_b_format; -\c postgres -drop database db_mysql; +drop schema db_mysql cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/string_func_test/db_b_ascii_test.out b/contrib/dolphin/expected/string_func_test/db_b_ascii_test.out index 8902df3d7186026bdf88a21ddc41da0622c28327..f69fff7447babc86da906cb2cbffb433b836d2c6 100644 --- a/contrib/dolphin/expected/string_func_test/db_b_ascii_test.out +++ b/contrib/dolphin/expected/string_func_test/db_b_ascii_test.out @@ -1,3 +1,7 @@ +drop database if exists db_b_ascii_test; +NOTICE: database "db_b_ascii_test" does not exist, skipping +create database db_b_ascii_test dbcompatibility 'A'; +\c db_b_ascii_test SELECT ASCII('a'); ascii ------- @@ -10,10 +14,10 @@ SELECT ASCII('你'); 20320 (1 row) -drop database if exists db_b_ascii_test; -NOTICE: database "db_b_ascii_test" does not exist, skipping -create database db_b_ascii_test dbcompatibility 'B'; -\c db_b_ascii_test +\c contrib_regression +drop database db_b_ascii_test; +create schema db_b_ascii_test; +set current_schema to 'db_b_ascii_test'; SELECT ASCII('a'); ascii ------- @@ -26,5 +30,5 
@@ SELECT ASCII('你'); 228 (1 row) -\c postgres -drop database db_b_ascii_test; +drop schema db_b_ascii_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/string_func_test/db_b_from_base64_test.out b/contrib/dolphin/expected/string_func_test/db_b_from_base64_test.out index 3e17f363945f2d89b59a0e4446d043fb962c5fd7..2338575f229546f35ebbc0df12f15642d3a8536f 100644 --- a/contrib/dolphin/expected/string_func_test/db_b_from_base64_test.out +++ b/contrib/dolphin/expected/string_func_test/db_b_from_base64_test.out @@ -1,7 +1,5 @@ -drop database if exists from_base64; -NOTICE: database "from_base64" does not exist, skipping -create database from_base64 dbcompatibility 'b'; -\c from_base64 +create schema from_base64; +set current_schema to 'from_base64'; --测试正常base64编码作为输入,返回base64编码的解码结果 SELECT FROM_BASE64('YWJj'); from_base64 @@ -82,5 +80,6 @@ SELECT FROM_BASE64(name) from test_base64; (3 rows) -\c postgres -drop database if exists from_base64; +drop schema from_base64 cascade; +NOTICE: drop cascades to table test_base64 +reset current_schema; diff --git a/contrib/dolphin/expected/string_func_test/db_b_insert_test.out b/contrib/dolphin/expected/string_func_test/db_b_insert_test.out index 87dc88453d12127497d38832089b13ff19b65c88..58cff70b2ce13e066dec50fc5a706e733a7426b9 100644 --- a/contrib/dolphin/expected/string_func_test/db_b_insert_test.out +++ b/contrib/dolphin/expected/string_func_test/db_b_insert_test.out @@ -1,7 +1,5 @@ -drop database if exists db_b_insert_test; -NOTICE: database "db_b_insert_test" does not exist, skipping -create database db_b_insert_test dbcompatibility 'B'; -\c db_b_insert_test +create schema db_b_insert_test; +set current_schema to 'db_b_insert_test'; select insert('abcdefg', 2, 4, 'yyy'); insert -------- @@ -157,5 +155,5 @@ CONTEXT: referenced column: insert abcdefg (1 row) -\c postgres -drop database db_b_insert_test; +drop schema db_b_insert_test cascade; +reset current_schema; diff --git 
a/contrib/dolphin/expected/string_func_test/db_b_left_right_test.out b/contrib/dolphin/expected/string_func_test/db_b_left_right_test.out index f14bba904a4abbfa990c94ee42a477ee1d602d48..611694721169ebfbeea1ae618dedc2436770f2da 100644 --- a/contrib/dolphin/expected/string_func_test/db_b_left_right_test.out +++ b/contrib/dolphin/expected/string_func_test/db_b_left_right_test.out @@ -1,3 +1,7 @@ +drop database if exists db_b_left_right_test; +NOTICE: database "db_b_left_right_test" does not exist, skipping +create database db_b_left_right_test dbcompatibility 'A'; +\c db_b_left_right_test SELECT left('abcdefg', 3); left ------ @@ -22,10 +26,10 @@ SELECT right('abcdefg', -3); defg (1 row) -drop database if exists db_b_left_right_test; -NOTICE: database "db_b_left_right_test" does not exist, skipping -create database db_b_left_right_test dbcompatibility 'B'; -\c db_b_left_right_test +\c contrib_regression +drop database db_b_left_right_test; +create schema db_b_left_right_test; +set current_schema to 'db_b_left_right_test'; set bytea_output to escape; SELECT left('abcdefg', 3); left @@ -303,5 +307,5 @@ select right('abc',5/2); abc (1 row) -\c postgres -drop database db_b_left_right_test; +drop schema db_b_left_right_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/string_func_test/db_b_ord_test.out b/contrib/dolphin/expected/string_func_test/db_b_ord_test.out index 493b9e614d619be630ac6dc8f2c7810c29d6d8dd..141e8a6e2117289fdf669f1e6b47acb339e0ba3f 100644 --- a/contrib/dolphin/expected/string_func_test/db_b_ord_test.out +++ b/contrib/dolphin/expected/string_func_test/db_b_ord_test.out @@ -1,7 +1,5 @@ -drop database if exists db_b_ord_test; -NOTICE: database "db_b_ord_test" does not exist, skipping -create database db_b_ord_test dbcompatibility 'B'; -\c db_b_ord_test +create schema db_b_ord_test; +set current_schema to 'db_b_ord_test'; -- test 1 byte select ord('1111'); ord @@ -85,5 +83,6 @@ select ord(name) from test_ord; 4036199316 (3 rows) -\c 
postgres -drop database if exists db_b_ord_test; +drop schema db_b_ord_test cascade; +NOTICE: drop cascades to table test_ord +reset current_schema; diff --git a/contrib/dolphin/expected/string_func_test/db_b_quote_test.out b/contrib/dolphin/expected/string_func_test/db_b_quote_test.out index 8f5674c5f6ab5a6751d941d5a894f382d6d6d49a..6bc339d7e45cb1bc78677963f19fb16df0d2ed07 100644 --- a/contrib/dolphin/expected/string_func_test/db_b_quote_test.out +++ b/contrib/dolphin/expected/string_func_test/db_b_quote_test.out @@ -1,7 +1,5 @@ -drop database if exists db_b_quote_test; -NOTICE: database "db_b_quote_test" does not exist, skipping -create database db_b_quote_test dbcompatibility 'B'; -\c db_b_quote_test +create schema db_b_quote_test; +set current_schema to 'db_b_quote_test'; SELECT QUOTE(E'Don\'t!'); quote ----------- @@ -20,5 +18,5 @@ SELECT QUOTE('O\hello'); E'O\\hello' (1 row) -\c postgres -drop database db_b_quote_test; +drop schema db_b_quote_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/string_func_test/db_b_string_length_test.out b/contrib/dolphin/expected/string_func_test/db_b_string_length_test.out index f385e34e0b8b0adf6b449aca094c135e07fe63c4..9afceacb95b32b10a79176cab56d8d5ce9ccd8e8 100644 --- a/contrib/dolphin/expected/string_func_test/db_b_string_length_test.out +++ b/contrib/dolphin/expected/string_func_test/db_b_string_length_test.out @@ -1,3 +1,7 @@ +drop database if exists db_b_string_length_test; +NOTICE: database "db_b_string_length_test" does not exist, skipping +create database db_b_string_length_test dbcompatibility 'A'; +\c db_b_string_length_test SELECT length('jose'); length -------- @@ -22,10 +26,10 @@ SELECT length('你好呀jose'); 7 (1 row) -drop database if exists db_b_string_length_test; -NOTICE: database "db_b_string_length_test" does not exist, skipping -create database db_b_string_length_test dbcompatibility 'B'; -\c db_b_string_length_test +\c contrib_regression +drop database db_b_string_length_test; 
+create schema db_b_string_length_test; +set current_schema to 'db_b_string_length_test'; SELECT length('jose'); length -------- @@ -50,5 +54,5 @@ SELECT length('你好呀jose'); 13 (1 row) -\c postgres -drop database db_b_string_length_test; +drop schema db_b_string_length_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/string_func_test/db_b_substr_test.out b/contrib/dolphin/expected/string_func_test/db_b_substr_test.out index 3a3c96dabc32815ac37d179cca7c3029b32d232b..e419278c8b9c60c15308564956c20ca9fae288d0 100644 --- a/contrib/dolphin/expected/string_func_test/db_b_substr_test.out +++ b/contrib/dolphin/expected/string_func_test/db_b_substr_test.out @@ -1,3 +1,7 @@ +drop database if exists db_b_substr_test; +NOTICE: database "db_b_substr_test" does not exist, skipping +create database db_b_substr_test dbcompatibility 'A'; +\c db_b_substr_test DROP TABLE IF EXISTS template_string; NOTICE: table "template_string" does not exist, skipping CREATE TABLE template_string(a TEXT, b BYTEA); @@ -55,10 +59,10 @@ FROM template_string; (1 row) DROP TABLE IF EXISTS template_string; -drop database if exists db_b_substr_test; -NOTICE: database "db_b_substr_test" does not exist, skipping -create database db_b_substr_test dbcompatibility 'B'; -\c db_b_substr_test +\c contrib_regression +drop database db_b_substr_test; +create schema db_b_substr_test; +set current_schema to 'db_b_substr_test'; set bytea_output to escape; DROP TABLE IF EXISTS template_string; NOTICE: table "template_string" does not exist, skipping @@ -567,5 +571,9 @@ select c1, c2, substr(c1 for c2) from test_row order by c1; abcdefg | -2 | (3 rows) -\c postgres -drop database db_b_substr_test; +drop schema db_b_substr_test cascade; +NOTICE: drop cascades to 3 other objects +DETAIL: drop cascades to table template_string +drop cascades to table test_column +drop cascades to table test_row +reset current_schema; diff --git a/contrib/dolphin/expected/string_func_test/db_b_to_base64_test.out 
b/contrib/dolphin/expected/string_func_test/db_b_to_base64_test.out index ae3fe66811669002e8d10895627662d1255d3787..b5dc2c084e188fbc774f78bfdf94a62db887fa33 100644 --- a/contrib/dolphin/expected/string_func_test/db_b_to_base64_test.out +++ b/contrib/dolphin/expected/string_func_test/db_b_to_base64_test.out @@ -1,7 +1,5 @@ -drop database if exists to_base64_test; -NOTICE: database "to_base64_test" does not exist, skipping -create database to_base64_test dbcompatibility 'b'; -\c to_base64_test +create schema to_base64_test; +set current_schema to 'to_base64_test'; --测试字符串作为输入,返回base64编码的编码结果 SELECT TO_BASE64('123456'); to_base64 @@ -124,5 +122,6 @@ SELECT TO_BASE64(name) from test_base64; dG9fYmFzZTY0 (2 rows) -\c postgres -drop database if exists to_base64_test; +drop schema to_base64_test cascade; +NOTICE: drop cascades to table test_base64 +reset current_schema; diff --git a/contrib/dolphin/expected/string_func_test/db_b_trim_test.out b/contrib/dolphin/expected/string_func_test/db_b_trim_test.out index 88354f30a20b20c053a01670faa3e609d92aae51..a5714ccd9bed171af31dc2c75e604fd9fc047427 100644 --- a/contrib/dolphin/expected/string_func_test/db_b_trim_test.out +++ b/contrib/dolphin/expected/string_func_test/db_b_trim_test.out @@ -1,3 +1,7 @@ +drop database if exists db_b_trim_test; +NOTICE: database "db_b_trim_test" does not exist, skipping +create database db_b_trim_test dbcompatibility 'A'; +\c db_b_trim_test SELECT TRIM(' bar '); btrim ------- @@ -40,10 +44,10 @@ SELECT TRIM(TRAILING 'xyz' FROM 'xyzxbarxxyz'); xyzxbar (1 row) -drop database if exists db_b_trim_test; -NOTICE: database "db_b_trim_test" does not exist, skipping -create database db_b_trim_test dbcompatibility 'B'; -\c db_b_trim_test +\c contrib_regression +drop database db_b_trim_test; +create schema db_b_trim_test; +set current_schema to 'db_b_trim_test'; SELECT TRIM(' bar '); trim ------ @@ -141,5 +145,5 @@ SELECT TRIM(TRAILING ' X '::bytea); X (1 row) -\c postgres -drop database db_b_trim_test; 
+drop schema db_b_trim_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/string_func_test/db_b_unhex_test.out b/contrib/dolphin/expected/string_func_test/db_b_unhex_test.out index fdc0066270ceeacab7891c75ffd801a1e8a201c7..e138fbda06dcb5239d0ae6034359c9b056cfc472 100644 --- a/contrib/dolphin/expected/string_func_test/db_b_unhex_test.out +++ b/contrib/dolphin/expected/string_func_test/db_b_unhex_test.out @@ -1,7 +1,5 @@ -drop database if exists unhex_test; -NOTICE: database "unhex_test" does not exist, skipping -create database unhex_test dbcompatibility 'b'; -\c unhex_test +create schema unhex_test; +set current_schema to 'unhex_test'; --测试字符串作为输入,返回十六进制编码的编码结果 SELECT UNHEX('6f70656e4761757373'); unhex @@ -101,5 +99,6 @@ SELECT UNHEX(name) from test_unhex; openGauss (2 rows) -\c postgres -drop database if exists unhex_test; +drop schema unhex_test cascade; +NOTICE: drop cascades to table test_unhex +reset current_schema; diff --git a/contrib/dolphin/expected/string_func_test/test_substring_index.out b/contrib/dolphin/expected/string_func_test/test_substring_index.out index 2edab4fdce323ba2cdd89cb48eec2fcec84e4318..933598550a3b21663644b152e70a8d4d102bc471 100644 --- a/contrib/dolphin/expected/string_func_test/test_substring_index.out +++ b/contrib/dolphin/expected/string_func_test/test_substring_index.out @@ -1,7 +1,5 @@ -drop database if exists test_substring_index; -NOTICE: database "test_substring_index" does not exist, skipping -create database test_substring_index dbcompatibility 'b'; -\c test_substring_index +create schema test_substring_index; +set current_schema to 'test_substring_index'; SELECT SUBSTRING_INDEX('www.opengauss.com','.',0); substring_index ----------------- @@ -245,5 +243,5 @@ SELECT SUBSTRING_INDEX(myDate,'-',1) FROM myTable; (1 row) drop table myTable; -\c postgres -drop database test_substring_index; +drop schema test_substring_index cascade; +reset current_schema; diff --git 
a/contrib/dolphin/expected/test_alter_table.out b/contrib/dolphin/expected/test_alter_table.out index 0bd95470841fad07157860c6730569b98c56a70b..b2a3a980f93cdaf627a6da94020e30d383eab045 100644 --- a/contrib/dolphin/expected/test_alter_table.out +++ b/contrib/dolphin/expected/test_alter_table.out @@ -1,7 +1,5 @@ -drop database if exists db_alter_table; -NOTICE: database "db_alter_table" does not exist, skipping -create database db_alter_table dbcompatibility 'b'; -\c db_alter_table +create schema db_alter_table; +set current_schema to 'db_alter_table'; create table alter_table_tbl1 (a int primary key, b int); NOTICE: CREATE TABLE / PRIMARY KEY will create implicit index "alter_table_tbl1_pkey" for table "alter_table_tbl1" create table alter_table_tbl2 (c int primary key, d int); @@ -81,7 +79,7 @@ show dolphin.sql_mode; reset dolphin.sql_mode; \d+ table_ddl_0030_02 - Table "public.table_ddl_0030_02" + Table "db_alter_table.table_ddl_0030_02" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- col1 | integer | | plain | | @@ -96,7 +94,7 @@ create table test_primary(f11 int, f12 varchar(20), f13 bool); alter table test_primary add primary key using btree(f11 desc, f12 asc) comment 'primary key' using btree; NOTICE: ALTER TABLE / ADD PRIMARY KEY will create implicit index "test_primary_pkey" for table "test_primary" \d+ test_primary - Table "public.test_primary" + Table "db_alter_table.test_primary" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f11 | integer | not null | plain | | @@ -112,7 +110,7 @@ create table test_primary(f11 int, f12 varchar(20), f13 bool); alter table test_primary add primary key (f11 desc, f12 asc) comment 'primary key' using btree; NOTICE: ALTER TABLE / ADD PRIMARY KEY will create implicit index "test_primary_pkey" for table "test_primary" \d+ test_primary - 
Table "public.test_primary" + Table "db_alter_table.test_primary" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f11 | integer | not null | plain | | @@ -128,7 +126,7 @@ create table test_primary(f11 int, f12 varchar(20), f13 bool); alter table test_primary add primary key using btree(f11 desc, f12 asc) comment 'primary key' using btree using btree; NOTICE: ALTER TABLE / ADD PRIMARY KEY will create implicit index "test_primary_pkey" for table "test_primary" \d+ test_primary - Table "public.test_primary" + Table "db_alter_table.test_primary" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f11 | integer | not null | plain | | @@ -144,7 +142,7 @@ create table test_unique(f31 int, f32 varchar(20)); alter table test_unique add unique using btree(f31) comment 'unique index' using btree; NOTICE: ALTER TABLE / ADD UNIQUE will create implicit index "test_unique_f31_key" for table "test_unique" \d+ test_unique - Table "public.test_unique" + Table "db_alter_table.test_unique" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f31 | integer | | plain | | @@ -159,7 +157,7 @@ create table test_unique(f31 int, f32 varchar(20)); alter table test_unique add unique (f31) comment 'unique index' using btree; NOTICE: ALTER TABLE / ADD UNIQUE will create implicit index "test_unique_f31_key" for table "test_unique" \d+ test_unique - Table "public.test_unique" + Table "db_alter_table.test_unique" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f31 | integer | | plain | | @@ -174,7 +172,7 @@ create table test_unique(f31 int, f32 varchar(20)); alter table test_unique add unique using 
btree(f31) comment 'unique index' using btree using btree; NOTICE: ALTER TABLE / ADD UNIQUE will create implicit index "test_unique_f31_key" for table "test_unique" \d+ test_unique - Table "public.test_unique" + Table "db_alter_table.test_unique" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f31 | integer | | plain | | @@ -188,7 +186,7 @@ drop table test_unique; create table test_unique(f31 int, f32 varchar(20), constraint con_t_unique unique using btree(f31, f32) comment 'unique index' using btree); NOTICE: CREATE TABLE / UNIQUE will create implicit index "con_t_unique" for table "test_unique" \d+ test_unique - Table "public.test_unique" + Table "db_alter_table.test_unique" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f31 | integer | | plain | | @@ -202,7 +200,7 @@ drop table test_unique; create table test_unique(f31 int, f32 varchar(20), constraint con_t_unique unique (f31, f32) comment 'unique index' using btree); NOTICE: CREATE TABLE / UNIQUE will create implicit index "con_t_unique" for table "test_unique" \d+ test_unique - Table "public.test_unique" + Table "db_alter_table.test_unique" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f31 | integer | | plain | | @@ -216,7 +214,7 @@ drop table test_unique; create table test_unique(f31 int, f32 varchar(20), constraint con_t_unique unique (f31, f32) comment 'unique index' using btree using btree); NOTICE: CREATE TABLE / UNIQUE will create implicit index "con_t_unique" for table "test_unique" \d+ test_unique - Table "public.test_unique" + Table "db_alter_table.test_unique" Column | Type | Modifiers | Storage | Stats target | Description 
--------+-----------------------+-----------+----------+--------------+------------- f31 | integer | | plain | | @@ -230,7 +228,7 @@ drop table test_unique; create table test_primary(f11 int, f12 varchar(20), f13 bool, constraint con_t_pri primary key using btree(f11 desc, f12 asc) comment 'primary key' using btree); NOTICE: CREATE TABLE / PRIMARY KEY will create implicit index "con_t_pri" for table "test_primary" \d+ test_primary - Table "public.test_primary" + Table "db_alter_table.test_primary" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f11 | integer | not null | plain | | @@ -245,7 +243,7 @@ drop table test_primary; create table test_primary(f11 int, f12 varchar(20), f13 bool, constraint con_t_pri primary key (f11 desc, f12 asc) comment 'primary key' using btree); NOTICE: CREATE TABLE / PRIMARY KEY will create implicit index "con_t_pri" for table "test_primary" \d+ test_primary - Table "public.test_primary" + Table "db_alter_table.test_primary" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f11 | integer | not null | plain | | @@ -260,7 +258,7 @@ drop table test_primary; create table test_primary(f11 int, f12 varchar(20), f13 bool, constraint con_t_pri primary key using btree(f11 desc, f12 asc) comment 'primary key' using btree using btree); NOTICE: CREATE TABLE / PRIMARY KEY will create implicit index "con_t_pri" for table "test_primary" \d+ test_primary - Table "public.test_primary" + Table "db_alter_table.test_primary" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f11 | integer | not null | plain | | @@ -272,5 +270,5 @@ Has OIDs: no Options: orientation=row, compression=no drop table test_primary; -\c postgres -drop database if exists db_alter_table; 
+drop schema db_alter_table cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_binary.out b/contrib/dolphin/expected/test_binary.out index 4896028c6a88eab53b854562d3d030e5f32c7dc9..207e9943d3558617f8945d25a03f9d25a7dce850 100644 --- a/contrib/dolphin/expected/test_binary.out +++ b/contrib/dolphin/expected/test_binary.out @@ -1,7 +1,5 @@ -drop database if exists test_binary; -NOTICE: database "test_binary" does not exist, skipping -create database test_binary dbcompatibility 'B'; -\c test_binary +create schema test_binary; +set current_schema to 'test_binary'; create table binary_templates (a bytea, b binary(5), c varbinary(5)); -- invalid typmod create table invalid_table (b binary(-1)); @@ -56,7 +54,7 @@ create index on test_index using btree (b); create index on test_index using hash (b); create index on test_index using gin (to_tsvector(b::text)); \d test_index - Table "public.test_index" + Table "test_binary.test_index" Column | Type | Modifiers --------+-----------------+----------- a | "binary"(10) | @@ -121,5 +119,6 @@ select * from t_varbinary_061; drop table if exists t_binary_061; drop table if exists t_varbinary_061; -\c postgres -drop database test_binary; +drop schema test_binary cascade; +NOTICE: drop cascades to table test_bytea +reset current_schema; diff --git a/contrib/dolphin/expected/test_bit_xor.out b/contrib/dolphin/expected/test_bit_xor.out index b065f0e906d1bdf159789f691562fcf81b0c4898..6ea0befad84e28f1c928832deff55c9acf721d25 100644 --- a/contrib/dolphin/expected/test_bit_xor.out +++ b/contrib/dolphin/expected/test_bit_xor.out @@ -1,7 +1,5 @@ -drop database if exists test_bit_xor; -NOTICE: database "test_bit_xor" does not exist, skipping -create database test_bit_xor dbcompatibility 'B'; -\c test_bit_xor +create schema test_bit_xor; +set current_schema to 'test_bit_xor'; -- test datetime create table test_datetime (t datetime); select bit_xor(t) from test_datetime; @@ -765,5 +763,12 @@ select bit_xor(col) from 
test_varbit; (1 row) drop table test_varbit; -\c postgres -drop database test_bit_xor; +drop schema test_bit_xor cascade; +NOTICE: drop cascades to 6 other objects +DETAIL: drop cascades to table test_time +drop cascades to table test_time_with_zone +drop cascades to table test_time_with_null +drop cascades to table test_time_with_zone_with_null +drop cascades to table test_date +drop cascades to table test_bit +reset current_schema; diff --git a/contrib/dolphin/expected/test_blob.out b/contrib/dolphin/expected/test_blob.out index 7a680dfcbc0a0d6d046d7c68f0505552fa03db27..a6eb644e733a2c92a4902cdceec4094ef590fe45 100644 --- a/contrib/dolphin/expected/test_blob.out +++ b/contrib/dolphin/expected/test_blob.out @@ -1,7 +1,5 @@ -drop database if exists test_blob; -NOTICE: database "test_blob" does not exist, skipping -create database test_blob dbcompatibility 'B'; -\c test_blob +create schema test_blob; +set current_schema to 'test_blob'; create table test_template (t tinyblob, b blob, m mediumblob, l longblob); insert into test_template values('aaaaaaaaa', 'aaaaaaaaa', 'aaaaaaaaa', 'aaaaaaaaa'); create table test_tiny (t tinyblob); @@ -79,5 +77,5 @@ drop table test_tiny; drop table test_blob; drop table test_medium; drop table test_long; -\c postgres -drop database test_blob; +drop schema test_blob cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_condition.out b/contrib/dolphin/expected/test_condition.out index 6f0db9ac33c2a6f64f0e6940567277dfc384e18f..1ac8b868190df061933e709d3295120d1ea7e479 100644 --- a/contrib/dolphin/expected/test_condition.out +++ b/contrib/dolphin/expected/test_condition.out @@ -1,9 +1,5 @@ --- b compatibility case -drop database if exists db_test_condition; -NOTICE: database "db_test_condition" does not exist, skipping --- create database db_test_condition dbcompatibility 'b'; -create database db_test_condition with DBCOMPATIBILITY = 'B'; -\c db_test_condition +create schema db_test_condition; +set current_schema to 
'db_test_condition'; set dolphin.sql_mode = ''; create table test_bccf (t1 int ,t2 float, t3 char, t4 text); insert into test_bccf values(1,3,null,null); @@ -6998,5 +6994,6 @@ select strcmp(blb, txt) from typeset; -1 (1 row) -\c postgres -drop database db_test_condition; +drop schema db_test_condition cascade; +NOTICE: drop cascades to table typeset +reset current_schema; diff --git a/contrib/dolphin/expected/test_current_user.out b/contrib/dolphin/expected/test_current_user.out index b485d2164fdcdafb966ca8e23e720ee0407964e1..d0f6cc638177e9d8382ae250b3da6081167e6409 100644 --- a/contrib/dolphin/expected/test_current_user.out +++ b/contrib/dolphin/expected/test_current_user.out @@ -1,7 +1,5 @@ -drop database if exists test_current_user; -NOTICE: database "test_current_user" does not exist, skipping -create database test_current_user dbcompatibility 'b'; -\c test_current_user +create schema test_current_user; +set current_schema to 'test_current_user'; select current_user; current_user -------------- @@ -86,5 +84,5 @@ DROP USER MAPPING FOR USER SERVER s1; CREATE USER MAPPING FOR u1 SERVER s1; DROP USER MAPPING FOR u1 SERVER s1; drop user u1; -\c postgres -drop database test_current_user; +drop schema test_current_user cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_datatype.out b/contrib/dolphin/expected/test_datatype.out index bd569bf8bfafa1f951f4c732b168a00fe8d6a1ce..1939f2d03648ca6b73931f6cb0183c3ded2fd5a6 100644 --- a/contrib/dolphin/expected/test_datatype.out +++ b/contrib/dolphin/expected/test_datatype.out @@ -1,7 +1,5 @@ -drop database if exists b_datatype_test; -NOTICE: database "b_datatype_test" does not exist, skipping -create database b_datatype_test dbcompatibility 'B'; -\c b_datatype_test +create schema b_datatype_test; +set current_schema to 'b_datatype_test'; -- bit(n), when insert into bit, support the length less than n, which must be equal to n in normal case create table bit_test(a bit); create table bit_test2(a bit(5)); 
@@ -262,7 +260,7 @@ select b'11'::bit(33); --tinyint(n),smallint(n),mediumint,mediumint(n),int(n),bigint(n) create table all_int_test(a tinyint(9999999999), b smallint(9999999999), c mediumint, d mediumint(9999999999), e int(9999999999), f bigint(9999999999)); \d all_int_test - Table "public.all_int_test" +Table "b_datatype_test.all_int_test" Column | Type | Modifiers --------+----------+----------- a | tinyint | @@ -273,5 +271,5 @@ create table all_int_test(a tinyint(9999999999), b smallint(9999999999), c mediu f | bigint | drop table all_int_test; -\c postgres -drop database b_datatype_test; +drop schema b_datatype_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_fixed.out b/contrib/dolphin/expected/test_fixed.out index 22f456ee2babccc9e5e8d1be8feaa788a95c42b0..14d7655f9a0c33dd2c824372ee4632579efef7b6 100644 --- a/contrib/dolphin/expected/test_fixed.out +++ b/contrib/dolphin/expected/test_fixed.out @@ -1,16 +1,14 @@ -drop database if exists test_fixed; -NOTICE: database "test_fixed" does not exist, skipping -create database test_fixed dbcompatibility 'B'; -\c test_fixed +create schema test_fixed; +set current_schema to 'test_fixed'; DROP TABLE IF EXISTS fixed_test; NOTICE: table "fixed_test" does not exist, skipping CREATE TABLE fixed_test (a fixed(10, 5)); \d fixed_test - Table "public.fixed_test" + Table "test_fixed.fixed_test" Column | Type | Modifiers --------+---------------+----------- a | numeric(10,5) | DROP TABLE fixed_test; -\c postgres -drop database test_fixed; +drop schema test_fixed cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_float_double_real_double_precision_MD.out b/contrib/dolphin/expected/test_float_double_real_double_precision_MD.out index 23ad8e8c5cb5c967a584591fe6894513844e64c5..a75277ea2e5a0e8adb1d51025def2923c296d790 100644 --- a/contrib/dolphin/expected/test_float_double_real_double_precision_MD.out +++ b/contrib/dolphin/expected/test_float_double_real_double_precision_MD.out 
@@ -1,10 +1,8 @@ -drop database if exists float_double_real_double_precision_MD; -NOTICE: database "float_double_real_double_precision_md" does not exist, skipping -create database float_double_real_double_precision_MD dbcompatibility 'b'; -\c float_double_real_double_precision_MD; +create schema double_precision; +set current_schema to 'double_precision'; create table test(a float(20, 2), b double(20, 2), c real(20, 2), d double precision(20, 2)); \d test; - Table "public.test" + Table "double_precision.test" Column | Type | Modifiers --------+---------------+----------- a | numeric(20,2) | @@ -118,8 +116,15 @@ LINE 1: create table test3(a double precision(3.6, 1.6)); ^ create table test3(a double(3.6, 1.6)); \d test3; - Table "public.test3" + Table "double_precision.test3" Column | Type | Modifiers --------+--------------+----------- a | numeric(4,2) | +drop schema double_precision cascade; +NOTICE: drop cascades to 4 other objects +DETAIL: drop cascades to table test +drop cascades to table test1 +drop cascades to table test2 +drop cascades to table test3 +reset current_schema; diff --git a/contrib/dolphin/expected/test_mysql_char.out b/contrib/dolphin/expected/test_mysql_char.out index 5fa3ac6a3240f44e020ca5ecb72ca4019d92a060..178e6f2ae7f558d5815dfa5b73e5140af2a3ffeb 100644 --- a/contrib/dolphin/expected/test_mysql_char.out +++ b/contrib/dolphin/expected/test_mysql_char.out @@ -1,7 +1,5 @@ -drop database if exists test_char; -NOTICE: database "test_char" does not exist, skipping -create database test_char with dbcompatibility='B'; -\c test_char +create schema test_char; +set current_schema to 'test_char'; set dolphin.b_compatibility_mode=1; set dolphin.sql_mode = ''; ---create table @@ -559,5 +557,5 @@ select '0.0100abc' || null; t (1 row) -\c postgres -drop database test_char; +drop schema test_char cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_mysql_enum.out b/contrib/dolphin/expected/test_mysql_enum.out index 
d5b5465dcb52edb7b8a94632dbce57e2d375a886..692deb0ad55f2e7e35306a27fe4bf22435560f22 100644 --- a/contrib/dolphin/expected/test_mysql_enum.out +++ b/contrib/dolphin/expected/test_mysql_enum.out @@ -1,7 +1,5 @@ -drop database if exists test_enum; -NOTICE: database "test_enum" does not exist, skipping -CREATE DATABASE test_enum with dbcompatibility='B'; -\c test_enum +create schema test_enum; +set current_schema to 'test_enum'; -- create extension dolphin; show sql_compatibility; sql_compatibility @@ -113,10 +111,10 @@ CREATE TABLE testtttttttttttttttttttttttttttttttttt ( age INT, myjobbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb enum('x','y') ); -SELECT * FROM pg_type WHERE typname like '%anonymous_enum%'; - typname | typnamespace | typowner | typlen | typbyval | typtype | typcategory | typispreferred | typisdefined | typdelim | typrelid | typelem | typarray | typinput | typoutput | typreceive | typsend | typmodin | typmodout | typanalyze | typalign | typstorage | typnotnull | typbasetype | typtypmod | typndims | typcollation | typdefaultbin | typdefault | typacl ------------------------------------------------------------------+--------------+----------+--------+----------+---------+-------------+----------------+--------------+----------+----------+---------+----------+----------+-----------+------------+-----------+----------+-----------+------------+----------+------------+------------+-------------+-----------+----------+--------------+---------------+------------+-------- - testtttttttttttttttt_myjobbbbbbbbbbbbbbbb_2200_anonymous_enum_1 | 2200 | 10 | 4 | t | e | E | f | t | , | 0 | 0 | 0 | enum_in | enum_out | enum_recv | enum_send | - | - | - | i | p | f | 0 | -1 | 0 | 0 | | | +SELECT count(*) FROM pg_type WHERE typname like 'testtttttttttttttttt_myjobbbbbbbbbbbbbbb%_anonymous_enum_1'; + count +------- + 1 (1 row) drop table testtttttttttttttttttttttttttttttttttt; @@ -240,5 +238,6 @@ W_COUNTRY VARCHAR(20) , W_GMT_OFFSET DECIMAL(5,2) ); ERROR: anoymous enum type does not 
support foreign key -\c postgres -DROP DATABASE test_enum; +drop schema test_enum cascade; +--?.* +reset current_schema; diff --git a/contrib/dolphin/expected/test_mysql_operator.out b/contrib/dolphin/expected/test_mysql_operator.out index ec62c1fd7c504969667f5de6cfefef0a8f5baadd..14cb64d1a968550f86a9b710a046b6dfa2fefa1d 100644 --- a/contrib/dolphin/expected/test_mysql_operator.out +++ b/contrib/dolphin/expected/test_mysql_operator.out @@ -1,7 +1,5 @@ -drop database if exists test_op_and; -NOTICE: database "test_op_and" does not exist, skipping -CREATE DATABASE test_op_and with dbcompatibility='B'; -\c test_op_and +create schema test_op_and; +set current_schema to 'test_op_and'; set dolphin.b_compatibility_mode = 1; set dolphin.sql_mode = 'sql_mode_strict,sql_mode_full_group'; ---create table @@ -1076,12 +1074,10 @@ drop table testforint2_p5; ---drop database set dolphin.b_compatibility_mode = 0; set dolphin.sql_mode = 'sql_mode_strict,sql_mode_full_group,pipes_as_concat'; -\c postgres -drop database test_op_and; -drop database if exists test_op_xor; -NOTICE: database "test_op_xor" does not exist, skipping -CREATE DATABASE test_op_xor with dbcompatibility='B'; -\c test_op_xor +drop schema test_op_and cascade; +reset current_schema; +create schema test_op_xor; +set current_schema to 'test_op_xor'; set dolphin.b_compatibility_mode = 1; select null^1; ?column? @@ -1555,12 +1551,10 @@ drop table testforint2_p4; drop table testforint2_p5; ---drop database set dolphin.b_compatibility_mode = 0; -\c postgres -drop database test_op_xor; -drop database if exists like_test; -NOTICE: database "like_test" does not exist, skipping -create database like_test DBCOMPATIBILITY 'b'; -\c like_test +drop schema test_op_xor cascade; +reset current_schema; +create schema like_test; +set current_schema to 'like_test'; set dolphin.b_compatibility_mode = 1; select 'a' like 'A'; ?column? 
@@ -2523,5 +2517,5 @@ select 10!; 3628800 (1 row) -\c postgres -drop database if exists like_test; +drop schema like_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_mysql_prepare.out b/contrib/dolphin/expected/test_mysql_prepare.out index 4251ab9a1ddbc281d35d1a5c22cfc293e212bbb8..d31f14a10e4f080cf9074085a1f2745441ab303e 100644 --- a/contrib/dolphin/expected/test_mysql_prepare.out +++ b/contrib/dolphin/expected/test_mysql_prepare.out @@ -1,7 +1,5 @@ -drop database if exists test_mysql_prepare; -NOTICE: database "test_mysql_prepare" does not exist, skipping -create database test_mysql_prepare dbcompatibility 'b'; -\c test_mysql_prepare +create schema test_mysql_prepare; +set current_schema to 'test_mysql_prepare'; create table test(name text, age int); insert into test values('a',18); prepare s1 as select * from test; @@ -95,11 +93,11 @@ explain (costs off, verbose on) execute p1; Nested Loop Output: prepare_table_01.a, prepare_table_01.b, prepare_table_02.a, prepare_table_02.b Join Filter: (prepare_table_01.a = prepare_table_02.a) - -> Seq Scan on public.prepare_table_01 + -> Seq Scan on test_mysql_prepare.prepare_table_01 Output: prepare_table_01.a, prepare_table_01.b -> Materialize Output: prepare_table_02.a, prepare_table_02.b - -> Seq Scan on public.prepare_table_02 + -> Seq Scan on test_mysql_prepare.prepare_table_02 Output: prepare_table_02.a, prepare_table_02.b (9 rows) @@ -123,12 +121,12 @@ explain (costs off, verbose on) execute p2(1); Nested Loop Output: prepare_table_01.a, prepare_table_01.b, prepare_table_02.a, prepare_table_02.b Join Filter: (prepare_table_01.a = prepare_table_02.a) - -> Seq Scan on public.prepare_table_01 + -> Seq Scan on test_mysql_prepare.prepare_table_01 Output: prepare_table_01.a, prepare_table_01.b Filter: (prepare_table_01.b = ($1)::numeric) -> Materialize Output: prepare_table_02.a, prepare_table_02.b - -> Seq Scan on public.prepare_table_02 + -> Seq Scan on test_mysql_prepare.prepare_table_02 
Output: prepare_table_02.a, prepare_table_02.b (10 rows) @@ -188,8 +186,8 @@ ERROR: wrong number of parameters for prepared statement "p2" DETAIL: Expected 1 parameters but got 2. prepare p5 as 'select a from prepare_table_01 INTERSECT select a from prepare_table_02 order by 1 limit 1'; explain (costs off, verbose on) execute p5; - QUERY PLAN -------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------- Limit Output: "*SELECT* 1".a, (0) -> Sort @@ -200,11 +198,11 @@ explain (costs off, verbose on) execute p5; -> Append -> Subquery Scan on "*SELECT* 1" Output: "*SELECT* 1".a, 0 - -> Seq Scan on public.prepare_table_01 + -> Seq Scan on test_mysql_prepare.prepare_table_01 Output: prepare_table_01.a -> Subquery Scan on "*SELECT* 2" Output: "*SELECT* 2".a, 1 - -> Seq Scan on public.prepare_table_02 + -> Seq Scan on test_mysql_prepare.prepare_table_02 Output: prepare_table_02.a (16 rows) @@ -222,11 +220,11 @@ explain (costs off, verbose on) execute p6; Output: prepare_table_01.a, prepare_table_01.b Group By Key: prepare_table_01.a, prepare_table_01.b -> Append - -> Seq Scan on public.prepare_table_01 + -> Seq Scan on test_mysql_prepare.prepare_table_01 Output: prepare_table_01.a, prepare_table_01.b -> Subquery Scan on "*SELECT* 2" Output: "*SELECT* 2".a, "*SELECT* 2".b - -> Seq Scan on public.prepare_table_02 + -> Seq Scan on test_mysql_prepare.prepare_table_02 Output: prepare_table_02.a, prepare_table_02.b (10 rows) @@ -328,16 +326,16 @@ insert into t1_xc_fqs values (1,1,1), (2,2,2), (3,3,3), (4,4,4), (5,5,5); explain (costs off, verbose on) execute s using 1,@a; QUERY PLAN ----------------------------------------------------------- - Seq Scan on public.t1_xc_fqs + Seq Scan on test_mysql_prepare.t1_xc_fqs Output: id1, id2, num Filter: ((t1_xc_fqs.id1 = $1) AND (t1_xc_fqs.id2 = $2)) (3 rows) explain (costs off, verbose on) execute i using 6,6,6; - QUERY PLAN 
----------------------------- + QUERY PLAN +---------------------------------------- [Bypass] - Insert on public.t1_xc_fqs + Insert on test_mysql_prepare.t1_xc_fqs -> Result Output: $1, $2, $3 (4 rows) @@ -345,8 +343,8 @@ explain (costs off, verbose on) execute i using 6,6,6; explain (costs off, verbose on) execute u using 2,@b; QUERY PLAN ----------------------------------------------------------------- - Update on public.t1_xc_fqs - -> Seq Scan on public.t1_xc_fqs + Update on test_mysql_prepare.t1_xc_fqs + -> Seq Scan on test_mysql_prepare.t1_xc_fqs Output: id1, id2, 0, ctid Filter: ((t1_xc_fqs.id1 = $1) AND (t1_xc_fqs.id2 = $2)) (4 rows) @@ -354,8 +352,8 @@ explain (costs off, verbose on) execute u using 2,@b; explain (costs off, verbose on) execute d using @c,@c; QUERY PLAN ----------------------------------------------------------------- - Delete on public.t1_xc_fqs - -> Seq Scan on public.t1_xc_fqs + Delete on test_mysql_prepare.t1_xc_fqs + -> Seq Scan on test_mysql_prepare.t1_xc_fqs Output: ctid Filter: ((t1_xc_fqs.id1 = $1) AND (t1_xc_fqs.id2 = $2)) (4 rows) @@ -414,16 +412,16 @@ insert into t1_xc_fqs values (1,1,1), (2,2,2), (3,3,3), (4,4,4), (5,5,5); explain (costs off, verbose on) execute s using @a; QUERY PLAN ---------------------------------------------------------- - Seq Scan on public.t1_xc_fqs + Seq Scan on test_mysql_prepare.t1_xc_fqs Output: id1, id2, num Filter: ((t1_xc_fqs.id1 = $1) AND (t1_xc_fqs.id2 = 2)) (3 rows) explain (costs off, verbose on) execute i using 6; - QUERY PLAN ----------------------------- + QUERY PLAN +---------------------------------------- [Bypass] - Insert on public.t1_xc_fqs + Insert on test_mysql_prepare.t1_xc_fqs -> Result Output: $1, 2, 3 (4 rows) @@ -431,8 +429,8 @@ explain (costs off, verbose on) execute i using 6; explain (costs off, verbose on) execute u using 2; QUERY PLAN ---------------------------------------------------------------- - Update on public.t1_xc_fqs - -> Seq Scan on public.t1_xc_fqs + Update on 
test_mysql_prepare.t1_xc_fqs + -> Seq Scan on test_mysql_prepare.t1_xc_fqs Output: id1, id2, 1, ctid Filter: ((t1_xc_fqs.id1 = $1) AND (t1_xc_fqs.id2 = 2)) (4 rows) @@ -440,8 +438,8 @@ explain (costs off, verbose on) execute u using 2; explain (costs off, verbose on) execute d using @c; QUERY PLAN ---------------------------------------------------------------- - Delete on public.t1_xc_fqs - -> Seq Scan on public.t1_xc_fqs + Delete on test_mysql_prepare.t1_xc_fqs + -> Seq Scan on test_mysql_prepare.t1_xc_fqs Output: ctid Filter: ((t1_xc_fqs.id1 = $1) AND (t1_xc_fqs.id2 = 2)) (4 rows) @@ -510,12 +508,12 @@ explain (costs off, verbose on) execute s0 using @a,@a; ---------------------------------------------------------- Nested Loop Output: t1.id1, t1.id2, t1.num, t2.id1, t2.id2, t2.num - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num Filter: (t1.id1 = $1) -> Materialize Output: t2.id1, t2.id2, t2.num - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1, t2.id2, t2.num Filter: (t2.id1 = $2) (10 rows) @@ -525,33 +523,33 @@ explain (costs off, verbose on) execute s0 using @a,3; ---------------------------------------------------------- Nested Loop Output: t1.id1, t1.id2, t1.num, t2.id1, t2.id2, t2.num - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num Filter: (t1.id1 = $1) -> Materialize Output: t2.id1, t2.id2, t2.num - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1, t2.id2, t2.num Filter: (t2.id1 = $2) (10 rows) explain (costs off, verbose on) execute s1 using 2,@b,2; - QUERY PLAN ----------------------------------------------------- + QUERY PLAN +--------------------------------------------------------------- Nested Loop Output: t3.id11 -> Nested Loop - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 
Output: t1.id1, t1.id2, t1.num Filter: (t1.id1 = $1) -> Materialize Output: t2.id1, t2.id2, t2.num - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1, t2.id2, t2.num Filter: (t2.id1 = $2) -> Materialize Output: t3.id11 - -> Seq Scan on public.t3_xc_fqs t3 + -> Seq Scan on test_mysql_prepare.t3_xc_fqs t3 Output: t3.id11 Filter: (t3.id11 = $3) (16 rows) @@ -561,55 +559,55 @@ explain (costs off, verbose on) execute s2 using @c; ---------------------------------------------------------- Nested Loop Output: t1.id1, t1.id2, t1.num, t2.id1, t2.id2, t2.num - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num Filter: (t1.id1 = $1) -> Materialize Output: t2.id1, t2.id2, t2.num - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1, t2.id2, t2.num Filter: (t2.id1 = $1) (10 rows) explain (costs off, verbose on) execute s3 using 4; - QUERY PLAN ---------------------------------------------------- + QUERY PLAN +--------------------------------------------------------------- Hash Join Output: t3.id11 Hash Cond: (t1.id1 = t2.id1) -> Nested Loop Output: t3.id11, t1.id1 - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num -> Materialize Output: t3.id11 - -> Seq Scan on public.t3_xc_fqs t3 + -> Seq Scan on test_mysql_prepare.t3_xc_fqs t3 Output: t3.id11 Filter: (t3.id11 = $1) -> Hash Output: t2.id1 - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1 (16 rows) explain (costs off, verbose on) execute s4 using 5; - QUERY PLAN ---------------------------------------------------- + QUERY PLAN +--------------------------------------------------------------- Nested Loop Output: t3.id11 -> Nested Loop Output: t1.id1 - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, 
t1.num Filter: (t1.id1 = $1) -> Materialize Output: t2.id1 - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1 Filter: (t2.id1 = $1) -> Materialize Output: t3.id11 - -> Seq Scan on public.t3_xc_fqs t3 + -> Seq Scan on test_mysql_prepare.t3_xc_fqs t3 Output: t3.id11 Filter: (t3.id11 = $1) (17 rows) @@ -619,10 +617,10 @@ explain (costs off, verbose on) execute s5 using 4,5; ---------------------------------------------------------- Nested Loop Output: t1.id1, t1.id2, t1.num, t2.id1, t2.id2, t2.num - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1, t2.id2, t2.num Filter: ((t2.id2 = $2) AND (t2.id1 = $1)) - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num Filter: (t1.id1 = $2) (8 rows) @@ -691,12 +689,12 @@ explain (costs off, verbose on) execute s0 using @a,@a; ---------------------------------------------------------- Nested Loop Output: t1.id1, t1.id2, t1.num, t2.id1, t2.id2, t2.num - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num Filter: (t1.id1 = $1) -> Materialize Output: t2.id1, t2.id2, t2.num - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1, t2.id2, t2.num Filter: (t2.id1 = $2) (10 rows) @@ -706,33 +704,33 @@ explain (costs off, verbose on) execute s0 using @a,@c; ---------------------------------------------------------- Nested Loop Output: t1.id1, t1.id2, t1.num, t2.id1, t2.id2, t2.num - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num Filter: (t1.id1 = $1) -> Materialize Output: t2.id1, t2.id2, t2.num - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1, t2.id2, t2.num Filter: (t2.id1 = $2) (10 rows) explain (costs off, verbose on) execute s1 using 2,@b,2; - QUERY 
PLAN ----------------------------------------------------- + QUERY PLAN +--------------------------------------------------------------- Nested Loop Output: t3.id11 -> Nested Loop - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num Filter: (t1.id1 = $1) -> Materialize Output: t2.id1, t2.id2, t2.num - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1, t2.id2, t2.num Filter: (t2.id1 = $2) -> Materialize Output: t3.id11 - -> Seq Scan on public.t3_xc_fqs t3 + -> Seq Scan on test_mysql_prepare.t3_xc_fqs t3 Output: t3.id11 Filter: (t3.id11 = $3) (16 rows) @@ -742,55 +740,55 @@ explain (costs off, verbose on) execute s2 using @c; ---------------------------------------------------------- Nested Loop Output: t1.id1, t1.id2, t1.num, t2.id1, t2.id2, t2.num - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num Filter: (t1.id1 = $1) -> Materialize Output: t2.id1, t2.id2, t2.num - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1, t2.id2, t2.num Filter: (t2.id1 = $1) (10 rows) explain (costs off, verbose on) execute s3 using 4; - QUERY PLAN ---------------------------------------------------- + QUERY PLAN +--------------------------------------------------------------- Hash Join Output: t3.id11 Hash Cond: (t1.id1 = t2.id1) -> Nested Loop Output: t3.id11, t1.id1 - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num -> Materialize Output: t3.id11 - -> Seq Scan on public.t3_xc_fqs t3 + -> Seq Scan on test_mysql_prepare.t3_xc_fqs t3 Output: t3.id11 Filter: (t3.id11 = $1) -> Hash Output: t2.id1 - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1 (16 rows) explain (costs off, verbose on) execute s4 using 5; - QUERY PLAN 
---------------------------------------------------- + QUERY PLAN +--------------------------------------------------------------- Nested Loop Output: t3.id11 -> Nested Loop Output: t1.id1 - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num Filter: (t1.id1 = $1) -> Materialize Output: t2.id1 - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1 Filter: (t2.id1 = $1) -> Materialize Output: t3.id11 - -> Seq Scan on public.t3_xc_fqs t3 + -> Seq Scan on test_mysql_prepare.t3_xc_fqs t3 Output: t3.id11 Filter: (t3.id11 = $1) (17 rows) @@ -800,10 +798,10 @@ explain (costs off, verbose on) execute s5 using 4,5; ---------------------------------------------------------- Nested Loop Output: t1.id1, t1.id2, t1.num, t2.id1, t2.id2, t2.num - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1, t2.id2, t2.num Filter: ((t2.id2 = $2) AND (t2.id1 = $1)) - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num Filter: (t1.id1 = $2) (8 rows) @@ -872,12 +870,12 @@ explain (costs off, verbose on) execute s0 using @a; -> Sort Output: t1.id1, t1.id2, t1.num Sort Key: t1.id1 - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num -> Sort Output: t2.id1, t2.id2, t2.num Sort Key: t2.id1 - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1, t2.id2, t2.num Filter: (t2.id1 = $1) (14 rows) @@ -892,11 +890,11 @@ explain (costs off, verbose on) execute s1 using 1; Output: t1.id1, t1.id2, t1.num, t2.id1, t2.id2, t2.num, t2.id1 Hash Cond: (t2.id1 = t1.id1) Join Filter: (t2.id1 = $1) - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1, t2.id2, t2.num -> Hash Output: t1.id1, t1.id2, t1.num - -> Seq Scan on public.t1_xc_fqs t1 + -> 
Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num (13 rows) @@ -910,11 +908,11 @@ explain (costs off, verbose on) execute s2 using 1; Output: t1.id1, t1.id2, t1.num, t2.id1, t2.id2, t2.num, t1.id1, t2.id1 Hash Cond: (t1.id1 = t2.id1) Join Filter: (t2.id1 = $1) - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num -> Hash Output: t2.id1, t2.id2, t2.num - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1, t2.id2, t2.num (13 rows) @@ -957,5 +955,9 @@ deallocate s1; deallocate s2; reset dolphin.b_compatibility_mode; reset enable_set_variable_b_format; -\c postgres -drop database test_mysql_prepare; +drop schema test_mysql_prepare cascade; +NOTICE: drop cascades to 3 other objects +DETAIL: drop cascades to table t1_xc_fqs +drop cascades to table t2_xc_fqs +drop cascades to table t3_xc_fqs +reset current_schema; diff --git a/contrib/dolphin/expected/test_op_blob.out b/contrib/dolphin/expected/test_op_blob.out index daee4e2a87fc0a7f529a85f8ed329e75418331d0..602e43e46c7e303991c2b189a70fdad187d59bca 100644 --- a/contrib/dolphin/expected/test_op_blob.out +++ b/contrib/dolphin/expected/test_op_blob.out @@ -1,7 +1,5 @@ -drop database if exists test_op_blob; -NOTICE: database "test_op_blob" does not exist, skipping -create database test_op_blob dbcompatibility 'b'; -\c test_op_blob +create schema test_op_blob; +set current_schema to 'test_op_blob'; select '1'::blob ^ '1'::blob; ?column? 
---------- @@ -81,5 +79,5 @@ select '1'::blob ^ 11::numeric; 10 (1 row) -\c postgres -drop database test_op_blob; +drop schema test_op_blob cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_op_xor_boolandfloat.out b/contrib/dolphin/expected/test_op_xor_boolandfloat.out index 09ac502045249a2cb2fd8f9555e20512b528b98d..c72f6447d8b1d1699b3ef617eada9d3a5d97fc88 100644 --- a/contrib/dolphin/expected/test_op_xor_boolandfloat.out +++ b/contrib/dolphin/expected/test_op_xor_boolandfloat.out @@ -1,7 +1,5 @@ -drop database if exists test_bool_float; -NOTICE: database "test_bool_float" does not exist, skipping -create database test_bool_float dbcompatibility 'b'; -\c test_bool_float +create schema test_bool_float; +set current_schema to 'test_bool_float'; set dolphin.b_compatibility_mode = true; select 1::bool ^ 2::int1; ?column? @@ -862,5 +860,5 @@ select (-1)::numeric ^ (-2)::numeric; (1 row) set dolphin.b_compatibility_mode = false; -\c postgres -drop database test_bool_float; +drop schema test_bool_float cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_op_xor_unsignedint.out b/contrib/dolphin/expected/test_op_xor_unsignedint.out index 3fb73844af96c13cd0476c5148fe7e93c2b06b42..229697f0ca736ff1deb9f2e07a9b25fe7fa6e0a2 100644 --- a/contrib/dolphin/expected/test_op_xor_unsignedint.out +++ b/contrib/dolphin/expected/test_op_xor_unsignedint.out @@ -1,7 +1,5 @@ -drop database if exists test_op_xor_unsignedint; -NOTICE: database "test_op_xor_unsignedint" does not exist, skipping -create database test_op_xor_unsignedint with dbcompatibility='B'; -\c test_op_xor_unsignedint +create schema test_op_xor_unsignedint; +set current_schema to 'test_op_xor_unsignedint'; select (-1)::uint1 ^ 2::int1; ?column? 
---------- @@ -740,5 +738,5 @@ select 2 ::uint8 ^ 2::varchar; 0 (1 row) -\c postgres -drop database test_op_xor_unsignedint; +drop schema test_op_xor_unsignedint cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_optimize.out b/contrib/dolphin/expected/test_optimize.out index da70b50645c1400977d43c7a6e66aeba0d7dbd5a..2bca4797b21c27723d4f45924f15577d1926306f 100644 --- a/contrib/dolphin/expected/test_optimize.out +++ b/contrib/dolphin/expected/test_optimize.out @@ -1,7 +1,5 @@ -drop database if exists db_optimize; -NOTICE: database "db_optimize" does not exist, skipping -create database db_optimize dbcompatibility 'b'; -\c db_optimize +create schema db_optimize; +set current_schema to 'db_optimize'; create table doc(id serial primary key, content varchar(255)); NOTICE: CREATE TABLE will create implicit sequence "doc_id_seq" for serial column "doc.id" NOTICE: CREATE TABLE / PRIMARY KEY will create implicit index "doc_pkey" for table "doc" @@ -12,5 +10,5 @@ drop table doc; set xc_maintenance_mode = on; optimize table pg_class; set xc_maintenance_mode = off; -\c postgres -drop database if exists db_optimize; +drop schema db_optimize cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_schema.out b/contrib/dolphin/expected/test_schema.out index de83392ff3849870b7c104b4c45940f4e5778bae..4815344decb0574763a57d5aa08265602080ff4b 100644 --- a/contrib/dolphin/expected/test_schema.out +++ b/contrib/dolphin/expected/test_schema.out @@ -1,12 +1,10 @@ -drop database if exists schema_test; -NOTICE: database "schema_test" does not exist, skipping -create database schema_test dbcompatibility 'b'; -\c schema_test +create schema schema_test; +set current_schema to 'schema_test'; SELECT SCHEMA(); - schema --------- - public + schema +------------- + schema_test (1 row) -\c postgres -drop database if exists schema_test; +drop schema schema_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_set_charset.out 
b/contrib/dolphin/expected/test_set_charset.out index c92f9957d0b0b86b236328d769945035cd548f30..b7f15c87ffa476524089d8b35b105b9b26ebc836 100644 --- a/contrib/dolphin/expected/test_set_charset.out +++ b/contrib/dolphin/expected/test_set_charset.out @@ -1,7 +1,5 @@ -drop database if exists db_charset; -NOTICE: database "db_charset" does not exist, skipping -create database db_charset dbcompatibility 'b'; -\c db_charset +create schema db_charset; +set current_schema to 'db_charset'; show client_encoding; client_encoding ----------------- @@ -64,5 +62,5 @@ show client_encoding; UTF8 (1 row) -\c postgres -drop database if exists db_charset; +drop schema db_charset cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_shows.out b/contrib/dolphin/expected/test_shows.out index b1b59c76e1b86f41e7347a494c7ebb89644fca33..acc5af398493101e93402e107bb0d1a4ec833b45 100644 --- a/contrib/dolphin/expected/test_shows.out +++ b/contrib/dolphin/expected/test_shows.out @@ -1,7 +1,5 @@ -drop database if exists db_show; -NOTICE: database "db_show" does not exist, skipping -create database db_show dbcompatibility 'b'; -\c db_show +create schema db_show; +set current_schema to 'db_show'; show processlist; --? 
Id | Pid | QueryId | UniqueSqlId | User | Host | db | .* | BackendStart | XactStart | .* | State | .* --?.* @@ -28,5 +26,5 @@ show full processlist; --?.* (8 rows) -\c postgres -drop database if exists db_show; +drop schema db_show cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_shows_3.out b/contrib/dolphin/expected/test_shows_3.out index 5f6941226e663a43cd09fcb9ba39d1fe7b854c43..6014cce6b9a0108d0c753c260b7b3d45e5fbcda6 100644 --- a/contrib/dolphin/expected/test_shows_3.out +++ b/contrib/dolphin/expected/test_shows_3.out @@ -1,7 +1,5 @@ -drop database if exists db_show_3; -NOTICE: database "db_show_3" does not exist, skipping -create database db_show_3 dbcompatibility 'b'; -\c db_show_3 +create schema db_show_3; +set current_schema to 'db_show_3'; show databases; Database -------------------- @@ -9,6 +7,8 @@ show databases; blockchain cstore db4ai + db_b_new_gram_test + db_show_3 dbe_perf dbe_pldebugger dbe_pldeveloper @@ -17,9 +17,10 @@ show databases; pg_toast pkg_service public + sc snapshot sqladvisor -(14 rows) +(17 rows) create schema aa1; create schema aa2; @@ -89,7 +90,8 @@ show databases; u2 (9 rows) -\c postgres -drop database if exists db_show_3; +\c contrib_regression drop user u1; drop user u2; +drop schema db_show_3 cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_shows_4.out b/contrib/dolphin/expected/test_shows_4.out index 11cde940439da02f9bb54905ec8202c8652a0814..e8162331f799f384884a852e47d03283ec3f93df 100644 --- a/contrib/dolphin/expected/test_shows_4.out +++ b/contrib/dolphin/expected/test_shows_4.out @@ -1,7 +1,5 @@ -drop database if exists db_show_4; -NOTICE: database "db_show_4" does not exist, skipping -create database db_show_4 dbcompatibility 'b'; -\c db_show_4 +create schema db_show_4; +set current_schema to 'db_show_4'; show master status; Xlog_File_Name | Xlog_File_Offset | Xlog_Lsn --------------------------+------------------+----------- @@ -13,5 +11,5 @@ show slave hosts; 
-----+----------+---------+------------------+-------------+-----------------+-------------+---------------+-------+----------------------+-------------------------+-------------------------+--------------------------+---------------+------------ (0 rows) -\c postgres -drop database if exists db_show_4; +drop schema db_show_4 cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_shows_5.out b/contrib/dolphin/expected/test_shows_5.out index 8ae176e49c399602c3dacf636a8ec95d2fc5939a..caa48fbda71f97b6a61dd82617b571c6b5ac1f4a 100644 --- a/contrib/dolphin/expected/test_shows_5.out +++ b/contrib/dolphin/expected/test_shows_5.out @@ -1,10 +1,8 @@ -DROP DATABASE IF EXISTS db_show_5; -NOTICE: database "db_show_5" does not exist, skipping -CREATE DATABASE db_show_5 DBCOMPATIBILITY 'b'; -\c db_show_5 +create schema db_show_5; +set current_schema to 'db_show_5'; CREATE SCHEMA tst_schema5; --orientation=row, normal primary key -CREATE TABLE public.t1 +CREATE TABLE db_show_5.t1 ( id int primary key, name varchar(20), @@ -12,7 +10,7 @@ phone text ) WITH(ORIENTATION=ROW, STORAGE_TYPE=USTORE); NOTICE: CREATE TABLE / PRIMARY KEY will create implicit index "t1_pkey" for table "t1" --orientation=column, serial primary key -CREATE TABLE public.t2 +CREATE TABLE db_show_5.t2 ( id serial primary key, name varchar(20), @@ -50,9 +48,7 @@ SHOW TABLE STATUS; --?.* --?.* --?.* ---?.* ---?.* -(4 rows) +(2 rows) SHOW TABLE STATUS FROM tst_schema5; --? 
Name | Engine | Version | Row_format | Rows | Avg_row_length | Data_length | Max_data_length | Index_length | Data_free | Auto_increment | Create_time | Update_time | Check_time | Collation | Checksum | Create_options | Comment @@ -119,5 +115,8 @@ RESET ROLE; REVOKE SELECT ON ALL TABLES IN SCHEMA tst_schema5 FROM tst_shows_u5; REVOKE SELECT ON ALL SEQUENCES IN SCHEMA tst_schema5 FROM tst_shows_u5; DROP USER tst_shows_u5; -\c postgres -DROP DATABASE IF EXISTS db_show_5; +drop schema db_show_5 cascade; +NOTICE: drop cascades to 2 other objects +DETAIL: drop cascades to table t1 +drop cascades to table t2 +reset current_schema; diff --git a/contrib/dolphin/expected/test_system_user.out b/contrib/dolphin/expected/test_system_user.out index 9153494c162579acad1f14a03a5096b0c2f6afa5..5711d4a7b7375088494c4176f77994504d4d98ff 100644 --- a/contrib/dolphin/expected/test_system_user.out +++ b/contrib/dolphin/expected/test_system_user.out @@ -1,7 +1,5 @@ -drop database if exists test_system_user; -NOTICE: database "test_system_user" does not exist, skipping -create database test_system_user dbcompatibility 'b'; -\c test_system_user +create schema test_system_user; +set current_schema to 'test_system_user'; select session_user; session_user -------------- @@ -32,5 +30,5 @@ select system_user(); --?.* (1 row) -\c postgres -drop database test_system_user; +drop schema test_system_user cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_table_index.out b/contrib/dolphin/expected/test_table_index.out index 4e8cf458766f0d06acde49cc0451266779765874..f2741889f301727856199a999c4beea71d59e96e 100644 --- a/contrib/dolphin/expected/test_table_index.out +++ b/contrib/dolphin/expected/test_table_index.out @@ -1,8 +1,5 @@ ----- b compatibility case -drop database if exists test_table_index; -NOTICE: database "test_table_index" does not exist, skipping -create database test_table_index dbcompatibility 'b'; -\c test_table_index +create schema test_table_index; +set 
current_schema to 'test_table_index'; -- test crate normal table create table t1(f1 int , index(f1)); create table t2(f1 int , index idx_f2(f1)); @@ -10,7 +7,7 @@ create table t3(f1 int , index idx_f3 using btree (f1)); create table t4(f1 int , index idx_f4 using btree (f1 desc)); create table t5(f1 int , key idx_f5 using btree (f1 asc)); \d+ t1 - Table "public.t1" + Table "test_table_index.t1" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -20,7 +17,7 @@ Has OIDs: no Options: orientation=row, compression=no \d+ t2 - Table "public.t2" + Table "test_table_index.t2" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -30,7 +27,7 @@ Has OIDs: no Options: orientation=row, compression=no \d+ t3 - Table "public.t3" + Table "test_table_index.t3" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -40,7 +37,7 @@ Has OIDs: no Options: orientation=row, compression=no \d+ t4 - Table "public.t4" + Table "test_table_index.t4" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -50,7 +47,7 @@ Has OIDs: no Options: orientation=row, compression=no \d+ t5 - Table "public.t5" + Table "test_table_index.t5" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -68,7 +65,7 @@ create table test_expr1(f1 int, f2 int, f3 int, index tbl_idx1 using btree(f1 de create table test_expr2(f1 int, f2 int, f3 int, index tbl_idx2 using btree((abs(f1)) desc, f2 asc)); create table test_expr3(f1 int, f2 int, f3 int, index tbl_idx3 using 
btree((abs(f1)+10) desc, f2 asc)); \d+ test_expr1 - Table "public.test_expr1" + Table "test_table_index.test_expr1" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -80,7 +77,7 @@ Has OIDs: no Options: orientation=row, compression=no \d+ test_expr2 - Table "public.test_expr2" + Table "test_table_index.test_expr2" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -92,7 +89,7 @@ Has OIDs: no Options: orientation=row, compression=no \d+ test_expr3 - Table "public.test_expr3" + Table "test_table_index.test_expr3" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -113,12 +110,12 @@ create table text_column_table(f11 int, f12 varchar(20), f13 bool, index (f11)) create table test_gist (t tsquery, s tsvector,index using gist(t)); create table test_gin (t tsquery, s tsvector,index using gin(s)); create table text_column_table(f1 int, index(f1)) with (orientation=column); -ERROR: relation "text_column_table" already exists in schema "public" +ERROR: relation "text_column_table" already exists in schema "test_table_index" DETAIL: creating new table with existing name in the same schema create table text_column_table_expr(f1 int, unique((f1+1))) with (orientation=column); ERROR: access method "cbtree" does not support index expressions \d+ test_ubtree - Table "public.test_ubtree" + Table "test_table_index.test_ubtree" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -130,7 +127,7 @@ Has OIDs: no Options: orientation=row, storage_type=ustore, compression=no \d+ test_gist - Table "public.test_gist" + Table 
"test_table_index.test_gist" Column | Type | Modifiers | Storage | Stats target | Description --------+----------+-----------+----------+--------------+------------- t | tsquery | | plain | | @@ -141,7 +138,7 @@ Has OIDs: no Options: orientation=row, compression=no \d+ test_gin - Table "public.test_gin" + Table "test_table_index.test_gin" Column | Type | Modifiers | Storage | Stats target | Description --------+----------+-----------+----------+--------------+------------- t | tsquery | | plain | | @@ -152,7 +149,7 @@ Has OIDs: no Options: orientation=row, compression=no \d+ text_column_table - Table "public.text_column_table" + Table "test_table_index.text_column_table" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f11 | integer | | plain | | @@ -185,7 +182,7 @@ PARTITION BY RANGE(f1) PARTITION P4 VALUES LESS THAN(MAXVALUE) ); \d+ test_partition_btree - Table "public.test_partition_btree" + Table "test_table_index.test_partition_btree" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -214,7 +211,7 @@ PARTITION BY RANGE(f1) PARTITION P4 VALUES LESS THAN(MAXVALUE) ); \d+ test_partition_index - Table "public.test_partition_index" + Table "test_table_index.test_partition_index" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -243,7 +240,7 @@ PARTITION BY RANGE(f1) PARTITION P4 VALUES LESS THAN(MAXVALUE) ); \d+ test_partition_func - Table "public.test_partition_func" + Table "test_table_index.test_partition_func" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -272,7 +269,7 @@ PARTITION BY RANGE(f1) PARTITION P4 VALUES LESS 
THAN(MAXVALUE) ); \d+ test_partition_expr - Table "public.test_partition_expr" + Table "test_table_index.test_partition_expr" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -301,7 +298,7 @@ PARTITION BY RANGE(f1) PARTITION P4 VALUES LESS THAN(MAXVALUE) ); \d+ test_partition_column - Table "public.test_partition_column" + Table "test_table_index.test_partition_column" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -338,7 +335,7 @@ SUBPARTITION p_201902_b VALUES ( '2' ) ) ); \d+ test_subpartition_btree - Table "public.test_subpartition_btree" + Table "test_table_index.test_subpartition_btree" Column | Type | Modifiers | Storage | Stats target | Description ------------+-----------------------+-----------+----------+--------------+------------- month_code | character varying(30) | not null | extended | | @@ -376,7 +373,7 @@ SUBPARTITION p_201902_b VALUES ( '2' ) ) ); \d+ test_subpartition_btree - Table "public.test_subpartition_btree" + Table "test_table_index.test_subpartition_btree" Column | Type | Modifiers | Storage | Stats target | Description ------------+-----------------------+-----------+----------+--------------+------------- month_code | character varying(30) | not null | extended | | @@ -427,7 +424,7 @@ create table t1(a int , b int, index (a, b)); alter table t1 add index (a); alter table t1 add index idx_a_1(a); \d+ t1 - Table "public.t1" + Table "test_table_index.t1" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -445,7 +442,7 @@ alter table t1 add index using btree(a), add index (b desc); alter table t1 add index idx_a_t1 using btree(a); alter table t1 add index idx_a_b_t1 using btree(a, b desc); \d+ t1 - 
Table "public.t1" + Table "test_table_index.t1" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -464,7 +461,7 @@ create table t1(a int , b int, index (a, b)); alter table t1 add key idx_a_b_t1 using btree(a, b desc); alter table t1 add key idx_a_b_expr_t1 using btree((abs(a+b) + a) desc); \d+ t1 - Table "public.t1" + Table "test_table_index.t1" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -486,7 +483,7 @@ ERROR: syntax error at or near "idx1" LINE 1: alter table t1 add index using btree idx1 (a); ^ \d+ t1 - Table "public.t1" + Table "test_table_index.t1" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -507,7 +504,7 @@ alter table test_normal_index add key(f11 desc, f12 asc); alter table test_normal_index add key using btree(f11, f12),add unique(f11, f12),add primary key(f11, f12); NOTICE: ALTER TABLE / ADD PRIMARY KEY will create implicit index "test_normal_index_pkey" for table "test_normal_index" \d+ test_normal_index - Table "public.test_normal_index" + Table "test_table_index.test_normal_index" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f11 | integer | not null | plain | | @@ -536,7 +533,7 @@ alter table text_column_table add key (f11,f12); alter table text_column_table add key using btree(f11); alter table text_column_table add key using cbtree(f11); \d+ text_column_table - Table "public.text_column_table" + Table "test_table_index.text_column_table" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f11 | integer 
| | plain | | @@ -575,7 +572,7 @@ alter table test_partition_btree add index (f1 desc); alter table test_partition_btree add key using btree(f1 desc, f2 asc, f3); alter table test_partition_btree add key using btree((abs(f1)) desc, (f2 * 2 + 1) asc, f3); \d+ test_partition_btree - Table "public.test_partition_btree" + Table "test_table_index.test_partition_btree" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -606,7 +603,7 @@ drop table if exists test_temporary_index1; create table test_option1(a int, b int, index idx_op1 using btree(a) comment 'yy'); alter table test_option1 add key ixd_at1 (b) comment 'aa'; \d+ test_option1 - Table "public.test_option1" + Table "test_table_index.test_option1" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -650,7 +647,7 @@ alter table test_option2 add index ixd_at2 using hash(b) comment 'aa' comment 'b create table test_option3(a int, b int, index idx_op3 (a) using btree); alter table test_option3 add index ixd_at3(b) using btree; \d+ test_option3 - Table "public.test_option3" + Table "test_table_index.test_option3" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -666,7 +663,7 @@ alter table test_option4 add index ixd_at4 using hash (b) using btree using hash NOTICE: ALTER TABLE / ADD PRIMARY KEY will create implicit index "test_option4_pkey" for table "test_option4" NOTICE: ALTER TABLE / ADD UNIQUE will create implicit index "test_option4_a_key" for table "test_option4" \d+ test_option4 - Table "public.test_option4" + Table "test_table_index.test_option4" Column | Type | Modifiers | Storage | Stats target | Description 
--------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -685,7 +682,7 @@ alter table test_option5 add index ixd_at5 using hash (b) using btree comment 'y NOTICE: ALTER TABLE / ADD PRIMARY KEY will create implicit index "test_option5_pkey" for table "test_option5" NOTICE: ALTER TABLE / ADD UNIQUE will create implicit index "test_option5_a_key" for table "test_option5" \d+ test_option5 - Table "public.test_option5" + Table "test_table_index.test_option5" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -715,7 +712,7 @@ Options: orientation=row, compression=no create table test_option6(a int, b int, key idx_op6 using hash(a) using btree comment 'yy' using hash comment 'xx'); \d+ test_option6 - Table "public.test_option6" + Table "test_table_index.test_option6" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -734,7 +731,7 @@ Options: orientation=row, compression=no create table test_option7(a int, b int, index idx_op7 (a) using btree comment 'yy' using hash comment 'xx'); \d+ test_option7 - Table "public.test_option7" + Table "test_table_index.test_option7" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -747,7 +744,7 @@ Options: orientation=row, compression=no create table test_option8(a int, b int, c int, key idx_op8_a (a) using btree comment 'yy' using hash comment 'xx', index idx_op8_b (b) using btree comment 'yy' using hash comment 'xx'); alter table test_option8 add index ixd_at8_b(b) using btree comment 'yy' using hash comment 'xx', add index ixd_at8_c (c) using btree comment 'yy' using hash comment 'xx'; \d+ test_option8 - Table "public.test_option8" + Table 
"test_table_index.test_option8" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -776,7 +773,7 @@ PARTITION BY RANGE(f1) PARTITION P4 VALUES LESS THAN(MAXVALUE) ); \d+ test_option9 - Table "public.test_option9" + Table "test_table_index.test_option9" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -811,7 +808,7 @@ PARTITION BY RANGE(f1) PARTITION P4 VALUES LESS THAN(MAXVALUE) ); \d+ test_option10 - Table "public.test_option10" + Table "test_table_index.test_option10" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -850,5 +847,17 @@ alter table test_option1 add key ixd_at13 using btree (b) using aaa using btree; ERROR: access method "aaa" does not exist alter table test_option1 add key ixd_at14 using btree (b) comment 'xx' using aaa using btree; ERROR: access method "aaa" does not exist -\c contrib_regression -DROP DATABASE test_table_index; +drop schema test_table_index cascade; +NOTICE: drop cascades to 11 other objects +DETAIL: drop cascades to table test_option1 +drop cascades to table test_option2 +drop cascades to table test_option3 +drop cascades to table test_option4 +drop cascades to table test_option5 +drop cascades to table test_option6 +drop cascades to table test_option7 +drop cascades to table test_option8 +drop cascades to table test_option9 +drop cascades to table test_option10 +drop cascades to table test_option11 +reset current_schema; diff --git a/contrib/dolphin/expected/tinyint_agg.out b/contrib/dolphin/expected/tinyint_agg.out index 95b9c21035aa8adf249522c4b559f1f5ac868e5b..5a949de4cd8915060b548873f494c9bb3685227e 100644 --- a/contrib/dolphin/expected/tinyint_agg.out +++ 
b/contrib/dolphin/expected/tinyint_agg.out @@ -1,7 +1,5 @@ -drop database if exists tinyint_agg; -NOTICE: database "tinyint_agg" does not exist, skipping -create database tinyint_agg dbcompatibility 'b'; -\c tinyint_agg +create schema tinyint_agg; +set current_schema to 'tinyint_agg'; create table u1(a int1, b int2); insert into u1 values(null, null),(127, 127),(0, 0),(-128, -128),(null, null); select avg(a), avg(b) from u1; @@ -110,54 +108,54 @@ insert into t1 select generate_series(1, 1000000); insert into smp_test select a % 128 from t1; set query_dop = 2; explain(costs off, verbose) select avg(a) from smp_test; - QUERY PLAN ------------------------------------------------ + QUERY PLAN +---------------------------------------------------- Aggregate Output: pg_catalog.avg((avg(a))) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: (avg(a)) -> Aggregate Output: avg(a) - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (8 rows) explain(costs off, verbose) select bit_and(a) from smp_test; - QUERY PLAN ------------------------------------------------ + QUERY PLAN +---------------------------------------------------- Aggregate Output: bit_and((bit_and(a))) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: (bit_and(a)) -> Aggregate Output: bit_and(a) - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (8 rows) explain(costs off, verbose) select bit_or(a) from smp_test; - QUERY PLAN ------------------------------------------------ + QUERY PLAN +---------------------------------------------------- Aggregate Output: bit_or((bit_or(a))) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: (bit_or(a)) -> Aggregate Output: bit_or(a) - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (8 rows) explain(costs off, verbose) select count(a) from smp_test; - QUERY PLAN ------------------------------------------------ + QUERY PLAN +---------------------------------------------------- Aggregate 
Output: count((count(a))) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: (count(a)) -> Aggregate Output: count(a) - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (8 rows) @@ -179,33 +177,33 @@ explain(costs off, verbose) select count(distinct a) from smp_test; -> HashAggregate Output: a Group By Key: smp_test.a - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (17 rows) explain(costs off, verbose) select max(a) from smp_test; - QUERY PLAN ------------------------------------------------ + QUERY PLAN +---------------------------------------------------- Aggregate Output: max((max(a))) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: (max(a)) -> Aggregate Output: max(a) - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (8 rows) explain(costs off, verbose) select min(a)from smp_test; - QUERY PLAN --------------------------------------------------- + QUERY PLAN +---------------------------------------------------- Aggregate Output: min((min((a)::double precision))) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: (min((a)::double precision)) -> Aggregate Output: min((a)::double precision) - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (8 rows) @@ -218,20 +216,20 @@ explain(costs off, verbose) select stddev(a) from smp_test; Output: (stddev((a)::double precision)) -> Aggregate Output: stddev((a)::double precision) - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (8 rows) explain(costs off, verbose) select stddev_pop(a) from smp_test; - QUERY PLAN --------------------------------------------------- + QUERY PLAN +---------------------------------------------------- Aggregate Output: pg_catalog.stddev_pop((stddev_pop(a))) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: (stddev_pop(a)) -> Aggregate Output: stddev_pop(a) - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (8 rows) 
@@ -244,59 +242,59 @@ explain(costs off, verbose) select stddev_samp(a) from smp_test; Output: (stddev_samp((a)::double precision)) -> Aggregate Output: stddev_samp((a)::double precision) - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (8 rows) explain(costs off, verbose) select sum(a)from smp_test; - QUERY PLAN --------------------------------------------------- + QUERY PLAN +---------------------------------------------------- Aggregate Output: sum((sum((a)::double precision))) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: (sum((a)::double precision)) -> Aggregate Output: sum((a)::double precision) - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (8 rows) explain(costs off, verbose) select var_pop(a) from smp_test; - QUERY PLAN ------------------------------------------------ + QUERY PLAN +---------------------------------------------------- Aggregate Output: pg_catalog.var_pop((var_pop(a))) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: (var_pop(a)) -> Aggregate Output: var_pop(a) - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (8 rows) explain(costs off, verbose) select var_samp(a) from smp_test; - QUERY PLAN ------------------------------------------------ + QUERY PLAN +---------------------------------------------------- Aggregate Output: pg_catalog.var_samp((var_samp(a))) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: (var_samp(a)) -> Aggregate Output: var_samp(a) - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (8 rows) explain(costs off, verbose) select variance(a)from smp_test; - QUERY PLAN ------------------------------------------------ + QUERY PLAN +---------------------------------------------------- Aggregate Output: pg_catalog.variance((variance(a))) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: (variance(a)) -> Aggregate Output: variance(a) - -> Seq Scan on public.smp_test + -> Seq Scan on 
tinyint_agg.smp_test Output: a (8 rows) @@ -305,7 +303,7 @@ explain(costs off, verbose) select listagg(a) within group(order by a) from smp_ -------------------------------------------------- Aggregate Output: listagg(a ) WITHIN GROUP ( ORDER BY a) - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (4 rows) @@ -314,9 +312,13 @@ explain(costs off, verbose) select listagg(a, ',') within group(order by a) from ------------------------------------------------------------- Aggregate Output: listagg(a, ','::text ) WITHIN GROUP ( ORDER BY a) - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (4 rows) -\c postgres -drop database tinyint_agg; +drop schema tinyint_agg cascade; +NOTICE: drop cascades to 3 other objects +DETAIL: drop cascades to table u1 +drop cascades to table smp_test +drop cascades to table t1 +reset current_schema; diff --git a/contrib/dolphin/expected/tinyint_cast.out b/contrib/dolphin/expected/tinyint_cast.out index b5412455fb5412487bea1a27f2f9d412f2ddb69b..235cb4a8f55951eea5d6e4c1b9b59a95f7e52fb4 100644 --- a/contrib/dolphin/expected/tinyint_cast.out +++ b/contrib/dolphin/expected/tinyint_cast.out @@ -1,7 +1,5 @@ -drop database if exists tinyint_cast; -NOTICE: database "tinyint_cast" does not exist, skipping -create database tinyint_cast dbcompatibility 'b'; -\c tinyint_cast +create schema tinyint_cast; +set current_schema to 'tinyint_cast'; create table t1(a int1); insert into t1 values(''); ERROR: invalid input syntax for integer: "" @@ -382,5 +380,6 @@ select '-128'::text::int1; -128 (1 row) -\c postgres -drop database tinyint_cast; +drop schema tinyint_cast cascade; +NOTICE: drop cascades to table t1 +reset current_schema; diff --git a/contrib/dolphin/expected/tinyint_index.out b/contrib/dolphin/expected/tinyint_index.out index 428856674f90e73014d587d4a7de06515ca64bb2..6c384e623a7a509469d9bb77de31575a0adcfc76 100644 --- a/contrib/dolphin/expected/tinyint_index.out +++ 
b/contrib/dolphin/expected/tinyint_index.out @@ -1,7 +1,5 @@ -drop database if exists tinyint_index; -NOTICE: database "tinyint_index" does not exist, skipping -create database tinyint_index dbcompatibility 'b'; -\c tinyint_index +create schema tinyint_index; +set current_schema to 'tinyint_index'; create table t1(a int1); insert into t1 select generate_series(-128, 127); insert into t1 select generate_series(-128, 127); @@ -18,7 +16,7 @@ analyze t1; explain(costs off, verbose)select * from t1 where a = 1::int1; QUERY PLAN ------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: (t1.a = '1'::tinyint) -> Bitmap Index Scan on idx1 @@ -28,7 +26,7 @@ explain(costs off, verbose)select * from t1 where a = 1::int1; explain(costs off, verbose)select * from t1 where a = 1::int2; QUERY PLAN ------------------------------------------ - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: (t1.a = 1::smallint) -> Bitmap Index Scan on idx1 @@ -36,9 +34,9 @@ explain(costs off, verbose)select * from t1 where a = 1::int2; (5 rows) explain(costs off, verbose)select * from t1 where a = 1::int4; - QUERY PLAN ---------------------------------- - Bitmap Heap Scan on public.t1 + QUERY PLAN +-------------------------------------- + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: (t1.a = 1) -> Bitmap Index Scan on idx1 @@ -48,7 +46,7 @@ explain(costs off, verbose)select * from t1 where a = 1::int4; explain(costs off, verbose)select * from t1 where a = 1::int8; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: (t1.a = 1::bigint) -> Bitmap Index Scan on idx1 @@ -118,7 +116,7 @@ select * from t1 where a = 1::int8; explain(costs off, verbose)select * from t1 where a > 1::int1 and a < 3::int1; QUERY PLAN 
----------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: ((t1.a > '1'::tinyint) AND (t1.a < '3'::tinyint)) -> Bitmap Index Scan on idx1 @@ -128,7 +126,7 @@ explain(costs off, verbose)select * from t1 where a > 1::int1 and a < 3::int1; explain(costs off, verbose)select * from t1 where a > 1::int2 and a < 3::int2; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: ((t1.a > 1::smallint) AND (t1.a < 3::smallint)) -> Bitmap Index Scan on idx1 @@ -138,7 +136,7 @@ explain(costs off, verbose)select * from t1 where a > 1::int2 and a < 3::int2; explain(costs off, verbose)select * from t1 where a > 1::int4 and a < 3::int4; QUERY PLAN ------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: ((t1.a > 1) AND (t1.a < 3)) -> Bitmap Index Scan on idx1 @@ -148,7 +146,7 @@ explain(costs off, verbose)select * from t1 where a > 1::int4 and a < 3::int4; explain(costs off, verbose)select * from t1 where a > 1::int8 and a < 3::int8; QUERY PLAN ----------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: ((t1.a > 1::bigint) AND (t1.a < 3::bigint)) -> Bitmap Index Scan on idx1 @@ -218,7 +216,7 @@ select * from t1 where a > 1::int8 and a < 3::int8; explain(costs off, verbose)select * from t1 where a >= -2::int1 and a <= -1::int1; QUERY PLAN --------------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: ((t1.a >= '-2'::tinyint) AND (t1.a <= '-1'::tinyint)) -> Bitmap Index Scan on idx1 @@ -228,7 +226,7 @@ explain(costs off, verbose)select * from t1 where a >= -2::int1 and 
a <= -1::int explain(costs off, verbose)select * from t1 where a >= -2::int2 and a <= -1::int2; QUERY PLAN ----------------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: ((t1.a >= (-2)::smallint) AND (t1.a <= (-1)::smallint)) -> Bitmap Index Scan on idx1 @@ -238,7 +236,7 @@ explain(costs off, verbose)select * from t1 where a >= -2::int2 and a <= -1::int explain(costs off, verbose)select * from t1 where a >= -2::int4 and a <= -1::int4; QUERY PLAN --------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: ((t1.a >= (-2)) AND (t1.a <= (-1))) -> Bitmap Index Scan on idx1 @@ -248,7 +246,7 @@ explain(costs off, verbose)select * from t1 where a >= -2::int4 and a <= -1::int explain(costs off, verbose)select * from t1 where a >= -2::int8 and a <= -1::int8; QUERY PLAN ------------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: ((t1.a >= (-2)::bigint) AND (t1.a <= (-1)::bigint)) -> Bitmap Index Scan on idx1 @@ -361,7 +359,7 @@ analyze t1; explain(costs off, verbose)select * from t1 where a = 1::int1; QUERY PLAN ------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: (t1.a = '1'::tinyint) -> Bitmap Index Scan on idx1 @@ -371,7 +369,7 @@ explain(costs off, verbose)select * from t1 where a = 1::int1; explain(costs off, verbose)select * from t1 where a = 1::int2; QUERY PLAN ------------------------------------------ - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: (t1.a = 1::smallint) -> Bitmap Index Scan on idx1 @@ -379,9 +377,9 @@ explain(costs off, verbose)select * from t1 where a = 1::int2; (5 rows) explain(costs off, verbose)select * from t1 
where a = 1::int4; - QUERY PLAN ---------------------------------- - Bitmap Heap Scan on public.t1 + QUERY PLAN +-------------------------------------- + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: (t1.a = 1) -> Bitmap Index Scan on idx1 @@ -391,7 +389,7 @@ explain(costs off, verbose)select * from t1 where a = 1::int4; explain(costs off, verbose)select * from t1 where a = 1::int8; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: (t1.a = 1::bigint) -> Bitmap Index Scan on idx1 @@ -470,7 +468,7 @@ explain(costs off, verbose)select * from t1 where a >= -1::int1 and a <= 0::int1 ---------------------------------------------------------------------- Row Adapter Output: a - -> CStore Scan on public.t1 + -> CStore Scan on tinyint_index.t1 Output: a Filter: ((t1.a >= '-1'::tinyint) AND (t1.a <= '0'::tinyint)) (5 rows) @@ -480,17 +478,17 @@ explain(costs off, verbose)select * from t1 where a >= -1::int2 and a <= 0::int2 -------------------------------------------------------------------------- Row Adapter Output: a - -> CStore Index Only Scan using idx1 on public.t1 + -> CStore Index Only Scan using idx1 on tinyint_index.t1 Output: a Index Cond: ((t1.a >= (-1)::smallint) AND (t1.a <= 0::smallint)) (5 rows) explain(costs off, verbose)select * from t1 where a >= -1::int4 and a <= 0::int4; - QUERY PLAN ------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------- Row Adapter Output: a - -> CStore Index Only Scan using idx1 on public.t1 + -> CStore Index Only Scan using idx1 on tinyint_index.t1 Output: a Index Cond: ((t1.a >= (-1)) AND (t1.a <= 0)) (5 rows) @@ -500,8 +498,13 @@ explain(costs off, verbose)select * from t1 where a >= -1::int8 and a <= 0::int8 ---------------------------------------------------------------------- Row Adapter Output: a - -> CStore Index Only Scan using idx1 on 
public.t1 + -> CStore Index Only Scan using idx1 on tinyint_index.t1 Output: a Index Cond: ((t1.a >= (-1)::bigint) AND (t1.a <= 0::bigint)) (5 rows) +drop schema tinyint_index cascade; +NOTICE: drop cascades to 2 other objects +DETAIL: drop cascades to table t1 +drop cascades to table t2 +reset current_schema; diff --git a/contrib/dolphin/expected/tinyint_operator.out b/contrib/dolphin/expected/tinyint_operator.out index c0267aaa64deb178a28d774104cf0a93899f080f..a84a757722d57331712a5db176acb254411413e9 100644 --- a/contrib/dolphin/expected/tinyint_operator.out +++ b/contrib/dolphin/expected/tinyint_operator.out @@ -1,7 +1,5 @@ -drop database if exists tinyint_operator; -NOTICE: database "tinyint_operator" does not exist, skipping -create database tinyint_operator dbcompatibility 'b'; -\c tinyint_operator +create schema tinyint_operator; +set current_schema to 'tinyint_operator'; select 1::int1 + 1::int1; ?column? ---------- @@ -258,5 +256,5 @@ select @127::int1; select @(-128)::int1; ERROR: tinyint out of range -\c postgres -drop database tinyint_operator; +drop schema tinyint_operator cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/tinyint_partition.out b/contrib/dolphin/expected/tinyint_partition.out index c4a56d9f163be92fb335edc6921e5b2fc1a2b72c..f71fee7645de096c20c013f37710092d845d1b10 100644 --- a/contrib/dolphin/expected/tinyint_partition.out +++ b/contrib/dolphin/expected/tinyint_partition.out @@ -1,7 +1,5 @@ -drop database if exists tinyint_partition; -NOTICE: database "tinyint_partition" does not exist, skipping -create database tinyint_partition dbcompatibility 'b'; -\c tinyint_partition +create schema tinyint_partition; +set current_schema to 'tinyint_partition'; CREATE TABLE t1 ( col1 int1 NOT NULL, @@ -248,5 +246,15 @@ select * from start_end1; 1 (1 row) -\c postgres -drop database tinyint_partition; +drop schema tinyint_partition cascade; +NOTICE: drop cascades to 9 other objects +DETAIL: drop cascades to table t1 +drop cascades 
to table t2 +drop cascades to table t3 +drop cascades to table a1 +drop cascades to table a2 +drop cascades to table subpartition_01 +drop cascades to table subpartition_02 +drop cascades to table subpartition_03 +drop cascades to table start_end1 +reset current_schema; diff --git a/contrib/dolphin/expected/tinyint_smp_join_procedure.out b/contrib/dolphin/expected/tinyint_smp_join_procedure.out index b7bf750e5eddfefa84a117011a8390b2a81a7b7c..146b1fb6d6f894024519023f25a24687eee92396 100644 --- a/contrib/dolphin/expected/tinyint_smp_join_procedure.out +++ b/contrib/dolphin/expected/tinyint_smp_join_procedure.out @@ -1,7 +1,5 @@ -drop database if exists tinyint_smp; -NOTICE: database "tinyint_smp" does not exist, skipping -create database tinyint_smp dbcompatibility 'b'; -\c tinyint_smp +create schema tinyint_smp; +set current_schema to 'tinyint_smp'; set enable_opfusion = on; set opfusion_debug_mode = log; drop table if exists t1; @@ -16,7 +14,7 @@ explain(costs off, verbose) select * from t2 where a = 2; QUERY PLAN ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. - Seq Scan on public.t2 + Seq Scan on tinyint_smp.t2 Output: a Filter: (t2.a = 2) (4 rows) @@ -26,15 +24,15 @@ explain(costs off, verbose) select * from t2 where a = 2 and t2.a in (select a f ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. 
Nested Loop Semi Join - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a = 2) AND (public.t2.a > 500)) + Output: tinyint_smp.t2.a + -> Seq Scan on tinyint_smp.t2 + Output: tinyint_smp.t2.a + Filter: ((tinyint_smp.t2.a = 2) AND (tinyint_smp.t2.a > 500)) -> Materialize - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a > 500) AND (public.t2.a = 2)) + Output: tinyint_smp.t2.a + -> Seq Scan on tinyint_smp.t2 + Output: tinyint_smp.t2.a + Filter: ((tinyint_smp.t2.a > 500) AND (tinyint_smp.t2.a = 2)) (11 rows) set query_dop = 4; @@ -44,7 +42,7 @@ explain(costs off, verbose) select * from t2 where a = 2; [No Bypass]reason: Bypass not executed because query's scan operator is not index. Streaming(type: LOCAL GATHER dop: 1/4) Output: a - -> Seq Scan on public.t2 + -> Seq Scan on tinyint_smp.t2 Output: a Filter: (t2.a = 2) (6 rows) @@ -54,19 +52,19 @@ explain(costs off, verbose) select * from t2 where a = 2 and t2.a in (select a f ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. 
Nested Loop Semi Join - Output: public.t2.a + Output: tinyint_smp.t2.a -> Streaming(type: LOCAL GATHER dop: 1/4) - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a = 2) AND (public.t2.a > 500)) + Output: tinyint_smp.t2.a + -> Seq Scan on tinyint_smp.t2 + Output: tinyint_smp.t2.a + Filter: ((tinyint_smp.t2.a = 2) AND (tinyint_smp.t2.a > 500)) -> Materialize - Output: public.t2.a + Output: tinyint_smp.t2.a -> Streaming(type: LOCAL GATHER dop: 1/4) - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a > 500) AND (public.t2.a = 2)) + Output: tinyint_smp.t2.a + -> Seq Scan on tinyint_smp.t2 + Output: tinyint_smp.t2.a + Filter: ((tinyint_smp.t2.a > 500) AND (tinyint_smp.t2.a = 2)) (15 rows) set query_dop = 1; @@ -83,13 +81,13 @@ explain(costs off, verbose) select * from join_1 join join_2; Output: join_1.a, join_2.a -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on tinyint_smp.join_1 Output: join_1.a -> Materialize Output: join_2.a -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on tinyint_smp.join_2 Output: join_2.a (12 rows) @@ -104,14 +102,14 @@ explain(costs off, verbose) select * from join_1 join join_2 on join_1.a = join_ -> Streaming(type: LOCAL REDISTRIBUTE dop: 2/2) Output: join_1.a Distribute Key: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on tinyint_smp.join_1 Output: join_1.a -> Hash Output: join_2.a -> Streaming(type: LOCAL REDISTRIBUTE dop: 2/2) Output: join_2.a Distribute Key: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on tinyint_smp.join_2 Output: join_2.a (17 rows) @@ -126,14 +124,14 @@ explain(costs off, verbose) select * from join_1 left join join_2 on join_1.a = -> Streaming(type: LOCAL REDISTRIBUTE dop: 2/2) Output: join_1.a Distribute Key: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on tinyint_smp.join_1 Output: join_1.a -> 
Hash Output: join_2.a -> Streaming(type: LOCAL REDISTRIBUTE dop: 2/2) Output: join_2.a Distribute Key: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on tinyint_smp.join_2 Output: join_2.a (17 rows) @@ -148,14 +146,14 @@ explain(costs off, verbose) select * from join_1 right join join_2 on join_1.a = -> Streaming(type: LOCAL REDISTRIBUTE dop: 2/2) Output: join_2.a Distribute Key: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on tinyint_smp.join_2 Output: join_2.a -> Hash Output: join_1.a -> Streaming(type: LOCAL REDISTRIBUTE dop: 2/2) Output: join_1.a Distribute Key: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on tinyint_smp.join_1 Output: join_1.a (17 rows) @@ -170,14 +168,14 @@ explain(costs off, verbose) select * from join_1 inner join join_2 on join_1.a = -> Streaming(type: LOCAL REDISTRIBUTE dop: 2/2) Output: join_1.a Distribute Key: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on tinyint_smp.join_1 Output: join_1.a -> Hash Output: join_2.a -> Streaming(type: LOCAL REDISTRIBUTE dop: 2/2) Output: join_2.a Distribute Key: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on tinyint_smp.join_2 Output: join_2.a (17 rows) @@ -192,14 +190,14 @@ explain(costs off, verbose) select /*+ nestloop(join_1 join_2)*/ * from join_1 l -> Streaming(type: LOCAL REDISTRIBUTE dop: 2/2) Output: join_1.a Distribute Key: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on tinyint_smp.join_1 Output: join_1.a -> Materialize Output: join_2.a -> Streaming(type: LOCAL REDISTRIBUTE dop: 2/2) Output: join_2.a Distribute Key: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on tinyint_smp.join_2 Output: join_2.a (17 rows) @@ -214,14 +212,14 @@ explain(costs off, verbose) select /*+ hashjoin(join_1 join_2)*/ * from join_1 l -> Streaming(type: LOCAL REDISTRIBUTE dop: 2/2) Output: join_1.a Distribute Key: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on tinyint_smp.join_1 Output: join_1.a -> Hash Output: join_2.a -> Streaming(type: LOCAL 
REDISTRIBUTE dop: 2/2) Output: join_2.a Distribute Key: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on tinyint_smp.join_2 Output: join_2.a (17 rows) @@ -237,14 +235,14 @@ WARNING: unused hint: MergeJoin(join_1 join_2) -> Streaming(type: LOCAL REDISTRIBUTE dop: 2/2) Output: join_1.a Distribute Key: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on tinyint_smp.join_1 Output: join_1.a -> Hash Output: join_2.a -> Streaming(type: LOCAL REDISTRIBUTE dop: 2/2) Output: join_2.a Distribute Key: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on tinyint_smp.join_2 Output: join_2.a (17 rows) @@ -275,5 +273,12 @@ select * from test1; -4 (2 rows) -\c postgres -drop database tinyint_smp; +drop schema tinyint_smp cascade; +NOTICE: drop cascades to 6 other objects +DETAIL: drop cascades to table t1 +drop cascades to table t2 +drop cascades to table join_1 +drop cascades to table join_2 +drop cascades to function test_p1(tinyint,tinyint) +drop cascades to table test1 +reset current_schema; diff --git a/contrib/dolphin/expected/uint_agg.out b/contrib/dolphin/expected/uint_agg.out index 68ef6d574e0422db5b29e4c8f205e1e77a4c5d24..7bfe7558f5d7f2e4b1476c28d72454f8721d93dc 100644 --- a/contrib/dolphin/expected/uint_agg.out +++ b/contrib/dolphin/expected/uint_agg.out @@ -1,7 +1,5 @@ -drop database if exists uint_agg; -NOTICE: database "uint_agg" does not exist, skipping -create database uint_agg dbcompatibility 'b'; -\c uint_agg +create schema uint_agg; +set current_schema to 'uint_agg'; --uint1 create table u1(a uint1, b int2); insert into u1 values(null, null),(255, 255),(0, 0),(255, 255),(null, null); @@ -411,7 +409,7 @@ explain(costs off, verbose) select avg(a), avg(b) from smp_test; Output: (avg(a)), (avg(b)) -> Aggregate Output: avg(a), avg(b) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (8 rows) @@ -424,7 +422,7 @@ explain(costs off, verbose) select bit_and(a), bit_and(b) from smp_test; Output: (bit_and(a)), (bit_and(b)) -> 
Aggregate Output: bit_and(a), bit_and(b) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (8 rows) @@ -437,20 +435,20 @@ explain(costs off, verbose) select bit_or(a), bit_or(b) from smp_test; Output: (bit_or(a)), (bit_or(b)) -> Aggregate Output: bit_or(a), bit_or(b) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (8 rows) explain(costs off, verbose) select count(a), count(b) from smp_test; - QUERY PLAN ------------------------------------------------- + QUERY PLAN +------------------------------------------------- Aggregate Output: count((count(a))), count((count(b))) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: (count(a)), (count(b)) -> Aggregate Output: count(a), count(b) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (8 rows) @@ -459,33 +457,33 @@ explain(costs off, verbose) select count(distinct a), count(distinct b) from smp ------------------------------------------------ Aggregate Output: count(DISTINCT a), count(DISTINCT b) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (4 rows) explain(costs off, verbose) select max(a), max(b) from smp_test; - QUERY PLAN ------------------------------------------------ + QUERY PLAN +------------------------------------------------- Aggregate Output: max((max(a))), max((max(b))) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: (max(a)), (max(b)) -> Aggregate Output: max(a), max(b) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (8 rows) explain(costs off, verbose) select min(a), min(b) from smp_test; - QUERY PLAN ------------------------------------------------ + QUERY PLAN +------------------------------------------------- Aggregate Output: min((min(a))), min((min(b))) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: (min(a)), (min(b)) -> Aggregate Output: min(a), min(b) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (8 
rows) @@ -498,7 +496,7 @@ explain(costs off, verbose) select stddev(a), stddev(b) from smp_test; Output: (stddev(a)), (stddev(b)) -> Aggregate Output: stddev(a), stddev(b) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (8 rows) @@ -511,7 +509,7 @@ explain(costs off, verbose) select stddev_pop(a), stddev_pop(b) from smp_test; Output: (stddev_pop(a)), (stddev_pop(b)) -> Aggregate Output: stddev_pop(a), stddev_pop(b) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (8 rows) @@ -524,7 +522,7 @@ explain(costs off, verbose) select stddev_samp(a), stddev_samp(b) from smp_test; Output: (stddev_samp(a)), (stddev_samp(b)) -> Aggregate Output: stddev_samp(a), stddev_samp(b) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (8 rows) @@ -537,7 +535,7 @@ explain(costs off, verbose) select sum(a), sum(b) from smp_test; Output: (sum(a)), (sum(b)) -> Aggregate Output: sum(a), sum(b) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (8 rows) @@ -550,7 +548,7 @@ explain(costs off, verbose) select var_pop(a), var_pop(b) from smp_test; Output: (var_pop(a)), (var_pop(b)) -> Aggregate Output: var_pop(a), var_pop(b) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (8 rows) @@ -563,7 +561,7 @@ explain(costs off, verbose) select var_samp(a), var_samp(b) from smp_test; Output: (var_samp(a)), (var_samp(b)) -> Aggregate Output: var_samp(a), var_samp(b) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (8 rows) @@ -576,7 +574,7 @@ explain(costs off, verbose) select variance(a), variance(b) from smp_test; Output: (variance(a)), (variance(b)) -> Aggregate Output: variance(a), variance(b) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (8 rows) @@ -585,7 +583,7 @@ explain(costs off, verbose) select listagg(a) within group(order by a) from smp_ 
-------------------------------------------------- Aggregate Output: listagg(a ) WITHIN GROUP ( ORDER BY a) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (4 rows) @@ -594,9 +592,15 @@ explain(costs off, verbose) select listagg(a, ',') within group(order by a) from ------------------------------------------------------------- Aggregate Output: listagg(a, ','::text ) WITHIN GROUP ( ORDER BY a) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (4 rows) -\c postgres -drop database uint_agg; +drop schema uint_agg cascade; +NOTICE: drop cascades to 5 other objects +DETAIL: drop cascades to table u1 +drop cascades to table u2 +drop cascades to table u4 +drop cascades to table u8 +drop cascades to table smp_test +reset current_schema; diff --git a/contrib/dolphin/expected/uint_and.out b/contrib/dolphin/expected/uint_and.out index 487796f5d8b11f0e11ab952c2ee2bbba0be49a31..04f5e9e3ae9c55bfc8edbc4f118f7ac9a39ed773 100644 --- a/contrib/dolphin/expected/uint_and.out +++ b/contrib/dolphin/expected/uint_and.out @@ -1,7 +1,5 @@ -drop database if exists uint_and; -NOTICE: database "uint_and" does not exist, skipping -create database uint_and dbcompatibility 'b'; -\c uint_and +create schema uint_and; +set current_schema to 'uint_and'; --uint8 select 18446744073709551615::uint8 & 0::int1; ?column? 
@@ -826,5 +824,5 @@ select 127::int1 & 1::uint8; 1 (1 row) -\c postgres -drop database uint_and +drop schema uint_and cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/uint_auto_increment.out b/contrib/dolphin/expected/uint_auto_increment.out index 71ae501b5f31ed4bf59378a9cf3bc5962bd4dc96..270d859981db1aa79e1bad41e89908da1fcbcb13 100644 --- a/contrib/dolphin/expected/uint_auto_increment.out +++ b/contrib/dolphin/expected/uint_auto_increment.out @@ -1,6 +1,5 @@ --- create b db -create database uint_auto_increment with dbcompatibility = 'B'; -\c uint_auto_increment +create schema uint_auto_increment; +set current_schema to 'uint_auto_increment'; -- test CREATE TABLE with AUTO_INCREMENT -- syntax error CREATE TABLE test_create_autoinc_err(id int unsigned auto_increment key, name varchar(200),a int unsigned); @@ -240,7 +239,7 @@ SELECT id, col FROM test_alter_autoinc ORDER BY 1, 2; SELECT pg_catalog.pg_get_tabledef('test_alter_autoinc'); pg_get_tabledef --------------------------------------------------------- - SET search_path = public; + + SET search_path = uint_auto_increment; + CREATE TABLE test_alter_autoinc ( + col uint4, + id uint4 AUTO_INCREMENT NOT NULL, + @@ -446,7 +445,7 @@ SELECT col FROM single_autoinc_pk ORDER BY 1; SELECT pg_catalog.pg_get_tabledef('single_autoinc_pk'); pg_get_tabledef --------------------------------------------------------- - SET search_path = public; + + SET search_path = uint_auto_increment; + CREATE TABLE single_autoinc_pk ( + col uint4 AUTO_INCREMENT NOT NULL, + CONSTRAINT single_autoinc_pk_pkey PRIMARY KEY (col)+ @@ -461,7 +460,7 @@ NOTICE: ALTER TABLE / ADD UNIQUE will create implicit index "test_alter_single_ SELECT pg_catalog.pg_get_tabledef('single_autoinc_pk'); pg_get_tabledef -------------------------------------------------------------------------------------------------------- - SET search_path = public; + + SET search_path = uint_auto_increment; + CREATE TABLE single_autoinc_pk ( + col uint4 
AUTO_INCREMENT NOT NULL, + CONSTRAINT test_alter_single_autoinc_pk_u2 UNIQUE (col) + @@ -475,7 +474,7 @@ ALTER TABLE single_autoinc_pk DROP CONSTRAINT test_alter_single_autoinc_pk_u2; SELECT pg_catalog.pg_get_tabledef('single_autoinc_pk'); pg_get_tabledef ----------------------------------------------------------------------------------------------------- - SET search_path = public; + + SET search_path = uint_auto_increment; + CREATE TABLE single_autoinc_pk ( + col uint4 AUTO_INCREMENT NOT NULL, + CONSTRAINT test_alter_single_autoinc_pk_u1 UNIQUE (col) + @@ -614,7 +613,7 @@ SELECT col FROM single_autoinc_uk ORDER BY 1; SELECT pg_catalog.pg_get_tabledef('single_autoinc_uk'); pg_get_tabledef ------------------------------------------------------- - SET search_path = public; + + SET search_path = uint_auto_increment; + CREATE TABLE single_autoinc_uk ( + col uint4 AUTO_INCREMENT, + CONSTRAINT single_autoinc_uk_col_key UNIQUE (col)+ @@ -1607,7 +1606,7 @@ SELECT col FROM gtemp_single_autoinc ORDER BY 1; SELECT pg_catalog.pg_get_tabledef('gtemp_single_autoinc'); pg_get_tabledef ---------------------------------------------------------------------- - SET search_path = public; + + SET search_path = uint_auto_increment; + CREATE GLOBAL TEMPORARY TABLE gtemp_single_autoinc ( + col uint4 AUTO_INCREMENT NOT NULL, + CONSTRAINT gtemp_single_autoinc_pkey PRIMARY KEY (col) + @@ -1873,7 +1872,7 @@ SELECT col FROM unlog_single_autoinc ORDER BY 1; SELECT pg_catalog.pg_get_tabledef('unlog_single_autoinc'); pg_get_tabledef ------------------------------------------------------------ - SET search_path = public; + + SET search_path = uint_auto_increment; + CREATE UNLOGGED TABLE unlog_single_autoinc ( + col uint4 AUTO_INCREMENT NOT NULL, + CONSTRAINT unlog_single_autoinc_pkey PRIMARY KEY (col)+ @@ -2160,7 +2159,7 @@ SELECT col1 FROM test_part_autoinc_pk ORDER BY 1; SELECT pg_catalog.pg_get_tabledef('test_part_autoinc_pk'); pg_get_tabledef 
----------------------------------------------------------------------------------- - SET search_path = public; + + SET search_path = uint_auto_increment; + CREATE TABLE test_part_autoinc_pk ( + col1 integer AUTO_INCREMENT NOT NULL, + col2 integer NOT NULL, + @@ -2475,7 +2474,7 @@ SELECT col1 FROM test_part_autoinc_unique ORDER BY 1; SELECT pg_catalog.pg_get_tabledef('test_part_autoinc_unique'); pg_get_tabledef -------------------------------------------------------------------------------------- - SET search_path = public; + + SET search_path = uint_auto_increment; + CREATE TABLE test_part_autoinc_unique ( + col1 integer AUTO_INCREMENT, + col2 integer NOT NULL, + @@ -3134,5 +3133,6 @@ SELECT col1,col2 FROM test_autoinc_insert_select ORDER BY 1; drop table test_autoinc_source; drop table test_autoinc_insert_select; -\c postgres -drop database if exists uint_auto_increment; +drop schema uint_auto_increment cascade; +NOTICE: drop cascades to table uint_auto_increment.test_create_autoinc_err +reset current_schema; diff --git a/contrib/dolphin/expected/uint_cast.out b/contrib/dolphin/expected/uint_cast.out index 2f77ac890116b2061b6a6c81caad857bd604f100..caeac0fcc868d441f33faf847672be2cfd564acb 100644 --- a/contrib/dolphin/expected/uint_cast.out +++ b/contrib/dolphin/expected/uint_cast.out @@ -1,7 +1,5 @@ -drop database if exists uint_cast; -NOTICE: database "uint_cast" does not exist, skipping -create database uint_cast dbcompatibility 'b'; -\c uint_cast +create schema uint_cast; +set current_schema to 'uint_cast'; select (-1)::bool::uint8; uint8 ------- @@ -1223,5 +1221,5 @@ select (1)::uint8::bool; t (1 row) -\c postgres -drop database uint_cast; +drop schema uint_cast cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/uint_cast2.out b/contrib/dolphin/expected/uint_cast2.out index 76beef4017bf5972b8701fe50ccf3f891df74d5d..1810ab11ad13398383a9955d55ec355b5de21514 100644 --- a/contrib/dolphin/expected/uint_cast2.out +++ 
b/contrib/dolphin/expected/uint_cast2.out @@ -1,7 +1,5 @@ -drop database if exists uint_cast2; -NOTICE: database "uint_cast2" does not exist, skipping -create database uint_cast2 dbcompatibility 'b'; -\c uint_cast2 +create schema uint_cast2; +set current_schema to 'uint_cast2'; drop table if exists t1 ; NOTICE: table "t1" does not exist, skipping create table t1(a uint8); @@ -451,5 +449,6 @@ insert into t1 values(255::uint1); insert into t1 values(256::uint1); ERROR: tinyint unsigned out of range CONTEXT: referenced column: a -\c postgres -drop database uint_cast2; +drop schema uint_cast2 cascade; +NOTICE: drop cascades to table t1 +reset current_schema; diff --git a/contrib/dolphin/expected/uint_div.out b/contrib/dolphin/expected/uint_div.out index aa6b0490273fc8ce65209db422fc60a3c4769033..a71da32abe479ea5c2532b66dc15785779192945 100644 --- a/contrib/dolphin/expected/uint_div.out +++ b/contrib/dolphin/expected/uint_div.out @@ -1,7 +1,5 @@ -drop database if exists uint_div; -NOTICE: database "uint_div" does not exist, skipping -create database uint_div dbcompatibility 'b'; -\c uint_div +create schema uint_div; +set current_schema to 'uint_div'; --uint8 select 18446744073709551615::uint8 / 0::int1; ?column? 
@@ -640,5 +638,5 @@ select 127::int1 / 1::uint8; 127 (1 row) -\c postgres -drop database uint_div +drop schema uint_div cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/uint_ignore.out b/contrib/dolphin/expected/uint_ignore.out index 1d072326634e302e626748101c5924b8c74d2641..f8c266744af395aed677d25d68f71167c8c07ecf 100644 --- a/contrib/dolphin/expected/uint_ignore.out +++ b/contrib/dolphin/expected/uint_ignore.out @@ -1,7 +1,5 @@ -drop database if exists uint_ignore; -NOTICE: database "uint_ignore" does not exist, skipping -create database uint_ignore dbcompatibility 'b'; -\c uint_ignore +create schema uint_ignore; +set current_schema to 'uint_ignore'; drop table if exists t1 ; NOTICE: table "t1" does not exist, skipping create table t1(a uint8); @@ -661,5 +659,6 @@ select * from t1; 255 (61 rows) -\c postgres -drop database uint_ignore; +drop schema uint_ignore cascade; +NOTICE: drop cascades to table t1 +reset current_schema; diff --git a/contrib/dolphin/expected/uint_in.out b/contrib/dolphin/expected/uint_in.out index 503f8cbbcea03f384690c765252428cbac4d2d56..f47b87c4e314871fdce982aa42decc573a622c30 100644 --- a/contrib/dolphin/expected/uint_in.out +++ b/contrib/dolphin/expected/uint_in.out @@ -1,7 +1,5 @@ -drop database if exists uint_in; -NOTICE: database "uint_in" does not exist, skipping -create database uint_in dbcompatibility 'b'; -\c uint_in +create schema uint_in; +set current_schema to 'uint_in'; create table t1(a uint1); create table t2(a uint2); create table t3(a uint4); @@ -62,5 +60,10 @@ select * from t4; 2 (2 rows) -\c postgres -drop database uint_in +drop schema uint_in cascade; +NOTICE: drop cascades to 4 other objects +DETAIL: drop cascades to table t1 +drop cascades to table t2 +drop cascades to table t3 +drop cascades to table t4 +reset current_schema; diff --git a/contrib/dolphin/expected/uint_index.out b/contrib/dolphin/expected/uint_index.out index 
b4fb77c697063f8ba33d619d0325f8b022c1e8d5..caf06afe6825f0fe258b677163ae3f8f706bcb77 100644 --- a/contrib/dolphin/expected/uint_index.out +++ b/contrib/dolphin/expected/uint_index.out @@ -1,7 +1,5 @@ -drop database if exists uint_index; -NOTICE: database "uint_index" does not exist, skipping -create database uint_index dbcompatibility 'b'; -\c uint_index +create schema uint_index; +set current_schema to 'uint_index'; create table t1(a uint1); insert into t1 select generate_series(1, 255); insert into t1 select generate_series(1, 255); @@ -18,7 +16,7 @@ analyze t1; explain(costs off, verbose)select * from t1 where a = 1::uint1; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = '1'::uint1) -> Bitmap Index Scan on idx1 @@ -28,7 +26,7 @@ explain(costs off, verbose)select * from t1 where a = 1::uint1; explain(costs off, verbose)select * from t1 where a = 1::uint2; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = '1'::uint2) -> Bitmap Index Scan on idx1 @@ -38,7 +36,7 @@ explain(costs off, verbose)select * from t1 where a = 1::uint2; explain(costs off, verbose)select * from t1 where a = 1::uint4; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = '1'::uint4) -> Bitmap Index Scan on idx1 @@ -48,7 +46,7 @@ explain(costs off, verbose)select * from t1 where a = 1::uint4; explain(costs off, verbose)select * from t1 where a = 1::uint8; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = '1'::uint8) -> Bitmap Index Scan on idx1 @@ -58,7 +56,7 @@ explain(costs off, verbose)select * from t1 where a = 1::uint8; explain(costs off, verbose)select * from t1 where a = 1::int1; QUERY 
PLAN ------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = '1'::tinyint) -> Bitmap Index Scan on idx1 @@ -68,7 +66,7 @@ explain(costs off, verbose)select * from t1 where a = 1::int1; explain(costs off, verbose)select * from t1 where a = 1::int2; QUERY PLAN ------------------------------------------ - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = 1::smallint) -> Bitmap Index Scan on idx1 @@ -76,9 +74,9 @@ explain(costs off, verbose)select * from t1 where a = 1::int2; (5 rows) explain(costs off, verbose)select * from t1 where a = 1::int4; - QUERY PLAN ---------------------------------- - Bitmap Heap Scan on public.t1 + QUERY PLAN +----------------------------------- + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = 1) -> Bitmap Index Scan on idx1 @@ -88,7 +86,7 @@ explain(costs off, verbose)select * from t1 where a = 1::int4; explain(costs off, verbose)select * from t1 where a = 1::int8; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = 1::bigint) -> Bitmap Index Scan on idx1 @@ -99,7 +97,7 @@ explain(costs off, verbose)select * from t1 where a > 1::uint1 and a < 3::uint1; QUERY PLAN ------------------------------------------------------------- [Bypass] - Index Only Scan using idx1 on public.t1 + Index Only Scan using idx1 on uint_index.t1 Output: a Index Cond: ((t1.a > '1'::uint1) AND (t1.a < '3'::uint1)) (4 rows) @@ -107,7 +105,7 @@ explain(costs off, verbose)select * from t1 where a > 1::uint1 and a < 3::uint1; explain(costs off, verbose)select * from t1 where a > 1::uint2 and a < 3::uint2; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a > '1'::uint2) AND (t1.a < '3'::uint2)) -> 
Bitmap Index Scan on idx1 @@ -117,7 +115,7 @@ explain(costs off, verbose)select * from t1 where a > 1::uint2 and a < 3::uint2; explain(costs off, verbose)select * from t1 where a > 1::uint4 and a < 3::uint4; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a > '1'::uint4) AND (t1.a < '3'::uint4)) -> Bitmap Index Scan on idx1 @@ -127,7 +125,7 @@ explain(costs off, verbose)select * from t1 where a > 1::uint4 and a < 3::uint4; explain(costs off, verbose)select * from t1 where a > 1::uint8 and a < 3::uint8; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a > '1'::uint8) AND (t1.a < '3'::uint8)) -> Bitmap Index Scan on idx1 @@ -137,7 +135,7 @@ explain(costs off, verbose)select * from t1 where a > 1::uint8 and a < 3::uint8; explain(costs off, verbose)select * from t1 where a > 1::int1 and a < 3::int1; QUERY PLAN ----------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a > '1'::tinyint) AND (t1.a < '3'::tinyint)) -> Bitmap Index Scan on idx1 @@ -147,7 +145,7 @@ explain(costs off, verbose)select * from t1 where a > 1::int1 and a < 3::int1; explain(costs off, verbose)select * from t1 where a > 1::int2 and a < 3::int2; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a > 1::smallint) AND (t1.a < 3::smallint)) -> Bitmap Index Scan on idx1 @@ -157,7 +155,7 @@ explain(costs off, verbose)select * from t1 where a > 1::int2 and a < 3::int2; explain(costs off, verbose)select * from t1 where a > 1::int4 and a < 3::int4; QUERY PLAN ------------------------------------------------- - Bitmap Heap 
Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a > 1) AND (t1.a < 3)) -> Bitmap Index Scan on idx1 @@ -167,7 +165,7 @@ explain(costs off, verbose)select * from t1 where a > 1::int4 and a < 3::int4; explain(costs off, verbose)select * from t1 where a > 1::int8 and a < 3::int8; QUERY PLAN ----------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a > 1::bigint) AND (t1.a < 3::bigint)) -> Bitmap Index Scan on idx1 @@ -177,7 +175,7 @@ explain(costs off, verbose)select * from t1 where a > 1::int8 and a < 3::int8; explain(costs off, verbose)select * from t1 where a >= 1::uint1 and a <= 3::uint1; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a >= '1'::uint1) AND (t1.a <= '3'::uint1)) -> Bitmap Index Scan on idx1 @@ -187,7 +185,7 @@ explain(costs off, verbose)select * from t1 where a >= 1::uint1 and a <= 3::uint explain(costs off, verbose)select * from t1 where a >= 1::uint2 and a <= 3::uint2; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a >= '1'::uint2) AND (t1.a <= '3'::uint2)) -> Bitmap Index Scan on idx1 @@ -197,7 +195,7 @@ explain(costs off, verbose)select * from t1 where a >= 1::uint2 and a <= 3::uint explain(costs off, verbose)select * from t1 where a >= 1::uint4 and a <= 3::uint4; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a >= '1'::uint4) AND (t1.a <= '3'::uint4)) -> Bitmap Index Scan on idx1 @@ -207,7 +205,7 @@ explain(costs off, verbose)select * from t1 where a >= 1::uint4 and a <= 3::uint explain(costs off, verbose)select * from t1 
where a >= 1::uint8 and a <= 3::uint8; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a >= '1'::uint8) AND (t1.a <= '3'::uint8)) -> Bitmap Index Scan on idx1 @@ -217,7 +215,7 @@ explain(costs off, verbose)select * from t1 where a >= 1::uint8 and a <= 3::uint explain(costs off, verbose)select * from t1 where a >= 1::int1 and a <= 3::int1; QUERY PLAN ------------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a >= '1'::tinyint) AND (t1.a <= '3'::tinyint)) -> Bitmap Index Scan on idx1 @@ -227,7 +225,7 @@ explain(costs off, verbose)select * from t1 where a >= 1::int1 and a <= 3::int1; explain(costs off, verbose)select * from t1 where a >= 1::int2 and a <= 3::int2; QUERY PLAN ----------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a >= 1::smallint) AND (t1.a <= 3::smallint)) -> Bitmap Index Scan on idx1 @@ -237,7 +235,7 @@ explain(costs off, verbose)select * from t1 where a >= 1::int2 and a <= 3::int2; explain(costs off, verbose)select * from t1 where a >= 1::int4 and a <= 3::int4; QUERY PLAN --------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a >= 1) AND (t1.a <= 3)) -> Bitmap Index Scan on idx1 @@ -247,7 +245,7 @@ explain(costs off, verbose)select * from t1 where a >= 1::int4 and a <= 3::int4; explain(costs off, verbose)select * from t1 where a >= 1::int8 and a <= 3::int8; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a >= 1::bigint) AND (t1.a <= 3::bigint)) -> Bitmap Index Scan on idx1 @@ -260,7 
+258,7 @@ analyze t1; explain(costs off, verbose)select * from t1 where a = 1::uint1; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = '1'::uint1) -> Bitmap Index Scan on idx1 @@ -270,7 +268,7 @@ explain(costs off, verbose)select * from t1 where a = 1::uint1; explain(costs off, verbose)select * from t1 where a = 1::uint2; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = '1'::uint2) -> Bitmap Index Scan on idx1 @@ -280,7 +278,7 @@ explain(costs off, verbose)select * from t1 where a = 1::uint2; explain(costs off, verbose)select * from t1 where a = 1::uint4; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = '1'::uint4) -> Bitmap Index Scan on idx1 @@ -290,7 +288,7 @@ explain(costs off, verbose)select * from t1 where a = 1::uint4; explain(costs off, verbose)select * from t1 where a = 1::uint8; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = '1'::uint8) -> Bitmap Index Scan on idx1 @@ -300,7 +298,7 @@ explain(costs off, verbose)select * from t1 where a = 1::uint8; explain(costs off, verbose)select * from t1 where a = 1::int1; QUERY PLAN ------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = '1'::tinyint) -> Bitmap Index Scan on idx1 @@ -310,7 +308,7 @@ explain(costs off, verbose)select * from t1 where a = 1::int1; explain(costs off, verbose)select * from t1 where a = 1::int2; QUERY PLAN ------------------------------------------ - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = 1::smallint) -> Bitmap Index Scan on idx1 @@ 
-318,9 +316,9 @@ explain(costs off, verbose)select * from t1 where a = 1::int2; (5 rows) explain(costs off, verbose)select * from t1 where a = 1::int4; - QUERY PLAN ---------------------------------- - Bitmap Heap Scan on public.t1 + QUERY PLAN +----------------------------------- + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = 1) -> Bitmap Index Scan on idx1 @@ -330,7 +328,7 @@ explain(costs off, verbose)select * from t1 where a = 1::int4; explain(costs off, verbose)select * from t1 where a = 1::int8; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = 1::bigint) -> Bitmap Index Scan on idx1 @@ -353,7 +351,7 @@ analyze t2; explain(costs off, verbose)select * from t2 where a = 1::uint1; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = '1'::uint8) -> Bitmap Index Scan on idx2 @@ -363,7 +361,7 @@ explain(costs off, verbose)select * from t2 where a = 1::uint1; explain(costs off, verbose)select * from t2 where a = 1::uint2; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = '1'::uint2) -> Bitmap Index Scan on idx2 @@ -373,7 +371,7 @@ explain(costs off, verbose)select * from t2 where a = 1::uint2; explain(costs off, verbose)select * from t2 where a = 1::uint4; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = '1'::uint4) -> Bitmap Index Scan on idx2 @@ -383,7 +381,7 @@ explain(costs off, verbose)select * from t2 where a = 1::uint4; explain(costs off, verbose)select * from t2 where a = 1::uint8; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = 
'1'::uint8) -> Bitmap Index Scan on idx2 @@ -393,7 +391,7 @@ explain(costs off, verbose)select * from t2 where a = 1::uint8; explain(costs off, verbose)select * from t2 where a = 1::int1; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = 1::bigint) -> Bitmap Index Scan on idx2 @@ -403,7 +401,7 @@ explain(costs off, verbose)select * from t2 where a = 1::int1; explain(costs off, verbose)select * from t2 where a = 1::int2; QUERY PLAN ------------------------------------------ - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = 1::smallint) -> Bitmap Index Scan on idx2 @@ -411,9 +409,9 @@ explain(costs off, verbose)select * from t2 where a = 1::int2; (5 rows) explain(costs off, verbose)select * from t2 where a = 1::int4; - QUERY PLAN ---------------------------------- - Bitmap Heap Scan on public.t2 + QUERY PLAN +----------------------------------- + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = 1) -> Bitmap Index Scan on idx2 @@ -423,7 +421,7 @@ explain(costs off, verbose)select * from t2 where a = 1::int4; explain(costs off, verbose)select * from t2 where a = 1::int8; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = 1::bigint) -> Bitmap Index Scan on idx2 @@ -433,7 +431,7 @@ explain(costs off, verbose)select * from t2 where a = 1::int8; explain(costs off, verbose)select * from t2 where a > 1::uint1 and a < 3::uint1; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a > '1'::uint8) AND (t2.a < '3'::uint8)) -> Bitmap Index Scan on idx2 @@ -444,7 +442,7 @@ explain(costs off, verbose)select * from t2 where a > 1::uint2 and a < 3::uint2; QUERY PLAN 
------------------------------------------------------------- [Bypass] - Index Only Scan using idx2 on public.t2 + Index Only Scan using idx2 on uint_index.t2 Output: a Index Cond: ((t2.a > '1'::uint2) AND (t2.a < '3'::uint2)) (4 rows) @@ -452,7 +450,7 @@ explain(costs off, verbose)select * from t2 where a > 1::uint2 and a < 3::uint2; explain(costs off, verbose)select * from t2 where a > 1::uint4 and a < 3::uint4; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a > '1'::uint4) AND (t2.a < '3'::uint4)) -> Bitmap Index Scan on idx2 @@ -462,7 +460,7 @@ explain(costs off, verbose)select * from t2 where a > 1::uint4 and a < 3::uint4; explain(costs off, verbose)select * from t2 where a > 1::uint8 and a < 3::uint8; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a > '1'::uint8) AND (t2.a < '3'::uint8)) -> Bitmap Index Scan on idx2 @@ -472,7 +470,7 @@ explain(costs off, verbose)select * from t2 where a > 1::uint8 and a < 3::uint8; explain(costs off, verbose)select * from t2 where a > 1::int1 and a < 3::int1; QUERY PLAN ----------------------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a > 1::bigint) AND (t2.a < 3::bigint)) -> Bitmap Index Scan on idx2 @@ -482,7 +480,7 @@ explain(costs off, verbose)select * from t2 where a > 1::int1 and a < 3::int1; explain(costs off, verbose)select * from t2 where a > 1::int2 and a < 3::int2; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a > 1::smallint) AND (t2.a < 3::smallint)) -> Bitmap Index Scan on idx2 @@ -492,7 +490,7 @@ explain(costs off, verbose)select * 
from t2 where a > 1::int2 and a < 3::int2; explain(costs off, verbose)select * from t2 where a > 1::int4 and a < 3::int4; QUERY PLAN ------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a > 1) AND (t2.a < 3)) -> Bitmap Index Scan on idx2 @@ -502,7 +500,7 @@ explain(costs off, verbose)select * from t2 where a > 1::int4 and a < 3::int4; explain(costs off, verbose)select * from t2 where a > 1::int8 and a < 3::int8; QUERY PLAN ----------------------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a > 1::bigint) AND (t2.a < 3::bigint)) -> Bitmap Index Scan on idx2 @@ -512,7 +510,7 @@ explain(costs off, verbose)select * from t2 where a > 1::int8 and a < 3::int8; explain(costs off, verbose)select * from t2 where a >= 1::uint1 and a <= 3::uint1; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a >= '1'::uint8) AND (t2.a <= '3'::uint8)) -> Bitmap Index Scan on idx2 @@ -522,7 +520,7 @@ explain(costs off, verbose)select * from t2 where a >= 1::uint1 and a <= 3::uint explain(costs off, verbose)select * from t2 where a >= 1::uint2 and a <= 3::uint2; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a >= '1'::uint2) AND (t2.a <= '3'::uint2)) -> Bitmap Index Scan on idx2 @@ -532,7 +530,7 @@ explain(costs off, verbose)select * from t2 where a >= 1::uint2 and a <= 3::uint explain(costs off, verbose)select * from t2 where a >= 1::uint4 and a <= 3::uint4; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a >= '1'::uint4) 
AND (t2.a <= '3'::uint4)) -> Bitmap Index Scan on idx2 @@ -542,7 +540,7 @@ explain(costs off, verbose)select * from t2 where a >= 1::uint4 and a <= 3::uint explain(costs off, verbose)select * from t2 where a >= 1::uint8 and a <= 3::uint8; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a >= '1'::uint8) AND (t2.a <= '3'::uint8)) -> Bitmap Index Scan on idx2 @@ -552,7 +550,7 @@ explain(costs off, verbose)select * from t2 where a >= 1::uint8 and a <= 3::uint explain(costs off, verbose)select * from t2 where a >= 1::int1 and a <= 3::int1; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a >= 1::bigint) AND (t2.a <= 3::bigint)) -> Bitmap Index Scan on idx2 @@ -562,7 +560,7 @@ explain(costs off, verbose)select * from t2 where a >= 1::int1 and a <= 3::int1; explain(costs off, verbose)select * from t2 where a >= 1::int2 and a <= 3::int2; QUERY PLAN ----------------------------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a >= 1::smallint) AND (t2.a <= 3::smallint)) -> Bitmap Index Scan on idx2 @@ -572,7 +570,7 @@ explain(costs off, verbose)select * from t2 where a >= 1::int2 and a <= 3::int2; explain(costs off, verbose)select * from t2 where a >= 1::int4 and a <= 3::int4; QUERY PLAN --------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a >= 1) AND (t2.a <= 3)) -> Bitmap Index Scan on idx2 @@ -582,7 +580,7 @@ explain(costs off, verbose)select * from t2 where a >= 1::int4 and a <= 3::int4; explain(costs off, verbose)select * from t2 where a >= 1::int8 and a <= 3::int8; QUERY PLAN 
------------------------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a >= 1::bigint) AND (t2.a <= 3::bigint)) -> Bitmap Index Scan on idx2 @@ -595,7 +593,7 @@ analyze t2; explain(costs off, verbose)select * from t2 where a = 1::uint1; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = '1'::uint8) -> Bitmap Index Scan on idx2 @@ -605,7 +603,7 @@ explain(costs off, verbose)select * from t2 where a = 1::uint1; explain(costs off, verbose)select * from t2 where a = 1::uint2; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = '1'::uint2) -> Bitmap Index Scan on idx2 @@ -615,7 +613,7 @@ explain(costs off, verbose)select * from t2 where a = 1::uint2; explain(costs off, verbose)select * from t2 where a = 1::uint4; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = '1'::uint4) -> Bitmap Index Scan on idx2 @@ -625,7 +623,7 @@ explain(costs off, verbose)select * from t2 where a = 1::uint4; explain(costs off, verbose)select * from t2 where a = 1::uint8; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = '1'::uint8) -> Bitmap Index Scan on idx2 @@ -635,7 +633,7 @@ explain(costs off, verbose)select * from t2 where a = 1::uint8; explain(costs off, verbose)select * from t2 where a = 1::int1; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = 1::bigint) -> Bitmap Index Scan on idx2 @@ -645,7 +643,7 @@ explain(costs off, verbose)select * from t2 where a = 1::int1; explain(costs off, 
verbose)select * from t2 where a = 1::int2; QUERY PLAN ------------------------------------------ - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = 1::smallint) -> Bitmap Index Scan on idx2 @@ -653,9 +651,9 @@ explain(costs off, verbose)select * from t2 where a = 1::int2; (5 rows) explain(costs off, verbose)select * from t2 where a = 1::int4; - QUERY PLAN ---------------------------------- - Bitmap Heap Scan on public.t2 + QUERY PLAN +----------------------------------- + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = 1) -> Bitmap Index Scan on idx2 @@ -665,7 +663,7 @@ explain(costs off, verbose)select * from t2 where a = 1::int4; explain(costs off, verbose)select * from t2 where a = 1::int8; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = 1::bigint) -> Bitmap Index Scan on idx2 @@ -688,7 +686,7 @@ analyze t3; explain(costs off, verbose)select * from t3 where a = 1::uint1; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = '1'::uint8) -> Bitmap Index Scan on idx3 @@ -698,7 +696,7 @@ explain(costs off, verbose)select * from t3 where a = 1::uint1; explain(costs off, verbose)select * from t3 where a = 1::uint2; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = '1'::uint8) -> Bitmap Index Scan on idx3 @@ -708,7 +706,7 @@ explain(costs off, verbose)select * from t3 where a = 1::uint2; explain(costs off, verbose)select * from t3 where a = 1::uint4; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = '1'::uint4) -> Bitmap Index Scan on idx3 @@ -718,7 +716,7 @@ explain(costs off, verbose)select * from 
t3 where a = 1::uint4; explain(costs off, verbose)select * from t3 where a = 1::uint8; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = '1'::uint8) -> Bitmap Index Scan on idx3 @@ -728,7 +726,7 @@ explain(costs off, verbose)select * from t3 where a = 1::uint8; explain(costs off, verbose)select * from t3 where a = 1::int1; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = 1::bigint) -> Bitmap Index Scan on idx3 @@ -738,7 +736,7 @@ explain(costs off, verbose)select * from t3 where a = 1::int1; explain(costs off, verbose)select * from t3 where a = 1::int2; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = 1::bigint) -> Bitmap Index Scan on idx3 @@ -746,9 +744,9 @@ explain(costs off, verbose)select * from t3 where a = 1::int2; (5 rows) explain(costs off, verbose)select * from t3 where a = 1::int4; - QUERY PLAN ---------------------------------- - Bitmap Heap Scan on public.t3 + QUERY PLAN +----------------------------------- + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = 1) -> Bitmap Index Scan on idx3 @@ -758,7 +756,7 @@ explain(costs off, verbose)select * from t3 where a = 1::int4; explain(costs off, verbose)select * from t3 where a = 1::int8; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = 1::bigint) -> Bitmap Index Scan on idx3 @@ -768,7 +766,7 @@ explain(costs off, verbose)select * from t3 where a = 1::int8; explain(costs off, verbose)select * from t3 where a > 1::uint1 and a < 3::uint1; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: 
a Recheck Cond: ((t3.a > '1'::uint8) AND (t3.a < '3'::uint8)) -> Bitmap Index Scan on idx3 @@ -778,7 +776,7 @@ explain(costs off, verbose)select * from t3 where a > 1::uint1 and a < 3::uint1; explain(costs off, verbose)select * from t3 where a > 1::uint2 and a < 3::uint2; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a > '1'::uint8) AND (t3.a < '3'::uint8)) -> Bitmap Index Scan on idx3 @@ -789,7 +787,7 @@ explain(costs off, verbose)select * from t3 where a > 1::uint4 and a < 3::uint4; QUERY PLAN ------------------------------------------------------------- [Bypass] - Index Only Scan using idx3 on public.t3 + Index Only Scan using idx3 on uint_index.t3 Output: a Index Cond: ((t3.a > '1'::uint4) AND (t3.a < '3'::uint4)) (4 rows) @@ -797,7 +795,7 @@ explain(costs off, verbose)select * from t3 where a > 1::uint4 and a < 3::uint4; explain(costs off, verbose)select * from t3 where a > 1::uint8 and a < 3::uint8; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a > '1'::uint8) AND (t3.a < '3'::uint8)) -> Bitmap Index Scan on idx3 @@ -807,7 +805,7 @@ explain(costs off, verbose)select * from t3 where a > 1::uint8 and a < 3::uint8; explain(costs off, verbose)select * from t3 where a > 1::int1 and a < 3::int1; QUERY PLAN ----------------------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a > 1::bigint) AND (t3.a < 3::bigint)) -> Bitmap Index Scan on idx3 @@ -817,7 +815,7 @@ explain(costs off, verbose)select * from t3 where a > 1::int1 and a < 3::int1; explain(costs off, verbose)select * from t3 where a > 1::int2 and a < 3::int2; QUERY PLAN ----------------------------------------------------------------- - Bitmap Heap Scan on 
public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a > 1::bigint) AND (t3.a < 3::bigint)) -> Bitmap Index Scan on idx3 @@ -827,7 +825,7 @@ explain(costs off, verbose)select * from t3 where a > 1::int2 and a < 3::int2; explain(costs off, verbose)select * from t3 where a > 1::int4 and a < 3::int4; QUERY PLAN ------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a > 1) AND (t3.a < 3)) -> Bitmap Index Scan on idx3 @@ -837,7 +835,7 @@ explain(costs off, verbose)select * from t3 where a > 1::int4 and a < 3::int4; explain(costs off, verbose)select * from t3 where a > 1::int8 and a < 3::int8; QUERY PLAN ----------------------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a > 1::bigint) AND (t3.a < 3::bigint)) -> Bitmap Index Scan on idx3 @@ -847,7 +845,7 @@ explain(costs off, verbose)select * from t3 where a > 1::int8 and a < 3::int8; explain(costs off, verbose)select * from t3 where a >= 1::uint1 and a <= 3::uint1; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a >= '1'::uint8) AND (t3.a <= '3'::uint8)) -> Bitmap Index Scan on idx3 @@ -857,7 +855,7 @@ explain(costs off, verbose)select * from t3 where a >= 1::uint1 and a <= 3::uint explain(costs off, verbose)select * from t3 where a >= 1::uint2 and a <= 3::uint2; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a >= '1'::uint8) AND (t3.a <= '3'::uint8)) -> Bitmap Index Scan on idx3 @@ -867,7 +865,7 @@ explain(costs off, verbose)select * from t3 where a >= 1::uint2 and a <= 3::uint explain(costs off, verbose)select * from t3 where a >= 1::uint4 and a <= 
3::uint4; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a >= '1'::uint4) AND (t3.a <= '3'::uint4)) -> Bitmap Index Scan on idx3 @@ -877,7 +875,7 @@ explain(costs off, verbose)select * from t3 where a >= 1::uint4 and a <= 3::uint explain(costs off, verbose)select * from t3 where a >= 1::uint8 and a <= 3::uint8; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a >= '1'::uint8) AND (t3.a <= '3'::uint8)) -> Bitmap Index Scan on idx3 @@ -887,7 +885,7 @@ explain(costs off, verbose)select * from t3 where a >= 1::uint8 and a <= 3::uint explain(costs off, verbose)select * from t3 where a >= 1::int1 and a <= 3::int1; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a >= 1::bigint) AND (t3.a <= 3::bigint)) -> Bitmap Index Scan on idx3 @@ -897,7 +895,7 @@ explain(costs off, verbose)select * from t3 where a >= 1::int1 and a <= 3::int1; explain(costs off, verbose)select * from t3 where a >= 1::int2 and a <= 3::int2; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a >= 1::bigint) AND (t3.a <= 3::bigint)) -> Bitmap Index Scan on idx3 @@ -907,7 +905,7 @@ explain(costs off, verbose)select * from t3 where a >= 1::int2 and a <= 3::int2; explain(costs off, verbose)select * from t3 where a >= 1::int4 and a <= 3::int4; QUERY PLAN --------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a >= 1) AND (t3.a <= 3)) -> Bitmap Index Scan on idx3 @@ -917,7 +915,7 @@ explain(costs off, verbose)select 
* from t3 where a >= 1::int4 and a <= 3::int4; explain(costs off, verbose)select * from t3 where a >= 1::int8 and a <= 3::int8; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a >= 1::bigint) AND (t3.a <= 3::bigint)) -> Bitmap Index Scan on idx3 @@ -930,7 +928,7 @@ analyze t3; explain(costs off, verbose)select * from t3 where a = 1::uint1; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = '1'::uint8) -> Bitmap Index Scan on idx3 @@ -940,7 +938,7 @@ explain(costs off, verbose)select * from t3 where a = 1::uint1; explain(costs off, verbose)select * from t3 where a = 1::uint2; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = '1'::uint8) -> Bitmap Index Scan on idx3 @@ -950,7 +948,7 @@ explain(costs off, verbose)select * from t3 where a = 1::uint2; explain(costs off, verbose)select * from t3 where a = 1::uint4; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = '1'::uint4) -> Bitmap Index Scan on idx3 @@ -960,7 +958,7 @@ explain(costs off, verbose)select * from t3 where a = 1::uint4; explain(costs off, verbose)select * from t3 where a = 1::uint8; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = '1'::uint8) -> Bitmap Index Scan on idx3 @@ -970,7 +968,7 @@ explain(costs off, verbose)select * from t3 where a = 1::uint8; explain(costs off, verbose)select * from t3 where a = 1::int1; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = 1::bigint) 
-> Bitmap Index Scan on idx3 @@ -980,7 +978,7 @@ explain(costs off, verbose)select * from t3 where a = 1::int1; explain(costs off, verbose)select * from t3 where a = 1::int2; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = 1::bigint) -> Bitmap Index Scan on idx3 @@ -988,9 +986,9 @@ explain(costs off, verbose)select * from t3 where a = 1::int2; (5 rows) explain(costs off, verbose)select * from t3 where a = 1::int4; - QUERY PLAN ---------------------------------- - Bitmap Heap Scan on public.t3 + QUERY PLAN +----------------------------------- + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = 1) -> Bitmap Index Scan on idx3 @@ -1000,7 +998,7 @@ explain(costs off, verbose)select * from t3 where a = 1::int4; explain(costs off, verbose)select * from t3 where a = 1::int8; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = 1::bigint) -> Bitmap Index Scan on idx3 @@ -1023,7 +1021,7 @@ analyze t4; explain(costs off, verbose)select * from t4 where a = 1::uint1; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: (t4.a = '1'::uint8) -> Bitmap Index Scan on idx4 @@ -1033,7 +1031,7 @@ explain(costs off, verbose)select * from t4 where a = 1::uint1; explain(costs off, verbose)select * from t4 where a = 1::uint2; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: (t4.a = '1'::uint8) -> Bitmap Index Scan on idx4 @@ -1043,7 +1041,7 @@ explain(costs off, verbose)select * from t4 where a = 1::uint2; explain(costs off, verbose)select * from t4 where a = 1::uint4; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 
Output: a Recheck Cond: (t4.a = '1'::uint8) -> Bitmap Index Scan on idx4 @@ -1053,7 +1051,7 @@ explain(costs off, verbose)select * from t4 where a = 1::uint4; explain(costs off, verbose)select * from t4 where a = 1::uint8; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: (t4.a = '1'::uint8) -> Bitmap Index Scan on idx4 @@ -1063,7 +1061,7 @@ explain(costs off, verbose)select * from t4 where a = 1::uint8; explain(costs off, verbose)select * from t4 where a = 1::int1; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: (t4.a = 1::bigint) -> Bitmap Index Scan on idx4 @@ -1073,7 +1071,7 @@ explain(costs off, verbose)select * from t4 where a = 1::int1; explain(costs off, verbose)select * from t4 where a = 1::int2; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: (t4.a = 1::bigint) -> Bitmap Index Scan on idx4 @@ -1083,7 +1081,7 @@ explain(costs off, verbose)select * from t4 where a = 1::int2; explain(costs off, verbose)select * from t4 where a = 1::int4; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: (t4.a = 1::bigint) -> Bitmap Index Scan on idx4 @@ -1093,7 +1091,7 @@ explain(costs off, verbose)select * from t4 where a = 1::int4; explain(costs off, verbose)select * from t4 where a = 1::int8; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: (t4.a = 1::bigint) -> Bitmap Index Scan on idx4 @@ -1104,7 +1102,7 @@ explain(costs off, verbose)select * from t4 where a > 1::uint1 and a < 3::uint1; QUERY PLAN ------------------------------------------------------------- [Bypass] - Index Only Scan using idx4 on 
public.t4 + Index Only Scan using idx4 on uint_index.t4 Output: a Index Cond: ((t4.a > '1'::uint8) AND (t4.a < '3'::uint8)) (4 rows) @@ -1113,7 +1111,7 @@ explain(costs off, verbose)select * from t4 where a > 1::uint2 and a < 3::uint2; QUERY PLAN ------------------------------------------------------------- [Bypass] - Index Only Scan using idx4 on public.t4 + Index Only Scan using idx4 on uint_index.t4 Output: a Index Cond: ((t4.a > '1'::uint8) AND (t4.a < '3'::uint8)) (4 rows) @@ -1122,7 +1120,7 @@ explain(costs off, verbose)select * from t4 where a > 1::uint4 and a < 3::uint4; QUERY PLAN ------------------------------------------------------------- [Bypass] - Index Only Scan using idx4 on public.t4 + Index Only Scan using idx4 on uint_index.t4 Output: a Index Cond: ((t4.a > '1'::uint8) AND (t4.a < '3'::uint8)) (4 rows) @@ -1131,7 +1129,7 @@ explain(costs off, verbose)select * from t4 where a > 1::uint8 and a < 3::uint8; QUERY PLAN ------------------------------------------------------------- [Bypass] - Index Only Scan using idx4 on public.t4 + Index Only Scan using idx4 on uint_index.t4 Output: a Index Cond: ((t4.a > '1'::uint8) AND (t4.a < '3'::uint8)) (4 rows) @@ -1139,7 +1137,7 @@ explain(costs off, verbose)select * from t4 where a > 1::uint8 and a < 3::uint8; explain(costs off, verbose)select * from t4 where a > 1::int1 and a < 3::int1; QUERY PLAN ----------------------------------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: ((t4.a > 1::bigint) AND (t4.a < 3::bigint)) -> Bitmap Index Scan on idx4 @@ -1149,7 +1147,7 @@ explain(costs off, verbose)select * from t4 where a > 1::int1 and a < 3::int1; explain(costs off, verbose)select * from t4 where a > 1::int2 and a < 3::int2; QUERY PLAN ----------------------------------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: ((t4.a > 1::bigint) AND (t4.a < 3::bigint)) 
-> Bitmap Index Scan on idx4 @@ -1159,7 +1157,7 @@ explain(costs off, verbose)select * from t4 where a > 1::int2 and a < 3::int2; explain(costs off, verbose)select * from t4 where a > 1::int4 and a < 3::int4; QUERY PLAN ----------------------------------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: ((t4.a > 1::bigint) AND (t4.a < 3::bigint)) -> Bitmap Index Scan on idx4 @@ -1169,7 +1167,7 @@ explain(costs off, verbose)select * from t4 where a > 1::int4 and a < 3::int4; explain(costs off, verbose)select * from t4 where a > 1::int8 and a < 3::int8; QUERY PLAN ----------------------------------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: ((t4.a > 1::bigint) AND (t4.a < 3::bigint)) -> Bitmap Index Scan on idx4 @@ -1179,7 +1177,7 @@ explain(costs off, verbose)select * from t4 where a > 1::int8 and a < 3::int8; explain(costs off, verbose)select * from t4 where a >= 1::uint1 and a <= 3::uint1; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: ((t4.a >= '1'::uint8) AND (t4.a <= '3'::uint8)) -> Bitmap Index Scan on idx4 @@ -1189,7 +1187,7 @@ explain(costs off, verbose)select * from t4 where a >= 1::uint1 and a <= 3::uint explain(costs off, verbose)select * from t4 where a >= 1::uint2 and a <= 3::uint2; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: ((t4.a >= '1'::uint8) AND (t4.a <= '3'::uint8)) -> Bitmap Index Scan on idx4 @@ -1199,7 +1197,7 @@ explain(costs off, verbose)select * from t4 where a >= 1::uint2 and a <= 3::uint explain(costs off, verbose)select * from t4 where a >= 1::uint4 and a <= 3::uint4; QUERY PLAN 
--------------------------------------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: ((t4.a >= '1'::uint8) AND (t4.a <= '3'::uint8)) -> Bitmap Index Scan on idx4 @@ -1209,7 +1207,7 @@ explain(costs off, verbose)select * from t4 where a >= 1::uint4 and a <= 3::uint explain(costs off, verbose)select * from t4 where a >= 1::uint8 and a <= 3::uint8; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: ((t4.a >= '1'::uint8) AND (t4.a <= '3'::uint8)) -> Bitmap Index Scan on idx4 @@ -1219,7 +1217,7 @@ explain(costs off, verbose)select * from t4 where a >= 1::uint8 and a <= 3::uint explain(costs off, verbose)select * from t4 where a >= 1::int1 and a <= 3::int1; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: ((t4.a >= 1::bigint) AND (t4.a <= 3::bigint)) -> Bitmap Index Scan on idx4 @@ -1229,7 +1227,7 @@ explain(costs off, verbose)select * from t4 where a >= 1::int1 and a <= 3::int1; explain(costs off, verbose)select * from t4 where a >= 1::int2 and a <= 3::int2; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: ((t4.a >= 1::bigint) AND (t4.a <= 3::bigint)) -> Bitmap Index Scan on idx4 @@ -1239,7 +1237,7 @@ explain(costs off, verbose)select * from t4 where a >= 1::int2 and a <= 3::int2; explain(costs off, verbose)select * from t4 where a >= 1::int4 and a <= 3::int4; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: ((t4.a >= 1::bigint) AND (t4.a <= 3::bigint)) -> Bitmap Index Scan on idx4 @@ -1249,7 +1247,7 @@ 
explain(costs off, verbose)select * from t4 where a >= 1::int4 and a <= 3::int4; explain(costs off, verbose)select * from t4 where a >= 1::int8 and a <= 3::int8; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: ((t4.a >= 1::bigint) AND (t4.a <= 3::bigint)) -> Bitmap Index Scan on idx4 @@ -1262,7 +1260,7 @@ analyze t4; explain(costs off, verbose)select * from t4 where a = 1::uint1; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: (t4.a = '1'::uint8) -> Bitmap Index Scan on idx4 @@ -1272,7 +1270,7 @@ explain(costs off, verbose)select * from t4 where a = 1::uint1; explain(costs off, verbose)select * from t4 where a = 1::uint2; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: (t4.a = '1'::uint8) -> Bitmap Index Scan on idx4 @@ -1282,7 +1280,7 @@ explain(costs off, verbose)select * from t4 where a = 1::uint2; explain(costs off, verbose)select * from t4 where a = 1::uint4; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: (t4.a = '1'::uint8) -> Bitmap Index Scan on idx4 @@ -1292,7 +1290,7 @@ explain(costs off, verbose)select * from t4 where a = 1::uint4; explain(costs off, verbose)select * from t4 where a = 1::uint8; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: (t4.a = '1'::uint8) -> Bitmap Index Scan on idx4 @@ -1302,7 +1300,7 @@ explain(costs off, verbose)select * from t4 where a = 1::uint8; explain(costs off, verbose)select * from t4 where a = 1::int1; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on 
uint_index.t4 Output: a Recheck Cond: (t4.a = 1::bigint) -> Bitmap Index Scan on idx4 @@ -1312,7 +1310,7 @@ explain(costs off, verbose)select * from t4 where a = 1::int1; explain(costs off, verbose)select * from t4 where a = 1::int2; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: (t4.a = 1::bigint) -> Bitmap Index Scan on idx4 @@ -1322,7 +1320,7 @@ explain(costs off, verbose)select * from t4 where a = 1::int2; explain(costs off, verbose)select * from t4 where a = 1::int4; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: (t4.a = 1::bigint) -> Bitmap Index Scan on idx4 @@ -1332,12 +1330,17 @@ explain(costs off, verbose)select * from t4 where a = 1::int4; explain(costs off, verbose)select * from t4 where a = 1::int8; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: (t4.a = 1::bigint) -> Bitmap Index Scan on idx4 Index Cond: (t4.a = 1::bigint) (5 rows) -\c postgres -drop database uint_index; +drop schema uint_index cascade; +NOTICE: drop cascades to 4 other objects +DETAIL: drop cascades to table t1 +drop cascades to table t2 +drop cascades to table t3 +drop cascades to table t4 +reset current_schema; diff --git a/contrib/dolphin/expected/uint_join.out b/contrib/dolphin/expected/uint_join.out index 62691e8e17d421042c8a358a1d2774d2ddf12414..961986b9ba59c179142cc29eb6841b2e7a33dd39 100644 --- a/contrib/dolphin/expected/uint_join.out +++ b/contrib/dolphin/expected/uint_join.out @@ -1,7 +1,5 @@ -drop database if exists uint_join; -NOTICE: database "uint_join" does not exist, skipping -create database uint_join dbcompatibility 'b'; -\c uint_join +create schema uint_join; +set current_schema to 'uint_join'; create table t1(a int2, b uint2); create table t2(a uint4, b uint4); insert into t1 
values(1, 1); @@ -103,5 +101,8 @@ select /*+ mergejoin(t1 t2)*/ * from t1 join t2; -1 | 1 | 3 | 1 (9 rows) -\c postgres -drop database uint_join; +drop schema uint_join cascade; +NOTICE: drop cascades to 2 other objects +DETAIL: drop cascades to table t1 +drop cascades to table t2 +reset current_schema; diff --git a/contrib/dolphin/expected/uint_mi.out b/contrib/dolphin/expected/uint_mi.out index 0fa0b418a1240556622a7262c94c50cb1ab3b157..5666918d188d3ebe07ca87b5cd58468c38ce0734 100644 --- a/contrib/dolphin/expected/uint_mi.out +++ b/contrib/dolphin/expected/uint_mi.out @@ -1,7 +1,5 @@ -drop database if exists uint_mi; -NOTICE: database "uint_mi" does not exist, skipping -create database uint_mi dbcompatibility 'b'; -\c uint_mi +create schema uint_mi; +set current_schema to 'uint_mi'; --uint8 select 18446744073709551615::uint8 - 0::int1; ?column? @@ -772,5 +770,5 @@ select 0::int1 - 1::uint4; ERROR: int unsigned out of range select 0::int1 - 1::uint8; ERROR: bigint unsigned out of range -\c postgres -drop database uint_mi +drop schema uint_mi cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/uint_mod.out b/contrib/dolphin/expected/uint_mod.out index 2a4b86aa75417710998678a2da05206df387949c..6d09e47c7f409a3af1262594dc7cb54b3c80a2c4 100644 --- a/contrib/dolphin/expected/uint_mod.out +++ b/contrib/dolphin/expected/uint_mod.out @@ -1,7 +1,5 @@ -drop database if exists uint_mod; -NOTICE: database "uint_mod" does not exist, skipping -create database uint_mod dbcompatibility 'b'; -\c uint_mod +create schema uint_mod; +set current_schema to 'uint_mod'; --uint8 select 18446744073709551615::uint8 % 0::int1; ?column? 
@@ -1000,5 +998,5 @@ select 127::int1 % 1::uint8; 0 (1 row) -\c postgres -drop database uint_mod +drop schema uint_mod cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/uint_mod2.out b/contrib/dolphin/expected/uint_mod2.out index 8adac953d9c3399fe841d4038f40abbb4d6a3492..9d137cc3ae4cd7a918ecff4c8c75dcd34107e884 100644 --- a/contrib/dolphin/expected/uint_mod2.out +++ b/contrib/dolphin/expected/uint_mod2.out @@ -1,7 +1,5 @@ -drop database if exists uint_mod2; -NOTICE: database "uint_mod2" does not exist, skipping -create database uint_mod2 dbcompatibility 'b'; -\c uint_mod2 +create schema uint_mod2; +set current_schema to 'uint_mod2'; --uint8 select 18446744073709551615::uint8 mod 0::int1; b_mod @@ -1000,5 +998,5 @@ select 127::int1 mod 1::uint8; 0 (1 row) -\c postgres -drop database uint_mod2 +drop schema uint_mod2 cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/uint_mul.out b/contrib/dolphin/expected/uint_mul.out index 636c606ce515d265cdf9874d0c750ee44365b437..0024bc6ebdb3d23187b44da4562792b59d85151e 100644 --- a/contrib/dolphin/expected/uint_mul.out +++ b/contrib/dolphin/expected/uint_mul.out @@ -1,7 +1,5 @@ -drop database if exists uint_mul; -NOTICE: database "uint_mul" does not exist, skipping -create database uint_mul dbcompatibility 'b'; -\c uint_mul +create schema uint_mul; +set current_schema to 'uint_mul'; --uint8 select 18446744073709551615::uint8 * 0::int1; ?column? 
@@ -476,5 +474,5 @@ select 127::int1 * 1::uint8; 127 (1 row) -\c postgres -drop database uint_mul +drop schema uint_mul cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/uint_numeric.out b/contrib/dolphin/expected/uint_numeric.out index de5f8b9cb52c1f82ebfbcb9006582c2e316f70b6..14faf565e3c54d7dc8b200d3752b1802451b856c 100644 --- a/contrib/dolphin/expected/uint_numeric.out +++ b/contrib/dolphin/expected/uint_numeric.out @@ -1,7 +1,5 @@ -drop database if exists uint_numeric; -NOTICE: database "uint_numeric" does not exist, skipping -create database uint_numeric dbcompatibility 'b'; -\c uint_numeric +create schema uint_numeric; +set current_schema to 'uint_numeric'; select (-1)::numeric::uint1; uint1 ------- @@ -323,5 +321,11 @@ CONTEXT: referenced column: a insert into t4 select c from num; ERROR: bigint unsigned out of range CONTEXT: referenced column: a -\c postgres -drop database uint_numeric; +drop schema uint_numeric cascade; +NOTICE: drop cascades to 5 other objects +DETAIL: drop cascades to table t1 +drop cascades to table t2 +drop cascades to table t3 +drop cascades to table t4 +drop cascades to table num +reset current_schema; diff --git a/contrib/dolphin/expected/uint_operator.out b/contrib/dolphin/expected/uint_operator.out index b10f4e8b1c2279628d975f49ce9cfd01c0ed6191..cd4b59a888dcbd5303bec018cdb70db2c20b8359 100644 --- a/contrib/dolphin/expected/uint_operator.out +++ b/contrib/dolphin/expected/uint_operator.out @@ -1,7 +1,5 @@ -drop database if exists uint_operator; -NOTICE: database "uint_operator" does not exist, skipping -create database uint_operator dbcompatibility 'b'; -\c uint_operator +create schema uint_operator; +set current_schema to 'uint_operator'; -- > select 1::uint1 > 1::uint1; ?column? 
@@ -1622,5 +1620,5 @@ select ~0::uint8; 18446744073709551615 (1 row) -\c postgres -drop database uint_operator; +drop schema uint_operator cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/uint_or.out b/contrib/dolphin/expected/uint_or.out index 0951e4fef0743ff03945428d50761acc91ecb209..ef6ff85e7ee3108f8b95578329b7fe553a48bade 100644 --- a/contrib/dolphin/expected/uint_or.out +++ b/contrib/dolphin/expected/uint_or.out @@ -1,7 +1,5 @@ -drop database if exists uint_or; -NOTICE: database "uint_or" does not exist, skipping -create database uint_or dbcompatibility 'b'; -\c uint_or +create schema uint_or; +set current_schema to 'uint_or'; --uint8 select 18446744073709551615::uint8 | 0::int1; ?column? @@ -826,5 +824,5 @@ select 127::int1 | 1::uint8; 127 (1 row) -\c postgres -drop database uint_or +drop schema uint_or cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/uint_partition.out b/contrib/dolphin/expected/uint_partition.out index 268403779781c64e7f5f5c047481978e8a1ea9c1..32c12030ee2538ff1c9bc8ae4226cf4d97640de4 100644 --- a/contrib/dolphin/expected/uint_partition.out +++ b/contrib/dolphin/expected/uint_partition.out @@ -1,7 +1,5 @@ -drop database if exists uint_partition; -NOTICE: database "uint_partition" does not exist, skipping -create database uint_partition dbcompatibility 'b'; -\c uint_partition +create schema uint_partition; +set current_schema to 'uint_partition'; CREATE TABLE t1 ( col1 uint4 NOT NULL, @@ -255,5 +253,18 @@ insert into t_unsigned_0030_7 values(1); create table t_unsigned_0030_8(col01 bigint unsigned) partition by range(col01)(partition p start(1) end(255) every(50)); insert into t_unsigned_0030_8 values(1); -\c postgres -drop database uint_partition; +drop schema uint_partition cascade; +NOTICE: drop cascades to 12 other objects +DETAIL: drop cascades to table t1 +drop cascades to table t2 +drop cascades to table t3 +drop cascades to table a1 +drop cascades to table a2 +drop cascades to table 
subpartition_01 +drop cascades to table subpartition_02 +drop cascades to table subpartition_03 +drop cascades to table t_unsigned_0030_5 +drop cascades to table t_unsigned_0030_6 +drop cascades to table t_unsigned_0030_7 +drop cascades to table t_unsigned_0030_8 +reset current_schema; diff --git a/contrib/dolphin/expected/uint_pl.out b/contrib/dolphin/expected/uint_pl.out index 8be8bfe6cd14ce2a0d9a2b6ee4ab9aa5c62df2bc..d6943bf9a8f3354d63ece4636ed097de80054bcc 100644 --- a/contrib/dolphin/expected/uint_pl.out +++ b/contrib/dolphin/expected/uint_pl.out @@ -1,7 +1,5 @@ -drop database if exists uint_add; -NOTICE: database "uint_add" does not exist, skipping -create database uint_add dbcompatibility 'b'; -\c uint_add +create schema uint_add; +set current_schema to 'uint_add'; --uint8 select 18446744073709551615::uint8 + 0::int1; ?column? @@ -956,5 +954,5 @@ select 127::int1 + 4294967295::uint4; ERROR: int unsigned out of range select 127::int1 + 18446744073709551615::uint8; ERROR: bigint unsigned out of range -\c postgres -drop database uint_add +drop schema uint_add cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/uint_procedure_col_bypass.out b/contrib/dolphin/expected/uint_procedure_col_bypass.out index 22e73d6c8aab1f84627b8e9cae824d034e76b482..034baab418c25cacbfc6039062be4a7db70eb1dd 100644 --- a/contrib/dolphin/expected/uint_procedure_col_bypass.out +++ b/contrib/dolphin/expected/uint_procedure_col_bypass.out @@ -1,7 +1,5 @@ -drop database if exists uint_procedure_col; -NOTICE: database "uint_procedure_col" does not exist, skipping -create database uint_procedure_col dbcompatibility 'b'; -\c uint_procedure_col +create schema uint_procedure_col; +set current_schema to 'uint_procedure_col'; create procedure test_p1(uint2, uint4) SHIPPABLE VOLATILE as @@ -46,36 +44,40 @@ explain(costs off, verbose) insert into bypass values(1, 10); QUERY PLAN ----------------------------------------- [Bypass] - Insert on public.bypass + Insert on 
uint_procedure_col.bypass -> Result Output: '1'::uint2, '10'::uint4 (4 rows) explain(costs off, verbose) select b from bypass where a = 1; - QUERY PLAN ---------------------------- - Seq Scan on public.bypass + QUERY PLAN +--------------------------------------- + Seq Scan on uint_procedure_col.bypass Output: b Filter: (bypass.a = 1) (3 rows) explain(costs off, verbose) delete from bypass where b = 10; - QUERY PLAN ---------------------------------- - Delete on public.bypass - -> Seq Scan on public.bypass + QUERY PLAN +--------------------------------------------- + Delete on uint_procedure_col.bypass + -> Seq Scan on uint_procedure_col.bypass Output: ctid Filter: (bypass.b = 10) (4 rows) explain(costs off, verbose) update bypass set b = b + 1 where a = 1; - QUERY PLAN ----------------------------------- - Update on public.bypass - -> Seq Scan on public.bypass + QUERY PLAN +--------------------------------------------- + Update on uint_procedure_col.bypass + -> Seq Scan on uint_procedure_col.bypass Output: a, (b + 1), ctid Filter: (bypass.a = 1) (4 rows) -\c postgres -drop database uint_procedure_col; +drop schema uint_procedure_col cascade; +NOTICE: drop cascades to 3 other objects +DETAIL: drop cascades to function test_p1(uint2,uint4) +drop cascades to table test1 +drop cascades to table bypass +reset current_schema; diff --git a/contrib/dolphin/expected/uint_smp.out b/contrib/dolphin/expected/uint_smp.out index 0c6bf9787e340ec43ac6d677e71cd24d27080f37..39470ac7162120df44ff84d334faf8c5f1f66291 100644 --- a/contrib/dolphin/expected/uint_smp.out +++ b/contrib/dolphin/expected/uint_smp.out @@ -1,7 +1,5 @@ -drop database if exists uint_smp; -NOTICE: database "uint_smp" does not exist, skipping -create database uint_smp dbcompatibility 'b'; -\c uint_smp +create schema uint_smp; +set current_schema to 'uint_smp'; set enable_opfusion = on; set opfusion_debug_mode = log; drop table if exists t2 ; @@ -23,7 +21,7 @@ explain(costs off, verbose) select * from t2 where a = 
2; QUERY PLAN ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. - Seq Scan on public.t2 + Seq Scan on uint_smp.t2 Output: a Filter: (t2.a = 2) (4 rows) @@ -33,15 +31,15 @@ explain(costs off, verbose) select * from t2 where a = 2 and t2.a in (select a f ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. Nested Loop Semi Join - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a = 2) AND (public.t2.a > 500)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a = 2) AND (uint_smp.t2.a > 500)) -> Materialize - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a > 500) AND (public.t2.a = 2)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a > 500) AND (uint_smp.t2.a = 2)) (11 rows) set query_dop = 4; @@ -49,7 +47,7 @@ explain(costs off, verbose) select * from t2 where a = 2; QUERY PLAN ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. - Seq Scan on public.t2 + Seq Scan on uint_smp.t2 Output: a Filter: (t2.a = 2) (4 rows) @@ -59,15 +57,15 @@ explain(costs off, verbose) select * from t2 where a = 2 and t2.a in (select a f ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. 
Nested Loop Semi Join - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a = 2) AND (public.t2.a > 500)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a = 2) AND (uint_smp.t2.a > 500)) -> Materialize - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a > 500) AND (public.t2.a = 2)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a > 500) AND (uint_smp.t2.a = 2)) (11 rows) set query_dop = 1; @@ -85,7 +83,7 @@ explain(costs off, verbose) select * from t2 where a = 2; QUERY PLAN ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. - Seq Scan on public.t2 + Seq Scan on uint_smp.t2 Output: a Filter: (t2.a = 2) (4 rows) @@ -95,15 +93,15 @@ explain(costs off, verbose) select * from t2 where a = 2 and t2.a in (select a f ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. 
Nested Loop Semi Join - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a = 2) AND (public.t2.a > 500)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a = 2) AND (uint_smp.t2.a > 500)) -> Materialize - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a > 500) AND (public.t2.a = 2)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a > 500) AND (uint_smp.t2.a = 2)) (11 rows) set query_dop = 4; @@ -111,7 +109,7 @@ explain(costs off, verbose) select * from t2 where a = 2; QUERY PLAN ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. - Seq Scan on public.t2 + Seq Scan on uint_smp.t2 Output: a Filter: (t2.a = 2) (4 rows) @@ -121,19 +119,19 @@ explain(costs off, verbose) select * from t2 where a = 2 and t2.a in (select a f ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. 
Nested Loop Semi Join - Output: public.t2.a + Output: uint_smp.t2.a -> Streaming(type: LOCAL GATHER dop: 1/4) - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a = 2) AND (public.t2.a > 500)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a = 2) AND (uint_smp.t2.a > 500)) -> Materialize - Output: public.t2.a + Output: uint_smp.t2.a -> Streaming(type: LOCAL GATHER dop: 1/4) - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a > 500) AND (public.t2.a = 2)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a > 500) AND (uint_smp.t2.a = 2)) (15 rows) set query_dop = 1; @@ -145,7 +143,7 @@ explain(costs off, verbose) select * from t2 where a = 2; QUERY PLAN ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. - Seq Scan on public.t2 + Seq Scan on uint_smp.t2 Output: a Filter: (t2.a = 2) (4 rows) @@ -155,15 +153,15 @@ explain(costs off, verbose) select * from t2 where a = 2 and t2.a in (select a f ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. 
Nested Loop Semi Join - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a = 2) AND (public.t2.a < 500)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a = 2) AND (uint_smp.t2.a < 500)) -> Materialize - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a < 500) AND (public.t2.a = 2)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a < 500) AND (uint_smp.t2.a = 2)) (11 rows) set query_dop = 4; @@ -173,7 +171,7 @@ explain(costs off, verbose) select * from t2 where a = 2; [No Bypass]reason: Bypass not executed because query's scan operator is not index. Streaming(type: LOCAL GATHER dop: 1/4) Output: a - -> Seq Scan on public.t2 + -> Seq Scan on uint_smp.t2 Output: a Filter: (t2.a = 2) (6 rows) @@ -183,19 +181,19 @@ explain(costs off, verbose) select * from t2 where a = 2 and t2.a in (select a f ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. 
Nested Loop Semi Join - Output: public.t2.a + Output: uint_smp.t2.a -> Streaming(type: LOCAL GATHER dop: 1/4) - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a = 2) AND (public.t2.a < 500)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a = 2) AND (uint_smp.t2.a < 500)) -> Materialize - Output: public.t2.a + Output: uint_smp.t2.a -> Streaming(type: LOCAL GATHER dop: 1/4) - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a < 500) AND (public.t2.a = 2)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a < 500) AND (uint_smp.t2.a = 2)) (15 rows) set query_dop = 1; @@ -207,25 +205,25 @@ explain(costs off, verbose) select * from t2 where a = 2; QUERY PLAN ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. - Seq Scan on public.t2 + Seq Scan on uint_smp.t2 Output: a Filter: (t2.a = 2::bigint) (4 rows) explain(costs off, verbose) select * from t2 where a = 2 and t2.a in (select a from t2 where a < 500); - QUERY PLAN ------------------------------------------------------------------------------------- + QUERY PLAN +--------------------------------------------------------------------------------------- [No Bypass]reason: Bypass not executed because query's scan operator is not index. 
Nested Loop Semi Join - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a = 2::bigint) AND (public.t2.a < 500::bigint)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a = 2::bigint) AND (uint_smp.t2.a < 500::bigint)) -> Materialize - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a < 500::bigint) AND (public.t2.a = 2::bigint)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a < 500::bigint) AND (uint_smp.t2.a = 2::bigint)) (11 rows) set query_dop = 4; @@ -235,29 +233,29 @@ explain(costs off, verbose) select * from t2 where a = 2; [No Bypass]reason: Bypass not executed because query's scan operator is not index. Streaming(type: LOCAL GATHER dop: 1/4) Output: a - -> Seq Scan on public.t2 + -> Seq Scan on uint_smp.t2 Output: a Filter: (t2.a = 2::bigint) (6 rows) explain(costs off, verbose) select * from t2 where a = 2 and t2.a in (select a from t2 where a < 500); - QUERY PLAN ------------------------------------------------------------------------------------------ + QUERY PLAN +--------------------------------------------------------------------------------------------- [No Bypass]reason: Bypass not executed because query's scan operator is not index. 
Nested Loop Semi Join - Output: public.t2.a + Output: uint_smp.t2.a -> Streaming(type: LOCAL GATHER dop: 1/4) - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a = 2::bigint) AND (public.t2.a < 500::bigint)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a = 2::bigint) AND (uint_smp.t2.a < 500::bigint)) -> Materialize - Output: public.t2.a + Output: uint_smp.t2.a -> Streaming(type: LOCAL GATHER dop: 1/4) - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a < 500::bigint) AND (public.t2.a = 2::bigint)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a < 500::bigint) AND (uint_smp.t2.a = 2::bigint)) (15 rows) set query_dop = 1; @@ -274,13 +272,13 @@ explain(costs off, verbose) select * from join_1 join join_2; Output: join_1.a, join_2.a -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on uint_smp.join_1 Output: join_1.a -> Materialize Output: join_2.a -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on uint_smp.join_2 Output: join_2.a (12 rows) @@ -292,13 +290,13 @@ explain(costs off, verbose) select * from join_1 join join_2 on join_1.a = join_ Hash Cond: (join_1.a = join_2.a) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on uint_smp.join_1 Output: join_1.a -> Hash Output: join_2.a -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on uint_smp.join_2 Output: join_2.a (13 rows) @@ -310,13 +308,13 @@ explain(costs off, verbose) select * from join_1 left join join_2 on join_1.a = Hash Cond: (join_1.a = join_2.a) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on uint_smp.join_1 Output: join_1.a -> Hash 
Output: join_2.a -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on uint_smp.join_2 Output: join_2.a (13 rows) @@ -328,13 +326,13 @@ explain(costs off, verbose) select * from join_1 right join join_2 on join_1.a = Hash Cond: (join_1.a = join_2.a) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on uint_smp.join_1 Output: join_1.a -> Hash Output: join_2.a -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on uint_smp.join_2 Output: join_2.a (13 rows) @@ -346,13 +344,13 @@ explain(costs off, verbose) select * from join_1 inner join join_2 on join_1.a = Hash Cond: (join_1.a = join_2.a) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on uint_smp.join_1 Output: join_1.a -> Hash Output: join_2.a -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on uint_smp.join_2 Output: join_2.a (13 rows) @@ -364,13 +362,13 @@ explain(costs off, verbose) select /*+ nestloop(join_1 join_2)*/ * from join_1 l Join Filter: (join_1.a = join_2.a) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on uint_smp.join_1 Output: join_1.a -> Materialize Output: join_2.a -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on uint_smp.join_2 Output: join_2.a (13 rows) @@ -382,13 +380,13 @@ explain(costs off, verbose) select /*+ hashjoin(join_1 join_2)*/ * from join_1 l Hash Cond: (join_1.a = join_2.a) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on uint_smp.join_1 Output: join_1.a -> Hash Output: join_2.a -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on uint_smp.join_2 Output: join_2.a (13 rows) @@ -401,15 +399,19 
@@ WARNING: unused hint: MergeJoin(join_1 join_2) Hash Cond: (join_1.a = join_2.a) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on uint_smp.join_1 Output: join_1.a -> Hash Output: join_2.a -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on uint_smp.join_2 Output: join_2.a (13 rows) -\c postgres -drop database uint_smp; +drop schema uint_smp cascade; +NOTICE: drop cascades to 3 other objects +DETAIL: drop cascades to table t2 +drop cascades to table join_1 +drop cascades to table join_2 +reset current_schema; diff --git a/contrib/dolphin/expected/uint_sql_mode.out b/contrib/dolphin/expected/uint_sql_mode.out index 799f2dfee7e424ffed4f793e4aae8c313bfd629f..dd0c107f99de8736622b4769e4e70f0833c7e28f 100644 --- a/contrib/dolphin/expected/uint_sql_mode.out +++ b/contrib/dolphin/expected/uint_sql_mode.out @@ -1,7 +1,5 @@ -drop database if exists uint_sql_mode; -NOTICE: database "uint_sql_mode" does not exist, skipping -create database uint_sql_mode dbcompatibility 'b'; -\c uint_sql_mode +create schema uint_sql_mode; +set current_schema to 'uint_sql_mode'; set dolphin.sql_mode = ''; select (-1)::bool::uint8; uint8 @@ -2210,5 +2208,6 @@ select * from t1; 255 (61 rows) -\c postgres -drop database uint_sql_mode; +drop schema uint_sql_mode cascade; +NOTICE: drop cascades to table t1 +reset current_schema; diff --git a/contrib/dolphin/expected/uint_xor.out b/contrib/dolphin/expected/uint_xor.out index 5941ad609c2fd556ae3be613de2a2d901905aef1..03bb8ad15c6023681f9b73df0b72fde5f19b6698 100644 --- a/contrib/dolphin/expected/uint_xor.out +++ b/contrib/dolphin/expected/uint_xor.out @@ -1,7 +1,5 @@ -drop database if exists uint_xor; -NOTICE: database "uint_xor" does not exist, skipping -create database uint_xor dbcompatibility 'b'; -\c uint_xor +create schema uint_xor; +set current_schema to 'uint_xor'; --uint8 select 18446744073709551615::uint8 # 0::int1; ?column? 
@@ -826,5 +824,5 @@ select 127::int1 # 1::uint8; 126 (1 row) -\c postgres -drop database uint_xor +drop schema uint_xor cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/upsert.out b/contrib/dolphin/expected/upsert.out index 45723f1e41e10d87557031c33aba2a2da4b802ee..ba1aaae7469a71dcc2df4da8df92e2c1c22dd482 100644 --- a/contrib/dolphin/expected/upsert.out +++ b/contrib/dolphin/expected/upsert.out @@ -1,7 +1,5 @@ -drop database if exists upsert; -NOTICE: database "upsert" does not exist, skipping -create database upsert dbcompatibility 'b'; -\c upsert +create schema upsert; +set current_schema to 'upsert'; --normal test --primary key and unique on multiple column create table normal_01(c1 int, c2 int, c3 int, c4 int unique, c5 int primary key, unique(c2,c3)); @@ -805,5 +803,28 @@ select * from subpartition_03; 2 | 2 | 1 | 1 (1 row) -\c postgres -drop database upsert +drop schema upsert cascade; +NOTICE: drop cascades to 22 other objects +DETAIL: drop cascades to table normal_01 +drop cascades to table normal_02 +drop cascades to table normal_03 +drop cascades to table normal_04 +drop cascades to table ustore_01 +drop cascades to table ustore_02 +drop cascades to table ustore_03 +drop cascades to table ustore_04 +drop cascades to table t6 +drop cascades to table segment_01 +drop cascades to table segment_02 +drop cascades to table segment_03 +drop cascades to table segment_04 +drop cascades to table partition_01 +drop cascades to table partition_02 +drop cascades to table partition_03 +drop cascades to table partition_04 +drop cascades to table partition_05 +drop cascades to table partition_06 +drop cascades to table subpartition_01 +drop cascades to table subpartition_02 +drop cascades to table subpartition_03 +reset current_schema; diff --git a/contrib/dolphin/expected/use_dbname.out b/contrib/dolphin/expected/use_dbname.out index 619af9f7bee84b12f73d996483f4345106dbb669..ce3dafd26076c0242f28dcce7adb8f97ea09c9b1 100644 --- 
a/contrib/dolphin/expected/use_dbname.out +++ b/contrib/dolphin/expected/use_dbname.out @@ -1,7 +1,5 @@ -drop database if exists use_dbname; -NOTICE: database "use_dbname" does not exist, skipping -create database use_dbname dbcompatibility 'b'; -\c use_dbname +create schema use_dbname; +set current_schema to 'use_dbname'; CREATE schema db1; CREATE schema db2; USE db1; @@ -35,5 +33,5 @@ select a from test; db1 (1 row) -\c postgres -drop database if exists use_dbname; +drop schema use_dbname cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/vec_engine.out b/contrib/dolphin/expected/vec_engine.out index 3d104f1ab94bdf1b5e170a6739564d8136935c8d..7c0a8e7cda32fc79aa797e5efcc2ba1e61d38413 100644 --- a/contrib/dolphin/expected/vec_engine.out +++ b/contrib/dolphin/expected/vec_engine.out @@ -1,5 +1,5 @@ -create database vec_engine_test dbcompatibility 'b'; -\c vec_engine_test +create schema vec_engine_test; +set current_schema to 'vec_engine_test'; CREATE TABLE customer ( c_custkey integer NOT NULL, c_name character varying(25) NOT NULL, @@ -128,5 +128,12 @@ explain (costs off) select Filter: (r_name = 'ASIA'::bpchar) (23 rows) -\c postgres -drop database vec_engine_test; +drop schema vec_engine_test cascade; +NOTICE: drop cascades to 6 other objects +DETAIL: drop cascades to table customer +drop cascades to table orders +drop cascades to table lineitem +drop cascades to table supplier +drop cascades to table nation +drop cascades to table region +reset current_schema; diff --git a/contrib/dolphin/expected/zerofill.out b/contrib/dolphin/expected/zerofill.out index 7450c909c3e54e076b507781e20a7840ed03d87f..e25ca0ef5e4e5aa258663e21591499d7e460e98f 100644 --- a/contrib/dolphin/expected/zerofill.out +++ b/contrib/dolphin/expected/zerofill.out @@ -1,7 +1,5 @@ -drop database if exists db_zerofill; -NOTICE: database "db_zerofill" does not exist, skipping -create database db_zerofill dbcompatibility 'b'; -\c db_zerofill +create schema db_zerofill; +set 
current_schema to 'db_zerofill'; create table t1_zerofill ( a int(5) zerofill, b integer(5) zerofill unsigned, @@ -14,7 +12,7 @@ create table t1_zerofill ( i int4 unsigned zerofill, j int8 zerofill); \d+ t1_zerofill - Table "public.t1_zerofill" + Table "db_zerofill.t1_zerofill" Column | Type | Modifiers | Storage | Stats target | Description --------+-------+-----------+---------+--------------+------------- a | uint4 | | plain | | @@ -39,5 +37,6 @@ create table t2_zerofill (a double precision zerofill); ERROR: syntax error at or near "zerofill" LINE 1: create table t2_zerofill (a double precision zerofill); ^ -\c postgres -drop database if exists db_zerofill; +drop schema db_zerofill cascade; +NOTICE: drop cascades to table t1_zerofill +reset current_schema; diff --git a/contrib/dolphin/parallel_schedule_dolphin b/contrib/dolphin/parallel_schedule_dolphin index cddfff106fbe384b4748bbad2d9c1900056ea857..1c6982c341dc410a621ef999e7dfcd84f9186178 100644 --- a/contrib/dolphin/parallel_schedule_dolphin +++ b/contrib/dolphin/parallel_schedule_dolphin @@ -4,36 +4,27 @@ test: keyword_ignore_test/ignore_no_matched_partition keyword_ignore_test/ignore test: string_func_test/db_b_ascii_test string_func_test/db_b_left_right_test string_func_test/db_b_quote_test string_func_test/db_b_string_length_test string_func_test/db_b_substr_test string_func_test/db_b_trim_test string_func_test/db_b_insert_test -test: ast b_compatibility_time_type db_b_new_gram_test - -test: test_condition vec_engine - -test: group_concat_test +test: ast b_compatibility_time_type db_b_new_gram_test group_concat_test test_condition vec_engine test: db_b_parser1 db_b_parser2 db_b_parser3 db_b_parser4 second_microsecond -test: db_b_plpgsql_test default_guc describe explain_desc +test: db_b_plpgsql_test default_guc describe explain_desc kill set_password network test: empty_value_list empty_value_lists empty_value_support_value test: greatest_least join_without_on mysqlmode_fullgroup mysqlmode_strict 
mysqlmode_strict2 none_strict_warning test_alter_table -test: kill set_password network - # must be single test group, cause other connection will affect the result test: test_shows test: test_shows_1 test_shows_2 -test: test_shows_3 test_shows_4 - -test: test_shows_5 - -test: test_optimize +# must be single test group, cause other connection will affect the result +test: test_shows_3 -test: test_set_charset +test: test_shows_4 test_shows_5 -test: nvarchar regexp upsert zerofill +test: nvarchar regexp upsert zerofill test_set_charset test_optimize test: test_binary test_blob test_datatype test_fixed test_mysql_enum @@ -43,10 +34,6 @@ test: builtin_funcs/bin builtin_funcs/char builtin_funcs/char_length builtin_fun test: builtin_funcs/db_b_hex builtin_funcs/db_b_if builtin_funcs/elt builtin_funcs/field builtin_funcs/find_in_set builtin_funcs/soundex builtin_funcs/space builtin_funcs/make_set builtin_funcs/between builtin_funcs/not_between -test: revoke - -test: option - test: uint_agg uint_and uint_cast uint_cast2 uint_div uint_mi uint_mod uint_mod2 uint_mul uint_numeric uint_operator uint_or uint_partition uint_pl uint_xor test: uint_procedure_col_bypass uint_smp uint_index uint_sql_mode uint_ignore uint_join signed_unsigned_cast uint_in implicit_cast uint_auto_increment @@ -77,24 +64,14 @@ test: create_function_test/m_type_create_proc test: create_function_test/deterministic create_function_test/language_sql create_function_test/sql_options create_function_test/sql_security create_function_test/call_function create_function_test/commentsharp create_function_test/single_line_proc -test: b_do_statment +test: b_do_statment revoke option test_table_index test_float_double_real_double_precision_MD single_line_trigger prefixkey_index test: alter_function_test/alter_function alter_function_test/alter_procedure alter_function_test/language_sql replace_test/replace -test: network2 use_dbname show_create view_definer_test insert_set show_create_database show_variables +test: 
network2 use_dbname show_create view_definer_test insert_set show_create_database show_variables b_auto_increment test: partition_test1 partition_test2 partition_test3 partition_test4 partition_maxvalue_test -test: test_table_index - -test: test_float_double_real_double_precision_MD - -test: single_line_trigger - -test: db_b_date_time_functions - -test: prefixkey_index b_auto_increment - test: builtin_funcs/b_compatibility_time_funcs builtin_funcs/b_compatibility_time_funcs2 builtin_funcs/b_compatibility_time_funcs3 # case sensitive test, do not insert test case */ @@ -133,35 +110,22 @@ test: test_mysql_operator test_op_xor_unsignedint test_op_blob test_op_xor_boola test: json_array json_object json_quote json_contains json_contains_path json_extract json_unquote json_keys json_search json_array_append -test: db_b_date_time_functions2 -test: show b_comments - -test: ansi_quotes_start - -test: pl_debugger_server pl_debugger_client +test: show b_comments ansi_quotes_start -test: ansi_quotes_test +test: pl_debugger_server pl_debugger_client load load2 flush ansi_quotes_test -test: db_b_date_time_functions3 +test: db_b_date_time_functions db_b_date_time_functions2 db_b_date_time_functions3 db_b_date_time_functions4 test: json_array_insert json_insert json_merge_patch json_merge_preserve json_remove json_replace json_set json_depth -test: flush - test: oct string_func_test/db_b_from_base64_test string_func_test/test_substring_index string_func_test/db_b_ord_test -test: db_b_date_time_functions4 - -test: load load2 - test: like_default_test conv_cast_test read_only_guc_test test: string_func_test/db_b_to_base64_test string_func_test/db_b_unhex_test bit_count test_current_user test: test_schema connection_id test_system_user test_bit_xor -test: builtin_funcs/cast any_value_test - -test: default_function get_b_database +test: builtin_funcs/cast any_value_test default_function get_b_database test: json_type json_pretty json_valid json_length json_objectagg json_arrayagg 
json_operator json_storage_size diff --git a/contrib/dolphin/sql/alter_function_test/alter_function.sql b/contrib/dolphin/sql/alter_function_test/alter_function.sql index f346b962ed8674e466261c8927a01166b459b24d..356373cc0909979f3b9af4b8e6636466d1be4ef2 100755 --- a/contrib/dolphin/sql/alter_function_test/alter_function.sql +++ b/contrib/dolphin/sql/alter_function_test/alter_function.sql @@ -1,6 +1,5 @@ -drop database if exists db_alter_func_1; -create database db_alter_func_1 dbcompatibility 'B'; -\c db_alter_func_1 +create schema db_alter_func_1; +set current_schema to 'db_alter_func_1'; CREATE FUNCTION f1 (s CHAR(20)) RETURNS int AS $$ select 1 $$ ; @@ -103,5 +102,5 @@ select f5() ; select * from t1; -\c postgres -drop database db_alter_func_1; +drop schema db_alter_func_1 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/alter_function_test/alter_procedure.sql b/contrib/dolphin/sql/alter_function_test/alter_procedure.sql index 33a9d2e878719783af3d2a0c418c5ca378b33c59..912f17bd9563495da2cddf52f87f33c59a5f2a40 100755 --- a/contrib/dolphin/sql/alter_function_test/alter_procedure.sql +++ b/contrib/dolphin/sql/alter_function_test/alter_procedure.sql @@ -1,6 +1,5 @@ -drop database if exists db_alter_func_2; -create database db_alter_func_2 dbcompatibility 'B'; -\c db_alter_func_2 +create schema db_alter_func_2; +set current_schema to 'db_alter_func_2'; CREATE OR REPLACE PROCEDURE proc1() AS @@ -50,5 +49,5 @@ END; -- 修改不存在的存储过程 ALTER PROCEDURE proc2 READS SQL DATA; -\c postgres -drop database db_alter_func_2; +drop schema db_alter_func_2 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/alter_function_test/language_sql.sql b/contrib/dolphin/sql/alter_function_test/language_sql.sql index e34993d5cb6edee328b924074ef266764f90a538..f5b92fa5df480ae9bd105b65ef944699adf904bc 100644 --- a/contrib/dolphin/sql/alter_function_test/language_sql.sql +++ b/contrib/dolphin/sql/alter_function_test/language_sql.sql @@ -1,6 +1,5 @@ -drop database if exists 
db_alter_func_sql; -create database db_alter_func_sql dbcompatibility 'B'; -\c db_alter_func_sql +create schema db_alter_func_sql; +set current_schema to 'db_alter_func_sql'; -- test func @@ -173,5 +172,5 @@ call pro_2(1,2,'a'); call pro_3(1,2,'a'); -\c postgres -drop database db_alter_func_sql; +drop schema db_alter_func_sql cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/ansi_quotes_start.sql b/contrib/dolphin/sql/ansi_quotes_start.sql index bde70e0aa5f70b2617758ed9f7707612764dc96e..8855fe711118c95566eb2e46618f981500d422ae 100644 --- a/contrib/dolphin/sql/ansi_quotes_start.sql +++ b/contrib/dolphin/sql/ansi_quotes_start.sql @@ -1,2 +1 @@ -CREATE DATABASE test_ansi_quotes DBCOMPATIBILITY 'B'; -\c test_ansi_quotes +create schema test_ansi_quotes; diff --git a/contrib/dolphin/sql/ansi_quotes_test.sql b/contrib/dolphin/sql/ansi_quotes_test.sql index e9a65a3b2113a3d857fe0e53b88abdd119af0b7a..fbd53ccf00f953a3aeb89cc6d208fbc84f162d97 100644 --- a/contrib/dolphin/sql/ansi_quotes_test.sql +++ b/contrib/dolphin/sql/ansi_quotes_test.sql @@ -1,4 +1,4 @@ -\c test_ansi_quotes +set current_schema to 'test_ansi_quotes'; CREATE TABLE test_quotes (a text); show dolphin.sql_mode; @@ -38,5 +38,5 @@ select "test" != "test"; select * from test_quotes where a like "%test%"; select * from test_quotes where a = "test1"; -\c postgres -DROP DATABASE test_ansi_quotes; \ No newline at end of file +drop schema test_ansi_quotes cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/any_value_test.sql b/contrib/dolphin/sql/any_value_test.sql index be091aa1af31ff399973c9691ac693770195b881..678619afe4640f788fba552161b58b5d12106c03 100644 --- a/contrib/dolphin/sql/any_value_test.sql +++ b/contrib/dolphin/sql/any_value_test.sql @@ -1,6 +1,5 @@ -drop DATABASE if exists any_value_test; -CREATE DATABASE any_value_test dbcompatibility 'B'; -\c any_value_test; +create schema any_value_test; +set current_schema to 'any_value_test'; --test int type 
create table test_int1(a tinyint, b int); @@ -85,5 +84,5 @@ insert into test_blob_bytea values(2,'abcd',E'\\xeabc'); select any_value(b) from test_blob_bytea group by a; select any_value(c) from test_blob_bytea group by a; -\c postgres; -drop DATABASE if exists any_value_test; \ No newline at end of file +drop schema any_value_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/ast.sql b/contrib/dolphin/sql/ast.sql index 2d0abb4be5cd2ff8e63e7ddf1f4591947d52c4ae..471d153e39bc7a615a80a7284740fe78c1e5cdcf 100644 --- a/contrib/dolphin/sql/ast.sql +++ b/contrib/dolphin/sql/ast.sql @@ -1,11 +1,10 @@ -drop database if exists ast_test; -create database ast_test dbcompatibility 'b'; -\c ast_test +create schema ast_test; +set current_schema to 'ast_test'; ast select * from test; ast create table test(id int); ast create table test(id int(5)); ast USE "custcomcenter"; ast select 1;select 1; ast select 1;ast select 1; -\c postgres -drop database ast_test; \ No newline at end of file +drop schema ast_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/b_comments.sql b/contrib/dolphin/sql/b_comments.sql index 0852f1faccb39d65d97b82fe4b873a7ce4676cee..3233dfd4ed5a7a14bb6f3137917641455c464d71 100644 --- a/contrib/dolphin/sql/b_comments.sql +++ b/contrib/dolphin/sql/b_comments.sql @@ -1,4 +1,6 @@ /* unsupported */ +create database b_comments dbcompatibility 'A'; +\c b_comments create schema b_comments; set search_path to 'b_comments'; create table test_row(a int not null comment 'test_row.a'); @@ -32,10 +34,9 @@ create procedure test_alter_procedure(int,int) as begin select $1 + $2;end; / alter procedure test_alter_procedure(int,int) comment 'test_alter_procedure'; drop schema b_comments cascade; +\c contrib_regression +drop database b_comments; - -create database b_comments dbcompatibility 'B'; -\c b_comments create schema b_comments; set search_path to 'b_comments'; /* unsupported */ @@ 
-177,6 +178,4 @@ from pg_description pd on pd.objoid = pc.oid where pc.relname = 'uq_0034'; drop schema b_comments cascade; -reset search_path; -\c postgres -drop database b_comments; \ No newline at end of file +reset search_path; \ No newline at end of file diff --git a/contrib/dolphin/sql/b_do_statment.sql b/contrib/dolphin/sql/b_do_statment.sql index ddd26ed1753e0075e34273857fd441bd16aade1a..170107f1fc5c8defc344e804e19a1c77d639a13e 100644 --- a/contrib/dolphin/sql/b_do_statment.sql +++ b/contrib/dolphin/sql/b_do_statment.sql @@ -1,5 +1,5 @@ -create database db_do_stmt dbcompatibility = 'B'; -\c db_do_stmt +create schema db_do_stmt; +set current_schema to 'db_do_stmt'; create table t1 (a int); insert into t1 values(1),(4),(7); @@ -49,5 +49,5 @@ LANGUAGE SQL; do sin(a) from t1; -\c regress -drop database db_do_stmt; +drop schema db_do_stmt cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/bit_count.sql b/contrib/dolphin/sql/bit_count.sql index ff4e2324c54c3cedd197799da13c5a5a4bcc374d..ede31c8e35e62e635d08389fa4824e7f931ee80a 100644 --- a/contrib/dolphin/sql/bit_count.sql +++ b/contrib/dolphin/sql/bit_count.sql @@ -1,6 +1,5 @@ -drop database if exists test_bit_count; -create database test_bit_count dbcompatibility 'b'; -\c test_bit_count +create schema test_bit_count; +set current_schema to 'test_bit_count'; -- 测试数字,字符串,二进制输入 SELECT bit_count(29); @@ -51,5 +50,5 @@ select bit_count(b'1111111111111111111111111111111111111111111111111111111111111 select bit_count(b'1111111111111111111111111111111111111111111111111111111111111111'); select bit_count(b'10000000111111111111111111111111111111111111111111111111111111111111111'); -\c postgres -drop database test_bit_count; \ No newline at end of file +drop schema test_bit_count cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/builtin_funcs/b_compatibility_time_funcs.sql b/contrib/dolphin/sql/builtin_funcs/b_compatibility_time_funcs.sql index 
b6c4fb0f0cd798775059d938cd790c580c2b7a10..1a76bdb38e8b10843c8cbdd4719fbef65949e6da 100644 --- a/contrib/dolphin/sql/builtin_funcs/b_compatibility_time_funcs.sql +++ b/contrib/dolphin/sql/builtin_funcs/b_compatibility_time_funcs.sql @@ -1,9 +1,5 @@ --- --- Test All Time function under 'b' compatibility --- -drop database if exists b_time_funcs; -create database b_time_funcs dbcompatibility 'b'; -\c b_time_funcs +create schema b_time_funcs; +set current_schema to 'b_time_funcs'; create table func_test(functionName varchar(256),result varchar(256)); truncate table func_test; @@ -243,5 +239,5 @@ insert into insert_subdate(date_col, datetime_col) values (subdate('2021-1-1', 1 drop table insert_subdate; select * from func_test; -\c postgres -drop database if exists b_time_funcs; +drop schema b_time_funcs cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/b_compatibility_time_funcs2.sql b/contrib/dolphin/sql/builtin_funcs/b_compatibility_time_funcs2.sql index fa1c66d3193c73ad81a2db719115393b36da24dc..cbf12430992b3d58f4103fab15bae6d608f98b25 100644 --- a/contrib/dolphin/sql/builtin_funcs/b_compatibility_time_funcs2.sql +++ b/contrib/dolphin/sql/builtin_funcs/b_compatibility_time_funcs2.sql @@ -2,9 +2,8 @@ -- Test Time functions(Stage 2) under 'b' compatibility -- Contains subtime()、timediff()、time()、time_format()、timestamp()、timestampadd() -- -drop database if exists b_time_funcs2; -create database b_time_funcs2 dbcompatibility 'b'; -\c b_time_funcs2 +create schema b_time_funcs2; +set current_schema to 'b_time_funcs2'; create table func_test2(functionName varchar(256),result varchar(256)); truncate table func_test2; @@ -516,5 +515,5 @@ drop table t1; drop table t2; select * from func_test2; -\c postgres -drop database if exists b_time_funcs2; +drop schema b_time_funcs2 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/b_compatibility_time_funcs3.sql b/contrib/dolphin/sql/builtin_funcs/b_compatibility_time_funcs3.sql 
index c66032810befbd4c23fd4411ea1bd9034f8dfc2c..0eb658af7cdb74b4119e532e8c370f475af7a0dc 100644 --- a/contrib/dolphin/sql/builtin_funcs/b_compatibility_time_funcs3.sql +++ b/contrib/dolphin/sql/builtin_funcs/b_compatibility_time_funcs3.sql @@ -2,9 +2,8 @@ -- Test Time functions(Stage 3) under 'b' compatibility -- Contains to_days(), to_seconds(), unix_timestamp(), utc_date(), utc_time()、timestampadd() -- -drop database if exists b_time_funcs3; -create database b_time_funcs3 dbcompatibility 'b'; -\c b_time_funcs3 +create schema b_time_funcs3; +set current_schema to 'b_time_funcs3'; create table func_test3(functionName varchar(256),result varchar(256)); truncate table func_test3; @@ -189,5 +188,5 @@ insert into func_test3(functionName, result) values('UTC_TIMESTAMP(6)', UTC_TIME insert into func_test3(functionName, result) values('UTC_TIMESTAMP(-1)', UTC_TIMESTAMP(-1)); select * from func_test3; -\c postgres -drop database if exists b_time_funcs3; +drop schema b_time_funcs3 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/between.sql b/contrib/dolphin/sql/builtin_funcs/between.sql index a83fa336935a2ebafa8d8aad8d75d10e526d5540..3bdd701f3654462bb2a688d4bbd30d5c1cc0334f 100644 --- a/contrib/dolphin/sql/builtin_funcs/between.sql +++ b/contrib/dolphin/sql/builtin_funcs/between.sql @@ -1,6 +1,5 @@ -drop database if exists db_between; -create database db_between dbcompatibility 'B'; -\c db_between +create schema db_between; +set current_schema to 'db_between'; select 2 between 2 and 23; select 2.1 between 2.1 and 12.3; select true between false and true; @@ -85,5 +84,5 @@ select * from t_between_and_0023; select distinct c_town from t_between_and_0023 where c_town between 'b' and 'n'; select distinct c_town from t_between_and_0023 where c_town between 'b' and 'nz'; drop table t_between_and_0023; -\c postgres -drop database if exists db_between; \ No newline at end of file +drop schema db_between cascade; +reset current_schema; \ No newline at 
end of file diff --git a/contrib/dolphin/sql/builtin_funcs/bin.sql b/contrib/dolphin/sql/builtin_funcs/bin.sql index 99aa2957f9ae659fcfb66fe8e484166661c03509..b6cb0af9f9a5d5abf7e5b38790c9fcd935bfa2bd 100644 --- a/contrib/dolphin/sql/builtin_funcs/bin.sql +++ b/contrib/dolphin/sql/builtin_funcs/bin.sql @@ -1,6 +1,5 @@ -drop database if exists db_bin; -create database db_bin dbcompatibility 'B'; -\c db_bin +create schema db_bin; +set current_schema to 'db_bin'; select bin(1); select bin(0); select bin('2'); @@ -11,5 +10,5 @@ select bin(null); select bin(true); select bin(false); select bin('测试'); -\c postgres -drop database if exists db_bin; +drop schema db_bin cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/cast.sql b/contrib/dolphin/sql/builtin_funcs/cast.sql index f46343b5da47d85f05adcf4333151f11d4273390..dc462042c0caede15db1590a8f3feeae9a57c9f3 100644 --- a/contrib/dolphin/sql/builtin_funcs/cast.sql +++ b/contrib/dolphin/sql/builtin_funcs/cast.sql @@ -1,6 +1,5 @@ -drop database if exists db_cast; -create database db_cast dbcompatibility 'B'; -\c db_cast +create schema db_cast; +set current_schema to 'db_cast'; select cast('$2'::money as unsigned); select cast(cast('$2'::money as unsigned) as money); @@ -10,5 +9,5 @@ select cast('2022-11-10 18:03:20'::timestamp as unsigned); select cast(current_timestamp::timestamp as unsigned); select cast(cast('2022-11-10 18:03:20'::timestamp as unsigned) as timestamp); -\c postgres -drop database if exists db_cast; +drop schema db_cast cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/char.sql b/contrib/dolphin/sql/builtin_funcs/char.sql index c00e8a6c133eb42e87e763daf54dedede3985c73..8f92ffbe461f802a501c99badd8bcf5b2e0a6214 100644 --- a/contrib/dolphin/sql/builtin_funcs/char.sql +++ b/contrib/dolphin/sql/builtin_funcs/char.sql @@ -1,6 +1,5 @@ -drop database if exists db_char; -create database db_char dbcompatibility 'B'; -\c db_char +create schema db_char; +set 
current_schema to 'db_char'; select char(67,66,67); select char('65','66','67'); select char('A','B','C'); @@ -22,5 +21,5 @@ select char('hiu78','-156nfjl',175.99,'测试'); select char('侧四',-156,55.99,'ceshi'); select char('hi测试u158','ceshi',135.99,146); select char('hiu158','测试',125.99,146); -\c postgres -drop database if exists db_char; +drop schema db_char cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/char_length.sql b/contrib/dolphin/sql/builtin_funcs/char_length.sql index 9d1b4b2dab63332b402abbdcee3770b05c40eb01..444beccc7f79e9c475a2abc5f730a4cbb2674dfb 100644 --- a/contrib/dolphin/sql/builtin_funcs/char_length.sql +++ b/contrib/dolphin/sql/builtin_funcs/char_length.sql @@ -1,6 +1,5 @@ -drop database if exists db_char_length; -create database db_char_length dbcompatibility 'B'; -\c db_char_length +create schema db_char_length; +set current_schema to 'db_char_length'; select char_length(1234); select char_length(1234.5); @@ -18,8 +17,8 @@ select char_length('测试'); select char_length('测试123'); select char_length(true); -\c postgres -drop database if exists db_char_length; +drop schema db_char_length cascade; +reset current_schema; drop database if exists db_char_length_gbk; create database db_char_length_gbk dbcompatibility 'B' encoding 'GBK' LC_CTYPE 'zh_CN.gbk' lc_collate 'zh_CN.gbk'; \c db_char_length_gbk diff --git a/contrib/dolphin/sql/builtin_funcs/character_length.sql b/contrib/dolphin/sql/builtin_funcs/character_length.sql index 67019aef78d97e79c0e5859fc1344623dd3afcae..9c142fb65e2df74757047acb65103e4dfc235f08 100644 --- a/contrib/dolphin/sql/builtin_funcs/character_length.sql +++ b/contrib/dolphin/sql/builtin_funcs/character_length.sql @@ -1,6 +1,5 @@ -drop database if exists db_character_length; -create database db_character_length dbcompatibility 'B'; -\c db_character_length +create schema db_character_length; +set current_schema to 'db_character_length'; select character_length(1234); select character_length(1234.5); @@ 
-18,8 +17,8 @@ select character_length('测试'); select character_length('测试123.45'); select character_length(true); -\c postgres -drop database if exists db_character_length; +drop schema db_character_length cascade; +reset current_schema; drop database if exists db_character_length_gbk; create database db_character_length_gbk dbcompatibility 'B' encoding 'GBK' LC_CTYPE 'zh_CN.gbk' lc_collate 'zh_CN.gbk'; \c db_character_length_gbk diff --git a/contrib/dolphin/sql/builtin_funcs/conv.sql b/contrib/dolphin/sql/builtin_funcs/conv.sql index e70fc6f62805ba1bda803689835fde9d925c7e1e..f8d09b2479a8f1e9a6324d09d7c68549c32f38af 100644 --- a/contrib/dolphin/sql/builtin_funcs/conv.sql +++ b/contrib/dolphin/sql/builtin_funcs/conv.sql @@ -1,6 +1,5 @@ -drop database if exists db_conv; -create database db_conv dbcompatibility 'B'; -\c db_conv +create schema db_conv; +set current_schema to 'db_conv'; select conv('a',16,2); select conv('6e',18,8); @@ -97,5 +96,5 @@ select conv(-9544646155975628532428411,10,-10); select conv(-9544646155975628532428411,-10,10); select conv(-9544646155975628532428411,-10,-10); -\c postgres -drop database if exists db_conv; +drop schema db_conv cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/convert.sql b/contrib/dolphin/sql/builtin_funcs/convert.sql index ffed2d6d9623cccb7f825824549aa3a60e836377..c899480bc6a8207ff610059e1e9eb2273d31af02 100644 --- a/contrib/dolphin/sql/builtin_funcs/convert.sql +++ b/contrib/dolphin/sql/builtin_funcs/convert.sql @@ -1,6 +1,5 @@ -drop database if exists db_convert; -create database db_convert dbcompatibility 'B'; -\c db_convert +create schema db_convert; +set current_schema to 'db_convert'; select convert(1 using 'utf8'); select convert('1' using 'utf8'); select convert('a' using 'utf8'); @@ -14,5 +13,5 @@ select convert('测试' using 'utf8'); select convert('测试' using utf8); select convert(11.1, decimal(10,3)); select convert(1 using decimal(10,3)); -\c postgres -drop database if exists 
db_convert; +drop schema db_convert cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/crc32.sql b/contrib/dolphin/sql/builtin_funcs/crc32.sql index 7edbac123c039fd812d4eac7dd59b921c05516c8..3f814c446aadc0aa2d90fa259ec60bf6c5899738 100644 --- a/contrib/dolphin/sql/builtin_funcs/crc32.sql +++ b/contrib/dolphin/sql/builtin_funcs/crc32.sql @@ -1,6 +1,5 @@ -drop database if exists db_crc32; -create database db_crc32 dbcompatibility 'B'; -\c db_crc32 +create schema db_crc32; +set current_schema to 'db_crc32'; select crc32('abc'); select crc32(''); @@ -8,5 +7,5 @@ select crc32(1); select crc32(10),crc32(-3.1415926),crc32(1.339E5),crc32('ab57'),crc32('HAF47'); -\c postgres -drop database if exists db_crc32; +drop schema db_crc32 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/db_b_format.sql b/contrib/dolphin/sql/builtin_funcs/db_b_format.sql index 092ae8abfbf42207dc415171bd00f6148ff43987..8284272f04ac8e9488b364680b1082d56b8a861e 100644 --- a/contrib/dolphin/sql/builtin_funcs/db_b_format.sql +++ b/contrib/dolphin/sql/builtin_funcs/db_b_format.sql @@ -1,6 +1,5 @@ -drop database if exists db_db_b_format; -create database db_db_b_format dbcompatibility 'B'; -\c db_db_b_format +create schema db_db_b_format; +set current_schema to 'db_db_b_format'; -- test for b_compatibility_mode = false select format(1234.456, 2); select format(1234.456, 2, 'en_US'); @@ -137,8 +136,8 @@ select format('%s, %s', variadic array[true, false]::text[]); select format('%2$s, %1$s', variadic array['first', 'second']); select format('%2$s, %1$s', variadic array[1, 2]); -\c postgres -drop database db_db_b_format; +drop schema db_db_b_format cascade; +reset current_schema; -- test for A compatibility to ensure the original functionality is good. 
create database db_db_b_format dbcompatibility 'A'; diff --git a/contrib/dolphin/sql/builtin_funcs/db_b_hex.sql b/contrib/dolphin/sql/builtin_funcs/db_b_hex.sql index 42636b6b5fee856af44e386bd32309a55169ce86..94d41415bf8df807cf4934dc172411f17950edd9 100644 --- a/contrib/dolphin/sql/builtin_funcs/db_b_hex.sql +++ b/contrib/dolphin/sql/builtin_funcs/db_b_hex.sql @@ -1,6 +1,5 @@ -drop database if exists db_b_hex; -create database db_b_hex dbcompatibility 'B'; -\c db_b_hex +create schema db_b_hex; +set current_schema to 'db_b_hex'; select hex(int1(255)); select hex(int1(256)); @@ -56,5 +55,5 @@ create table bytea_to_hex_test(c1 bytea); insert into bytea_to_hex_test values (E'\\xDEADBEEF'); select hex(c1) from bytea_to_hex_test; -\c postgres -drop database if exists db_b_hex; +drop schema db_b_hex cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/db_b_if.sql b/contrib/dolphin/sql/builtin_funcs/db_b_if.sql index 278b90afb5b65f8c8baacf34edf5045a00bcc6be..f860e1ab8228cdaf95f0c3753dc1425a96810b48 100644 --- a/contrib/dolphin/sql/builtin_funcs/db_b_if.sql +++ b/contrib/dolphin/sql/builtin_funcs/db_b_if.sql @@ -1,6 +1,6 @@ -drop database if exists db_b_if; -create database db_b_if dbcompatibility 'B'; -\c db_b_if +create schema db_b_if; +set current_schema to 'db_b_if'; + select if(TRUE, 1, 2); select if(FALSE, 1, 2); @@ -77,5 +77,5 @@ select if (true, 1.1::float8, true) as a, if (false, 1.1::float8, true) as b; -- numeric to boolean select if (true, 2.2::numeric(10, 2), true) as a, if (false, 2.2::numeric(10, 2), true) as b; -\c postgres -drop database if exists db_b_if; \ No newline at end of file +drop schema db_b_if cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/builtin_funcs/elt.sql b/contrib/dolphin/sql/builtin_funcs/elt.sql index d424482fa3c545be9ea14d2b96176f620de3bca8..fcc3b5633676484105e8518dfaac0c857d04fc8f 100644 --- a/contrib/dolphin/sql/builtin_funcs/elt.sql +++ 
b/contrib/dolphin/sql/builtin_funcs/elt.sql @@ -1,6 +1,5 @@ -drop database if exists db_elt; -create database db_elt dbcompatibility 'B'; -\c db_elt +create schema db_elt; +set current_schema to 'db_elt'; select elt(1,1); select elt(-1,1); select elt(1.2,'a'); @@ -19,5 +18,5 @@ select elt('1',1); select elt('1',1.2); select elt(1,'a','b'); select elt(1,'a',2); -\c postgres -drop database if exists db_elt; +drop schema db_elt cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/field.sql b/contrib/dolphin/sql/builtin_funcs/field.sql index 323a952892493c91a0c9b5aada4b3d33db08905b..8496445c1447c979535db4a01a64a89fafac69d5 100644 --- a/contrib/dolphin/sql/builtin_funcs/field.sql +++ b/contrib/dolphin/sql/builtin_funcs/field.sql @@ -1,6 +1,5 @@ -drop database if exists db_field; -create database db_field dbcompatibility 'B'; -\c db_field +create schema db_field; +set current_schema to 'db_field'; select field(4,1,2,3,4); select field(2,1.1,2.1,3.1); @@ -18,5 +17,5 @@ select field('sdhfgs','dhgf', '2'); select field('sdhfgs',null,1,'dhgf', '2'); select field('测试',null,1,'dhgf', '2'); select field(' ','@',null,' ','',' '); -\c postgres -drop database if exists db_field; +drop schema db_field cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/find_in_set.sql b/contrib/dolphin/sql/builtin_funcs/find_in_set.sql index 24e647ed6dffc5169cef6f7d2bed5e8e0b517a8c..0be1793e12710235fb2b2885cb4be85e1c8d1789 100644 --- a/contrib/dolphin/sql/builtin_funcs/find_in_set.sql +++ b/contrib/dolphin/sql/builtin_funcs/find_in_set.sql @@ -1,6 +1,5 @@ -drop database if exists db_find_in_set; -create database db_find_in_set dbcompatibility 'B'; -\c db_find_in_set +create schema db_find_in_set; +set current_schema to 'db_find_in_set'; select find_in_set(1,'a,1,c'); select find_in_set(1,'true,1,c'); select find_in_set(1.2,'a,1.2,c'); @@ -16,5 +15,5 @@ select find_in_set('1','1,1.2,c,qwee,1212,1.1,12,qw'); select 
find_in_set(1,'1,1.2,c,qwee,1212,1.1,12,qw'); select find_in_set(1,'1.1,1.2,c,qwee,1212,1.1,12,1'); select find_in_set(1.1,'a,1.2,c,qwee,1212,1.1'); -\c postgres -drop database if exists db_find_in_set; +drop schema db_find_in_set cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/make_set.sql b/contrib/dolphin/sql/builtin_funcs/make_set.sql index c16ff7cf251ebd4c92e218857c506b86b6f04fc3..498dee6725375a9b114bf3cccc4efcc6615a4ea4 100644 --- a/contrib/dolphin/sql/builtin_funcs/make_set.sql +++ b/contrib/dolphin/sql/builtin_funcs/make_set.sql @@ -1,6 +1,5 @@ -drop database if exists make_set; -create database make_set dbcompatibility 'b'; -\c make_set +create schema make_set; +set current_schema to 'make_set'; set dolphin.sql_mode = ''; select make_set(3, 'a', 'b', 'c'); select make_set(2,'a','b','c','d','e'); @@ -35,6 +34,6 @@ select make_set(-4294967296*1024*1024*1024,'1','2','3','4','5','6','7','8','9',' select make_set(3, true, false); select make_set(3,01/02/03, false, true, false); -\c postgres -drop database make_set +drop schema make_set cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/not_between.sql b/contrib/dolphin/sql/builtin_funcs/not_between.sql index 686b79c3645aed2461e022ad14cc718d0d5f7b1d..8c73ad3bbc3dae38e03fb935a82508e511e8bbab 100644 --- a/contrib/dolphin/sql/builtin_funcs/not_between.sql +++ b/contrib/dolphin/sql/builtin_funcs/not_between.sql @@ -1,6 +1,5 @@ -drop database if exists db_not_between; -create database db_not_between dbcompatibility 'B'; -\c db_not_between +create schema db_not_between; +set current_schema to 'db_not_between'; select 2 not between 2 and 23; select 2.1 not between 2.1 and 12.3; select true not between false and true; @@ -30,5 +29,5 @@ select b'1111111111111111111111111' not between 0 and 999999999; select 0 not between '测' and '15.2'; select 1 not between '测1' and '1'; select 1 not between '1测' and '1'; -\c postgres -drop database if exists db_not_between; \ No 
newline at end of file +drop schema db_not_between cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/builtin_funcs/soundex.sql b/contrib/dolphin/sql/builtin_funcs/soundex.sql index 84995e7baaeb132ac445a719f85fe0cb1e85a017..b1dfee3f6bb909b50ba53f381920e17a7043b758 100644 --- a/contrib/dolphin/sql/builtin_funcs/soundex.sql +++ b/contrib/dolphin/sql/builtin_funcs/soundex.sql @@ -1,6 +1,5 @@ -drop database if exists db_soundex; -create database db_soundex dbcompatibility 'B'; -\c db_soundex +create schema db_soundex; +set current_schema to 'db_soundex'; select soundex('abc'); select soundex(10); select soundex('afdsbfdlsafs'); @@ -42,5 +41,5 @@ select soundex('ш'); select soundex('я такая шчасліваяwjdkadskdjk'); select soundex('测T测h测试o测masёння я такая шчасліваяhello'); -\c postgres -drop database if exists db_soundex; +drop schema db_soundex cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/space.sql b/contrib/dolphin/sql/builtin_funcs/space.sql index 129339df7dc247d894166fe970d7b12caa4625f1..c025fc16bca593454fb551fa83e82931a50146dd 100644 --- a/contrib/dolphin/sql/builtin_funcs/space.sql +++ b/contrib/dolphin/sql/builtin_funcs/space.sql @@ -1,6 +1,5 @@ -drop database if exists db_space; -create database db_space dbcompatibility 'B'; -\c db_space +create schema db_space; +set current_schema to 'db_space'; select space('a'); select space(10); select space(-1); @@ -12,5 +11,5 @@ select space(true); select space(1,2,3); select space(2147483647111); select space(b'111'); -\c postgres -drop database if exists db_space; +drop schema db_space cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/connection_id.sql b/contrib/dolphin/sql/connection_id.sql index 704b05caa094164d269c6f13ad1540c248c10260..ebcac8e8f89944a4bc3e8f0152ebab44e0e8041b 100644 --- a/contrib/dolphin/sql/connection_id.sql +++ b/contrib/dolphin/sql/connection_id.sql @@ -1,9 +1,8 @@ -drop database if exists test_connection_id; 
-create database test_connection_id dbcompatibility 'b'; -\c test_connection_id +create schema test_connection_id; +set current_schema to 'test_connection_id'; -- 测试返回连接的ID SELECT CONNECTION_ID(); -\c postgres -drop database test_connection_id; \ No newline at end of file +drop schema test_connection_id cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/conv_cast_test.sql b/contrib/dolphin/sql/conv_cast_test.sql index 1270eed678f29671fdd8100123f10715e2b1c911..578ae62f240f3cdb73e9b29130f012f944182a30 100755 --- a/contrib/dolphin/sql/conv_cast_test.sql +++ b/contrib/dolphin/sql/conv_cast_test.sql @@ -1,8 +1,5 @@ --- b compatibility case -drop database if exists conv_cast_test; --- create database conv_cast_test dbcompatibility 'b'; -create database conv_cast_test with DBCOMPATIBILITY = 'B'; -\c conv_cast_test +create schema conv_cast_test; +set current_schema to 'conv_cast_test'; select conv(-211111111111111111111111111111111111111111111111111111111177777,10,8); select conv(-366666666666666666666666666666666666666, 10, 8); @@ -73,5 +70,5 @@ select cast(b'111111111111111111111111111111111111111111111111111111111111111' a select cast(b'11111111111111111111111111111111111111111111111111111111111111111' as unsigned); select cast(b'11111111111111111111111111111111111111111111111111111111111111111' as signed); -\c postgres -drop database conv_cast_test; +drop schema conv_cast_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/create_function_test/call_function.sql b/contrib/dolphin/sql/create_function_test/call_function.sql index 8b2a75336043030d12979ebd9e1dfe8fada2ef13..4deeecf546ed2526d1687a7a7b742a3cd2e00a93 100755 --- a/contrib/dolphin/sql/create_function_test/call_function.sql +++ b/contrib/dolphin/sql/create_function_test/call_function.sql @@ -1,6 +1,6 @@ -drop database if exists db_func_call1; -create database db_func_call1 dbcompatibility 'B'; -\c db_func_call1 +create schema db_func_call1; +set 
current_schema to 'db_func_call1'; + CREATE FUNCTION f1 (s CHAR(20)) RETURNS int CONTAINS SQL AS $$ select 1 $$ ; @@ -59,6 +59,6 @@ END; $$ LANGUAGE plpgsql; call f_3(); -\c postgres -drop database if exists db_func_call1; +drop schema db_func_call1 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/create_function_test/commentsharp.sql b/contrib/dolphin/sql/create_function_test/commentsharp.sql index 328c5dc3c5dbdc656dec25d97c08f757cd9eddab..656796a92babb612ab65cd1789b7b2e77802a17e 100644 --- a/contrib/dolphin/sql/create_function_test/commentsharp.sql +++ b/contrib/dolphin/sql/create_function_test/commentsharp.sql @@ -1,6 +1,5 @@ -drop database if exists db_comment_sharp; -create database db_comment_sharp dbcompatibility 'B'; -\c db_comment_sharp +create schema db_comment_sharp; +set current_schema to 'db_comment_sharp'; create table t1(a int,b int); @@ -155,6 +154,5 @@ drop table t_test2; drop table t_test3; -\c postgres - -drop database if exists db_comment_sharp; +drop schema db_comment_sharp cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/create_function_test/deterministic.sql b/contrib/dolphin/sql/create_function_test/deterministic.sql index 21dd80c3c2b02f8196abbfedde69f12e185de188..5f361198136cb8c03a93db074938ca42a9988ae6 100755 --- a/contrib/dolphin/sql/create_function_test/deterministic.sql +++ b/contrib/dolphin/sql/create_function_test/deterministic.sql @@ -1,6 +1,5 @@ -drop database if exists db_func_1; -create database db_func_1 dbcompatibility 'B'; -\c db_func_1 +create schema db_func_1; +set current_schema to 'db_func_1'; CREATE FUNCTION f1 (s CHAR(20)) RETURNS int NOT DETERMINISTIC AS $$ select 1 $$ ; @@ -22,6 +21,6 @@ call f3(3); call f4(4); -\c postgres -drop database if exists db_func_1; +drop schema db_func_1 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/create_function_test/language_sql.sql b/contrib/dolphin/sql/create_function_test/language_sql.sql index 
a6d8ae9490431baed3a06d0a24e40f65c71bd41c..fac8993abf3c9cb83a2156b7449d8691f7ae18b0 100755 --- a/contrib/dolphin/sql/create_function_test/language_sql.sql +++ b/contrib/dolphin/sql/create_function_test/language_sql.sql @@ -1,6 +1,5 @@ -drop database if exists db_func_2; -create database db_func_2 dbcompatibility 'B'; -\c db_func_2 +create schema db_func_2; +set current_schema to 'db_func_2'; CREATE FUNCTION f1 (s CHAR(20)) RETURNS int AS $$ select 1 $$ ; @@ -144,5 +143,5 @@ call f4(4); -\c postgres -drop database if exists db_func_2; +drop schema db_func_2 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/create_function_test/m_type_create_proc.sql b/contrib/dolphin/sql/create_function_test/m_type_create_proc.sql index 6700eed4172896fec16a667cfb9fea653661c907..45192a0bb5f9af68b12cfce6ed9aa2e9ddc67829 100644 --- a/contrib/dolphin/sql/create_function_test/m_type_create_proc.sql +++ b/contrib/dolphin/sql/create_function_test/m_type_create_proc.sql @@ -1,6 +1,5 @@ -drop database if exists m_create_proc_type; -create database m_create_proc_type dbcompatibility 'B'; -\c m_create_proc_type +create schema m_create_proc_type; +set current_schema to 'm_create_proc_type'; --test create procedure delimiter // @@ -275,7 +274,6 @@ call doempty(); -\c postgres - -drop database m_create_proc_type; +drop schema m_create_proc_type cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/create_function_test/single_line_proc.sql b/contrib/dolphin/sql/create_function_test/single_line_proc.sql index 3e81d412abc680894c65ce69322303a77eda15c7..9f54cdda16a3a7b0888fa7152d42952355e313d4 100644 --- a/contrib/dolphin/sql/create_function_test/single_line_proc.sql +++ b/contrib/dolphin/sql/create_function_test/single_line_proc.sql @@ -1,7 +1,5 @@ - -drop database if exists db_func_call_2; -create database db_func_call_2 dbcompatibility 'B'; -\c db_func_call_2 +create schema db_func_call_2; +set current_schema to 'db_func_call_2'; create table t1 (a int); @@ -151,7 +149,6 @@ 
create procedure proc33 () select z from tz; -\c regress - -drop database db_func_call_2; +drop schema db_func_call_2 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/create_function_test/sql_options.sql b/contrib/dolphin/sql/create_function_test/sql_options.sql index d19ccb1a36f05a42822661ec4347adfba238841f..f35d336129f859546cf5584dbb50d8062e365382 100755 --- a/contrib/dolphin/sql/create_function_test/sql_options.sql +++ b/contrib/dolphin/sql/create_function_test/sql_options.sql @@ -1,7 +1,5 @@ - -drop database if exists db_func_3; -create database db_func_3 dbcompatibility 'B'; -\c db_func_3 +create schema db_func_3; +set current_schema to 'db_func_3'; CREATE FUNCTION f1 (s CHAR(20)) RETURNS int CONTAINS SQL AS $$ select 1 $$ ; @@ -76,6 +74,6 @@ call f3(3); call f4(4); -\c postgres -drop database if exists db_func_3; +drop schema db_func_3 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/create_function_test/sql_security.sql b/contrib/dolphin/sql/create_function_test/sql_security.sql index a57d52c779463fecd36feaecde3bea52711e9ef1..aa2d97100d619c0d41bbec1496b2dddc80ca7ea3 100755 --- a/contrib/dolphin/sql/create_function_test/sql_security.sql +++ b/contrib/dolphin/sql/create_function_test/sql_security.sql @@ -1,6 +1,5 @@ -drop database if exists db_func_4; -create database db_func_4 dbcompatibility 'B'; -\c db_func_4 +create schema db_func_4; +set current_schema to 'db_func_4'; CREATE FUNCTION f1 (s CHAR(20)) RETURNS int SQL SECURITY DEFINER AS $$ select 1 $$ ; @@ -22,5 +21,5 @@ call f3(3); call f4(4); -\c postgres -drop database if exists db_func_4; +drop schema db_func_4 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/db_b_date_time_functions.sql b/contrib/dolphin/sql/db_b_date_time_functions.sql index f7de7ea4169b6f5b2d74174128d7b5eb175dc5c5..983e9e21ede41ddee9a971e66f60de659f3bc8a2 100644 --- a/contrib/dolphin/sql/db_b_date_time_functions.sql +++ b/contrib/dolphin/sql/db_b_date_time_functions.sql @@ -1,7 +1,5 @@ 
----- b compatibility case -drop database if exists b_datetime_func_test; -create database b_datetime_func_test dbcompatibility 'b'; -\c b_datetime_func_test +create schema b_datetime_func_test1; +set current_schema to 'b_datetime_func_test1'; set datestyle = 'ISO,ymd'; set time zone "Asia/Shanghai"; -- test part-one function @@ -155,5 +153,5 @@ insert into test_datetime values(sysdate(0)); insert into test_datetime values(sysdate(6)); select * from test_datetime; drop table test_datetime; -\c contrib_regression -DROP DATABASE b_datetime_func_test; \ No newline at end of file +drop schema b_datetime_func_test1 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/db_b_date_time_functions2.sql b/contrib/dolphin/sql/db_b_date_time_functions2.sql index c6e398298dce69442b3d21116f7b79118088e4cb..88100d7ea234bad93c208298aab2987121096caf 100644 --- a/contrib/dolphin/sql/db_b_date_time_functions2.sql +++ b/contrib/dolphin/sql/db_b_date_time_functions2.sql @@ -1,7 +1,5 @@ ----- b compatibility case -drop database if exists b_datetime_func_test; -create database b_datetime_func_test dbcompatibility 'b'; -\c b_datetime_func_test +create schema b_datetime_func_test2; +set current_schema to 'b_datetime_func_test2'; set datestyle = 'ISO,ymd'; set time zone "Asia/Shanghai"; create table test(funcname text, result text); @@ -612,5 +610,5 @@ insert into test values('yearweek(''0000-12-31 22:59:59.9999995'', 0)', yearweek -- 结果 select * from test order by funcname; drop table test; -\c contrib_regression -DROP DATABASE b_datetime_func_test; \ No newline at end of file +drop schema b_datetime_func_test2 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/db_b_date_time_functions3.sql b/contrib/dolphin/sql/db_b_date_time_functions3.sql index 858b88750a72468b85f6141c38cacb986ed55420..fe8113231c625d34634296b83ea934d9faf5b33c 100644 --- a/contrib/dolphin/sql/db_b_date_time_functions3.sql +++ 
b/contrib/dolphin/sql/db_b_date_time_functions3.sql @@ -1,7 +1,5 @@ ----- b compatibility case -drop database if exists b_datetime_func_test; -create database b_datetime_func_test dbcompatibility 'b'; -\c b_datetime_func_test +create schema b_datetime_func_test3; +set current_schema to 'b_datetime_func_test3'; set datestyle = 'ISO,ymd'; set time zone "Asia/Shanghai"; create table test(funcname text, result text); @@ -478,5 +476,5 @@ insert into test values('addtime(''10000-1-1 00:00:00'', ''00:00:00'')', addtime select * from test order by funcname; drop table test; -\c contrib_regression -DROP DATABASE b_datetime_func_test; \ No newline at end of file +drop schema b_datetime_func_test3 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/db_b_date_time_functions4.sql b/contrib/dolphin/sql/db_b_date_time_functions4.sql index 8124955fdd8700049357144b12588aa2d624c44e..cb802756e0fa9a4e419b277887e2042f484f9fa7 100644 --- a/contrib/dolphin/sql/db_b_date_time_functions4.sql +++ b/contrib/dolphin/sql/db_b_date_time_functions4.sql @@ -1,7 +1,5 @@ ----- b compatibility case -drop database if exists b_datetime_func_test; -create database b_datetime_func_test dbcompatibility 'b'; -\c b_datetime_func_test +create schema b_datetime_func_test4; +set current_schema to 'b_datetime_func_test4'; set datestyle = 'ISO,ymd'; set time zone "Asia/Shanghai"; create table test(funcname text, result text); @@ -376,5 +374,5 @@ insert into test values('str_to_date(''200454 Monday'', ''%X%V %W'')', str_to_da -- 结果 select * from test order by funcname; drop table test; -\c contrib_regression -DROP DATABASE b_datetime_func_test; \ No newline at end of file +drop schema b_datetime_func_test4 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/db_b_new_gram_test.sql b/contrib/dolphin/sql/db_b_new_gram_test.sql index 65f1d728f4a34282b54e8b831a349e32b52bf68c..c2dcedef4efff1022b23fae9b84cea7a78b4c492 100644 --- 
a/contrib/dolphin/sql/db_b_new_gram_test.sql +++ b/contrib/dolphin/sql/db_b_new_gram_test.sql @@ -1,6 +1,5 @@ -drop database if exists db_b_new_gram_test; -create database db_b_new_gram_test dbcompatibility 'B'; -\c db_b_new_gram_test +create schema db_b_new_gram_test; +set current_schema to 'db_b_new_gram_test'; -- CREATE TABLE engine test CREATE TABLE test_engine_1 (a int) engine = InnoDB; @@ -389,9 +388,8 @@ SELECT COUNT(*) FROM t_ctas_new; DROP TABLE t_ctas_new; DROP TABLE t_ctas; -drop database if exists test_m; -create database test_m dbcompatibility 'b'; -\c test_m +create schema test_m; +set current_schema to 'test_m'; create table test_unique( f1 int, f2 int, @@ -470,6 +468,6 @@ select * from ignore_range_range partition (p_201901, p_201905_a); select * from ignore_range_range partition (p_201901, p_201905_b); drop table ignore_range_range; -\c postgres -drop database if exists test_m; -drop database db_b_new_gram_test; +drop schema test_m cascade; +drop schema db_b_new_gram_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/db_b_parser1.sql b/contrib/dolphin/sql/db_b_parser1.sql index c5244c9feb8b17cb54c0711c8b930863aa581238..2787daba80ea03120b68b3dafca24271591d2ab3 100644 --- a/contrib/dolphin/sql/db_b_parser1.sql +++ b/contrib/dolphin/sql/db_b_parser1.sql @@ -1,6 +1,5 @@ -drop database if exists db_b_parser1; -create database db_b_parser1 dbcompatibility 'b'; -\c db_b_parser1 +create schema db_b_parser1; +set current_schema to 'db_b_parser1'; select 'bbbbb' regexp '^([bc])\1*$' as t, 'bbbbb' not regexp '^([bc])\1*$' as t2, 'bbbbb' rlike '^([bc])\1*$' as t; select 'ccc' regexp '^([bc])\1*$' as t, 'ccc' not regexp '^([bc])\1*$' as t2, 'ccc' rlike '^([bc])\1*$' as t; select 'xxx' regexp '^([bc])\1*$' as f, 'xxx' not regexp '^([bc])\1*$' as f2, 'xxx' rlike '^([bc])\1*$' as f; @@ -64,5 +63,5 @@ select '-12.3abc' rlike true; select '-12.3abc' rlike false; select '-12.3abc' rlike 'null'; -\c postgres -drop database if exists db_b_parser1; \ 
No newline at end of file +drop schema db_b_parser1 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/db_b_parser2.sql b/contrib/dolphin/sql/db_b_parser2.sql index e1727aec244be075aae326471aab97e178fcda34..78d462fd043cefd62a33d5a8597938b76469405e 100644 --- a/contrib/dolphin/sql/db_b_parser2.sql +++ b/contrib/dolphin/sql/db_b_parser2.sql @@ -1,6 +1,5 @@ -drop database if exists db_b_parser2; -create database db_b_parser2 dbcompatibility 'b'; -\c db_b_parser2 +create schema db_b_parser2; +set current_schema to 'db_b_parser2'; --验证DAYOFMONTH() DAYOFWEEK() DAYOFYEAR() HOUR() MICROSECOND() MINUTE() QUARTER() SECOND() WEEKDAY() WEEKOFYEAR() YEAR() select DAYOFMONTH(datetime '2021-11-4 16:30:44.341191'); @@ -159,5 +158,5 @@ insert into fchar_test values('零一二三四五六七八九十'); select fchar,length(fchar) from fchar_test order by 1,2; drop table fchar_test; -\c postgres -drop database if exists db_b_parser2; \ No newline at end of file +drop schema db_b_parser2 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/db_b_parser3.sql b/contrib/dolphin/sql/db_b_parser3.sql index 3f2bcbcc6fcd1a9fbb79cb70b53debcd487041c6..6788c244480939c7b01c8ddec546ea8efecc70b8 100644 --- a/contrib/dolphin/sql/db_b_parser3.sql +++ b/contrib/dolphin/sql/db_b_parser3.sql @@ -1,6 +1,5 @@ -drop database if exists db_b_parser3; -create database db_b_parser3 dbcompatibility 'b'; -\c db_b_parser3 +create schema db_b_parser3; +set current_schema to 'db_b_parser3'; --测试点一:验证lcase函数 select lcase('ABc'), lcase('哈哈'), lcase('123456'),lcase('哈市&%%¥#'),lcase(null); @@ -118,5 +117,5 @@ select acos(11); select acos(1.000001); select acos(-1.000001); -\c postgres -drop database if exists db_b_parser3; \ No newline at end of file +drop schema db_b_parser3 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/db_b_parser4.sql b/contrib/dolphin/sql/db_b_parser4.sql index 
8bdd7a6c930b5d7f21d2846a41f7dc8df0b2e558..ded92180b7ffc604638497d88e4e89473146a46c 100644 --- a/contrib/dolphin/sql/db_b_parser4.sql +++ b/contrib/dolphin/sql/db_b_parser4.sql @@ -1,6 +1,5 @@ -drop database if exists db_b_parser4; -create database db_b_parser4 dbcompatibility 'b'; -\c db_b_parser4 +create schema db_b_parser4; +set current_schema to 'db_b_parser4'; --验证text类型 drop table if exists tb_db_b_parser_0001; create table tb_db_b_parser_0001(a text(10),b tinytext,c mediumtext,d longtext); @@ -30,5 +29,5 @@ drop table if exists tb_default_double; drop table if exists tb_real_float; -\c postgres -drop database if exists db_b_parser4; \ No newline at end of file +drop schema db_b_parser4 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/db_b_plpgsql_test.sql b/contrib/dolphin/sql/db_b_plpgsql_test.sql index 209788734ff222fd3d867469991dac771b89c97b..ec0a980abc2d353963f6dbeb8169a38da33b5907 100644 --- a/contrib/dolphin/sql/db_b_plpgsql_test.sql +++ b/contrib/dolphin/sql/db_b_plpgsql_test.sql @@ -1,6 +1,5 @@ -drop database if exists db_b_plpgsql_test; -create database db_b_plpgsql_test dbcompatibility 'b'; -\c db_b_plpgsql_test +create schema db_b_plpgsql_test; +set current_schema to 'db_b_plpgsql_test'; create table tb_b_grammar_0038(a text(10)) engine = 表1; @@ -44,5 +43,5 @@ end; SELECT * from proc_01(); SELECT * from tb_b_grammar_0038; -\c postgres -drop database if exists db_b_plpgsql_test; \ No newline at end of file +drop schema db_b_plpgsql_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/db_b_rename_user_test.sql b/contrib/dolphin/sql/db_b_rename_user_test.sql index 1334cbba63767718dd8cd028dddca7a5fd8618aa..82e532ff8dbc9ba9b594207918fe2ad9e045c712 100644 --- a/contrib/dolphin/sql/db_b_rename_user_test.sql +++ b/contrib/dolphin/sql/db_b_rename_user_test.sql @@ -1,6 +1,5 @@ -drop database if exists db_b_rename_user_test; -create database db_b_rename_user_test 
dbcompatibility 'b'; -\c db_b_rename_user_test +create schema db_b_rename_user_test; +set current_schema to 'db_b_rename_user_test'; CREATE USER user1 WITH ENCRYPTED PASSWORD 'user1@1234'; CREATE USER user2 WITH ENCRYPTED PASSWORD 'user2@1234'; @@ -35,5 +34,5 @@ drop user user4; drop user user5; drop user user6; -\c postgres -drop database if exists db_b_rename_user_test; \ No newline at end of file +drop schema db_b_rename_user_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/default_guc.sql b/contrib/dolphin/sql/default_guc.sql index 1fa1aae467a39140c3da7aaee7a1bb6097ad90e6..9213fb5d3af50ee56ec07ada45134973b3dc1745 100644 --- a/contrib/dolphin/sql/default_guc.sql +++ b/contrib/dolphin/sql/default_guc.sql @@ -1,6 +1,5 @@ -drop database if exists default_guc; -create database default_guc dbcompatibility 'b'; -\c default_guc +create schema default_guc; +set current_schema to 'default_guc'; show behavior_compat_options; select 0.123; @@ -13,5 +12,5 @@ select 0.123; select md5(0.123); select md5('0.123'); -\c postgres -drop database if exists default_guc; \ No newline at end of file +drop schema default_guc cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/describe.sql b/contrib/dolphin/sql/describe.sql index 3cfed17de06c5f0473b2eca002478a36c0f5df86..e1234a4be09e61fc37317cb7cbf297d933320877 100644 --- a/contrib/dolphin/sql/describe.sql +++ b/contrib/dolphin/sql/describe.sql @@ -1,6 +1,5 @@ -drop database if exists db_describe; -create database db_describe dbcompatibility 'b'; -\c db_describe +create schema db_describe; +set current_schema to 'db_describe'; CREATE TABLE test2 ( id int PRIMARY KEY @@ -51,9 +50,9 @@ primary key (a) ); desc test; describe test; -desc public.test; +desc db_describe.test; desc sc.test; desc public.test4; desc sc.test4; -\c postgres -drop database if exists db_describe; \ No newline at end of file +drop schema db_describe cascade; +reset 
current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/empty_value_lists.sql b/contrib/dolphin/sql/empty_value_lists.sql index a7fb2799d371fd6d9e3c3a54791535baad490b94..4f5dd8d400824d58858e622e5b604a7fc122a5fe 100644 --- a/contrib/dolphin/sql/empty_value_lists.sql +++ b/contrib/dolphin/sql/empty_value_lists.sql @@ -1,8 +1,5 @@ --- b compatibility case -drop database if exists empty_value_lists; -create database empty_value_lists dbcompatibility 'b'; - -\c empty_value_lists +create schema empty_value_lists; +set current_schema to 'empty_value_lists'; create table test1(num int); create table test2(num int default 3); @@ -191,5 +188,5 @@ select * from m3; insert into m4 values(),(); select * from m4; -\c postgres -drop database if exists empty_value_lists; +drop schema empty_value_lists cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/empty_value_support_value.sql b/contrib/dolphin/sql/empty_value_support_value.sql index 3317e5899808afccd9decfa2b365c5a9bb50163a..d0e2fb9150fce2ff5127f1b963fa11e89a265af1 100644 --- a/contrib/dolphin/sql/empty_value_support_value.sql +++ b/contrib/dolphin/sql/empty_value_support_value.sql @@ -1,7 +1,5 @@ -drop database if exists empty_value_support_value; -create database empty_value_support_value dbcompatibility 'b'; - -\c empty_value_support_value +create schema empty_value_support_value; +set current_schema to 'empty_value_support_value'; create table test1(num int not null); insert into test1 value(); insert into test1 value(),(); @@ -11,5 +9,5 @@ select * from test1; insert into test1 value(),(); select * from test1; -\c postgres -drop database if exists empty_value_support_value; \ No newline at end of file +drop schema empty_value_support_value cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/explain_desc.sql b/contrib/dolphin/sql/explain_desc.sql index e6a6761343b332439039e133465a49648631ef0b..e1352759047b2728eb0f8a45f2e6a1c2321019d8 100644 --- 
a/contrib/dolphin/sql/explain_desc.sql +++ b/contrib/dolphin/sql/explain_desc.sql @@ -1,12 +1,12 @@ -create database db_explain_desc with dbcompatibility 'B'; -\c db_explain_desc +create schema db_explain_desc; +set current_schema to 'db_explain_desc'; create table ed_t(c1 int, c2 varchar(100), c3 int default 10); insert into ed_t values(generate_series(1, 10), 'hello', 100); -- 1.use explain to query table's info explain ed_t; -explain public.ed_t; +explain db_explain_desc.ed_t; -- 2.use desc to query plan info desc select c1, c2, c3 from ed_t; @@ -44,6 +44,5 @@ explain format='TraDitional' delete from ed_t where c1 < 5; drop table ed_t; -\c postgres - -drop database db_explain_desc; +drop schema db_explain_desc cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/export_set.sql b/contrib/dolphin/sql/export_set.sql index 5031a2af805b02b89d5f1d56ff5c072ef4f81264..01706a27bc8eb41a5e1b766cf3f6ed5d50fff785 100644 --- a/contrib/dolphin/sql/export_set.sql +++ b/contrib/dolphin/sql/export_set.sql @@ -1,6 +1,5 @@ -drop database if exists export_set; -create database export_set dbcompatibility 'b'; -\c export_set +create schema export_set; +set current_schema to 'export_set'; -- 测试缺省值 SELECT EXPORT_SET(5,'Y','N',',',5); @@ -33,5 +32,5 @@ SELECT EXPORT_SET(5,'YYYYYYYYYYYYYYYY','N',',',5); SELECT EXPORT_SET(5,'Y','NNNNNNNNNNNNNNN',',',5); SELECT EXPORT_SET(5,'Y','N',',,,,,,,,,,,,',5); -\c postgres -drop database if exists export_set; \ No newline at end of file +drop schema export_set cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/float_numeric_test/db_b_float_round_test.sql b/contrib/dolphin/sql/float_numeric_test/db_b_float_round_test.sql index 35579f8c0929b01acb111b8082cc442b2a8d541a..a1357df2e3a0ce74e6a6b6ec92e51b31427a31dc 100644 --- a/contrib/dolphin/sql/float_numeric_test/db_b_float_round_test.sql +++ b/contrib/dolphin/sql/float_numeric_test/db_b_float_round_test.sql @@ -1,4 +1,7 @@ -- the test case of A format 
+drop database if exists test; +create database test dbcompatibility 'A'; +\c test SELECT 2.5::float4::int1; SELECT 2.5::float8::int1; SELECT 2.5::int1; @@ -28,10 +31,12 @@ SELECT 3.5::int8; SELECT dround(2.5); SELECT dround(3.5); +\c contrib_regression +drop database test; + -- the test case of dolphin plugin -drop database if exists test; -create database test dbcompatibility 'B'; -\c test +create schema test; +set current_schema to 'test'; SELECT 2.5::float4::int1; SELECT 2.5::float8::int1; @@ -62,5 +67,5 @@ SELECT 3.5::int8; SELECT dround(2.5); SELECT dround(3.5); -\c postgres -drop database test; +drop schema test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/float_numeric_test/db_b_log_test.sql b/contrib/dolphin/sql/float_numeric_test/db_b_log_test.sql index 40b3035379f7724a710ef5ee0fc59fe62eec0dbd..14fab25c5f5a85fac6545c9e74b51d62207ebcfc 100644 --- a/contrib/dolphin/sql/float_numeric_test/db_b_log_test.sql +++ b/contrib/dolphin/sql/float_numeric_test/db_b_log_test.sql @@ -1,8 +1,15 @@ -SELECT LOG(10); drop database if exists db_b_log_test; -create database db_b_log_test dbcompatibility 'B'; +create database db_b_log_test dbcompatibility 'A'; \c db_b_log_test +SELECT LOG(10); + +\c contrib_regression +drop database db_b_log_test; + +create schema db_b_log_test; +set current_schema to 'db_b_log_test'; + SELECT LOG(10); SELECT LOG10(100); SELECT LOG2(64); @@ -47,5 +54,5 @@ select log(b'111'::int); select log2(b'111'::int); select log10(b'111'::int); -\c postgres -drop database db_b_log_test; +drop schema db_b_log_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/float_numeric_test/db_b_sqrt_test.sql b/contrib/dolphin/sql/float_numeric_test/db_b_sqrt_test.sql index a78b7ec7e0151a792ed5c6b9d01561b461406239..444604ae4ef42cb08ffc0a94a117693fef34dba3 100644 --- a/contrib/dolphin/sql/float_numeric_test/db_b_sqrt_test.sql +++ b/contrib/dolphin/sql/float_numeric_test/db_b_sqrt_test.sql @@ -1,9 +1,15 @@ +drop database if exists 
db_b_sqrt_test; +create database db_b_sqrt_test dbcompatibility 'A'; +\c db_b_sqrt_test + SELECT SQRT(64); SELECT SQRT(-64); -drop database if exists db_b_sqrt_test; -create database db_b_sqrt_test dbcompatibility 'B'; -\c db_b_sqrt_test +\c contrib_regression +drop database db_b_sqrt_test; + +create schema db_b_sqrt_test; +set current_schema to 'db_b_sqrt_test'; SELECT SQRT(64); SELECT SQRT(-64); @@ -16,5 +22,5 @@ select sqrt(b'111'); select sqrt(7); select sqrt(b'111'::int); -\c postgres -drop database db_b_sqrt_test; +drop schema db_b_sqrt_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/flush.sql b/contrib/dolphin/sql/flush.sql index df630c45390c7884fc23839bd29bc6f0aa11ae6c..ee5a8023eee09a8e6dfe8ecaabb787168ce53fdb 100644 --- a/contrib/dolphin/sql/flush.sql +++ b/contrib/dolphin/sql/flush.sql @@ -1,6 +1,5 @@ -drop database if exists db_flush; -create database db_flush dbcompatibility 'b'; -\c db_flush +create schema db_flush; +set current_schema to 'db_flush'; FLUSH BINARY LOGS; -\c postgres -drop database if exists db_flush; +drop schema db_flush cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/get_b_database.sql b/contrib/dolphin/sql/get_b_database.sql index d40ba04d425738a0c2e66c955e8430f8d4387689..6cc8f4cb485118043cb401bba0614a08eec6f9b5 100644 --- a/contrib/dolphin/sql/get_b_database.sql +++ b/contrib/dolphin/sql/get_b_database.sql @@ -1,6 +1,5 @@ -drop database if exists get_db; -create database get_db dbcompatibility 'b'; -\c get_db +create schema get_db; +set current_schema to 'get_db'; select database(); create schema testdb; use testdb; @@ -9,5 +8,5 @@ create schema testdb1; select database(); use testdb1; select database(); -\c postgres -drop database if exists get_db; +drop schema get_db cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/greatest_least.sql b/contrib/dolphin/sql/greatest_least.sql index f9633f1cc7db10bea658bd1bcc55f6f3dc248125..2dd043c0f386aca09086e0db5396ad294d1a4556 100644 --- 
a/contrib/dolphin/sql/greatest_least.sql +++ b/contrib/dolphin/sql/greatest_least.sql @@ -1,6 +1,5 @@ -drop database if exists greatest_least; -create database greatest_least dbcompatibility 'b'; -\c greatest_least +create schema greatest_least; +set current_schema to 'greatest_least'; --return null if input include null select GREATEST(null,1,2), GREATEST(null,1,2) is null; select GREATEST(1,2); @@ -8,5 +7,5 @@ select GREATEST(1,2); select LEAST(null,1,2), LEAST(null,1,2) is null; select LEAST(1,2); -\c postgres -drop database if exists greatest_least; +drop schema greatest_least cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/group_concat_test.sql b/contrib/dolphin/sql/group_concat_test.sql index 2cf9db19b485c54f7ea490efefff3d96d8d580ae..3887d46c9562efa15bd04a9d5235cf5d4218dc41 100644 --- a/contrib/dolphin/sql/group_concat_test.sql +++ b/contrib/dolphin/sql/group_concat_test.sql @@ -1,5 +1,5 @@ -create database t dbcompatibility 'B'; -\c t; +create schema t; +set current_schema to 't'; create table t(id text, v text); insert into t(id, v) values('1','a'),('2','b'),('1','c'),('2','d'); @@ -10,5 +10,5 @@ select * from tmp_table; set explain_perf_mode=pretty; explain verbose select id, group_concat(VARIADIC ARRAY[id,':',v] order by id) as title from t group by id; -\c postgres -drop database t; +drop schema t cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/if_not_exists_test.sql b/contrib/dolphin/sql/if_not_exists_test.sql index 359ca7a189b2c30a8f668429a5426db76eaa4d5e..41cd131b68252f00510278d9d11a9f06da19c072 100644 --- a/contrib/dolphin/sql/if_not_exists_test.sql +++ b/contrib/dolphin/sql/if_not_exists_test.sql @@ -1,6 +1,5 @@ -drop database if exists test_if_not_exists; -create database test_if_not_exists dbcompatibility 'B'; -\c test_if_not_exists +create schema test_if_not_exists; +set current_schema to 'test_if_not_exists'; CREATE USER ZZZ WITH PASSWORD 'openGauss@123'; CREATE USER ZZZ WITH PASSWORD 'openGauss@123'; @@ -8,5 
+7,5 @@ CREATE USER IF NOT EXISTS ZZZ WITH PASSWORD 'openGauss@123'; DROP USER ZZZ; CREATE USER IF NOT EXISTS ZZZ WITH PASSWORD 'openGauss@123'; -\c postgres -drop database test_if_not_exists; +drop schema test_if_not_exists cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/implicit_cast.sql b/contrib/dolphin/sql/implicit_cast.sql index 9ec07b4d30071f797b6ce74d19ca6aee247fdb7f..b179db7ff888cf4c4450ca3f8c26faa8c7f083c3 100644 --- a/contrib/dolphin/sql/implicit_cast.sql +++ b/contrib/dolphin/sql/implicit_cast.sql @@ -1,6 +1,5 @@ -drop database if exists implicit_cast; -create database implicit_cast dbcompatibility 'b'; -\c implicit_cast +create schema implicit_cast; +set current_schema to 'implicit_cast'; select 1::int1 % 1::float4; select 1::int2 % 1::float4; @@ -62,5 +61,5 @@ select 1::int2 | 1::text; select 1::int4 | 1::text; select 1::int8 | 1::text; -\c postgres -drop database if exists implicit_cast; \ No newline at end of file +drop schema implicit_cast cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/insert_set.sql b/contrib/dolphin/sql/insert_set.sql index b921a0466b5d4d2353634dc43367cd422661e1fa..b58257333c5455f77e081aadb99284e6511c1223 100644 --- a/contrib/dolphin/sql/insert_set.sql +++ b/contrib/dolphin/sql/insert_set.sql @@ -1,6 +1,5 @@ -drop database if exists insert_set; -create database insert_set dbcompatibility 'B'; -\c insert_set +create schema insert_set; +set current_schema to 'insert_set'; create table test_figure(tinyint tinyint, smallint smallint, integer integer, binary_integer binary_integer, bigint bigint); insert into test_figure set bigint = 7234134, binary_integer = 1011101, integer = 10000, smallint = 1, tinyint = 3; @@ -40,5 +39,5 @@ select * from test_error; insert into test_error set name = 23; select * from test_error; -\c postgres -drop database insert_set; +drop schema insert_set cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/join_without_on.sql 
b/contrib/dolphin/sql/join_without_on.sql index 9a00342a0b18666ee7f49cd89d0c0f2c8293a05f..37a78999f7e8c72c2e2f5a6c5fb23f82fc8e4bf9 100644 --- a/contrib/dolphin/sql/join_without_on.sql +++ b/contrib/dolphin/sql/join_without_on.sql @@ -1,6 +1,5 @@ -drop database if exists join_without_on; -create database join_without_on dbcompatibility 'b'; -\c join_without_on +create schema join_without_on; +set current_schema to 'join_without_on'; CREATE TABLE J1_TBL ( i integer, @@ -61,5 +60,5 @@ SELECT * FROM J1_TBL JOIN J2_TBL JOIN J3_TBL ON J1_TBL.i = J3_TBL.i; SELECT * FROM J1_TBL JOIN J2_TBL JOIN J3_TBL JOIN J4_TBL ON J1_TBL.i = J4_TBL.i; SELECT * FROM J1_TBL JOIN J2_TBL INNER JOIN J3_TBL INNER JOIN J4_TBL ON J1_TBL.i = J4_TBL.i; -\c postgres -drop database if exists join_without_on; \ No newline at end of file +drop schema join_without_on cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_array.sql b/contrib/dolphin/sql/json_array.sql index 0b9dc13a3cb3f05f6f9f8a94d4f4096ebaadc587..f794eec1a780e59e4e52c0043b07621078902797 100644 --- a/contrib/dolphin/sql/json_array.sql +++ b/contrib/dolphin/sql/json_array.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_array; -create database test_json_array dbcompatibility 'B'; -\c test_json_array +create schema test_json_array; +set current_schema to 'test_json_array'; select json_array(1,2,3,4); select json_array(1,3,4); @@ -19,5 +18,5 @@ insert into dataa(name) value(json_array('sjy')); select name from dataa; drop table dataa; -\c postgres -drop database if exists test_json_array; \ No newline at end of file +drop schema test_json_array cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_array_append.sql b/contrib/dolphin/sql/json_array_append.sql index 1486501f6946124a14fa11af07d922d14c43a511..92635fa1fbda1d655fc666929503c5d195513be4 100644 --- a/contrib/dolphin/sql/json_array_append.sql +++ b/contrib/dolphin/sql/json_array_append.sql 
@@ -1,6 +1,5 @@ -drop database if exists test_json_array_append; -create database test_json_array_append dbcompatibility'B'; -\c test_json_array_append +create schema test_json_array_append; +set current_schema to 'test_json_array_append'; select JSON_ARRAY_APPEND('[1, [2, 3]]', '$[1]', 4); select JSON_ARRAY_APPEND('[1, [2, 3]]', '$[0]', 4); @@ -37,5 +36,5 @@ select JSON_ARRAY_APPEND('[1, [2, 3]]', '$[*]', 4); select JSON_ARRAY_APPEND('[1, [2, 3]]', '$[*]', 日); select JSON_ARRAY_APPEND('[1, [2, 3]]', ' ', 4); -\c postgres -drop database if exists test_json_array_append; \ No newline at end of file +drop schema test_json_array_append cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_array_insert.sql b/contrib/dolphin/sql/json_array_insert.sql index a8440e389df8226be94eb1dc6c08ed502b3267ac..361eda9f3eab4e27af5ce6366b80e2662bdb240c 100644 --- a/contrib/dolphin/sql/json_array_insert.sql +++ b/contrib/dolphin/sql/json_array_insert.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_array_insert; -create database test_json_array_insert dbcompatibility 'B'; -\c test_json_array_insert +create schema test_json_array_insert; +set current_schema to 'test_json_array_insert'; SELECT JSON_ARRAY_INSERT('[1, [2, 3], {"a": [4, 5]}]', '$[0]', 0); SELECT JSON_ARRAY_INSERT('[1, [2, 3], {"a": [4, 5]}]', '$[2]', 4); @@ -45,5 +44,5 @@ SELECT JSON_ARRAY_INSERT('[1, [2, 3]]', '$..1', 4); SELECT JSON_ARRAY_INSERT('[1, [2, 3]]', '$[*]', 4); SELECT JSON_ARRAY_INSERT('[1, [2, 3]]', ' ', 4); -\c postgres -drop database if exists test_json_array_insert; \ No newline at end of file +drop schema test_json_array_insert cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_arrayagg.sql b/contrib/dolphin/sql/json_arrayagg.sql index 8a575ead408933f703665595d4afe5c65cd23af7..163cbbd4439a997a3ee2f79e68d5ba37d66e8209 100644 --- a/contrib/dolphin/sql/json_arrayagg.sql +++ b/contrib/dolphin/sql/json_arrayagg.sql 
@@ -1,6 +1,5 @@ -drop database if exists json_arrayagg_test; -create database json_arrayagg_test dbcompatibility 'B'; -\c json_arrayagg_test +create schema json_arrayagg_test; +set current_schema to 'json_arrayagg_test'; -- create table for test create table City(District varchar(30), Name varchar(30), Population int); insert into City values ('Capital Region','Canberra',322723); @@ -48,5 +47,5 @@ insert into time_table values(20221204, 3); select json_arrayagg(b) from time_table; select json_arrayagg(a) from time_table; -\c postgres -drop database json_arrayagg_test; +drop schema json_arrayagg_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/json_contains.sql b/contrib/dolphin/sql/json_contains.sql index f55312267bcc8ef4c1f18164d6ba2c3f488ce4f1..32ff372f039b116ff565e5ecc1ba7692ac63652a 100644 --- a/contrib/dolphin/sql/json_contains.sql +++ b/contrib/dolphin/sql/json_contains.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_contains; -create database test_json_contains dbcompatibility 'b'; -\c test_json_contains +create schema test_json_contains; +set current_schema to 'test_json_contains'; select json_contains('1',null); select json_contains(null,'1'); @@ -87,5 +86,5 @@ insert into json_contains_test values('[1,2,3,4]','[2,4]','$'); select *, json_contains(target, candidate, path) from json_contains_test; drop table json_contains_test; -\c postgres; -drop database if exists test_json_contains; \ No newline at end of file +drop schema test_json_contains cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_contains_path.sql b/contrib/dolphin/sql/json_contains_path.sql index e41aed8010727f53e1abb3b5637d8203eb0e0db5..b40af56f2bc1f901e33237c67af781567cd52377 100644 --- a/contrib/dolphin/sql/json_contains_path.sql +++ b/contrib/dolphin/sql/json_contains_path.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_contains_path; -create database test_json_contains_path dbcompatibility 'b'; -\c 
test_json_contains_path +create schema test_json_contains_path; +set current_schema to 'test_json_contains_path'; select json_contains_path(null,'one','$[0]'); select json_contains_path('[1,2,3]',null,'$[0]'); @@ -40,5 +39,5 @@ insert into json_contains_path_test values('{"a": 1, "b": 2, "c": {"d": 4}}', 'a select *, json_contains_path(target, mode, '$.a.d', '$.c.d') from json_contains_path_test; drop table json_contains_path_test; -\c postgres; -drop database if exists test_json_contains_path; \ No newline at end of file +drop schema test_json_contains_path cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_depth.sql b/contrib/dolphin/sql/json_depth.sql index b6ef54c913a834bfe3f5dddf65b28369f589e821..63b2cb4cffa3e0a28b6bc9031d276faadf5cd901 100644 --- a/contrib/dolphin/sql/json_depth.sql +++ b/contrib/dolphin/sql/json_depth.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_depth; -create database test_json_depth dbcompatibility 'B'; -\c test_json_depth +create schema test_json_depth; +set current_schema to 'test_json_depth'; select json_depth('{}'); select json_depth('[]'); @@ -32,5 +31,5 @@ select *from test1; select json_depth(data) from test1; drop table test1; -\c postgres -drop database if exists test_json_depth; \ No newline at end of file +drop schema test_json_depth cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_extract.sql b/contrib/dolphin/sql/json_extract.sql index 92708000d5459e99f12905a61934e4f0f0803f60..6a4b30d9ff02fe53c4867766e543e7a586a9539e 100644 --- a/contrib/dolphin/sql/json_extract.sql +++ b/contrib/dolphin/sql/json_extract.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_extract; -create database test_json_extract dbcompatibility'B'; -\c test_json_extract +create schema test_json_extract; +set current_schema to 'test_json_extract'; select json_extract('{"a": "lihua"}', '$.a'); select json_extract('{"a"}', '$.a'); @@ -31,5 +30,5 @@ 
insert into test values (json_extract('{"a": 43, "b": {"c": true}}', '$.b')); select * from test; -\c postgres -drop database if exists test_json_extract; \ No newline at end of file +drop schema test_json_extract cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_insert.sql b/contrib/dolphin/sql/json_insert.sql index bdd23b53e22c5bfa38d675a3813a850e95bb3aa0..d29879ef7e8b4cba5eca81296e9c6de86e3ed926 100644 --- a/contrib/dolphin/sql/json_insert.sql +++ b/contrib/dolphin/sql/json_insert.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_insert; -create database test_json_insert dbcompatibility'B'; -\c test_json_insert +create schema test_json_insert; +set current_schema to 'test_json_insert'; -- test for basic functionality of json_replace select JSON_INSERT('{"a": 43}', '$.b', 55); @@ -75,5 +74,5 @@ insert into test values (JSON_INSERT('{"a": 43, "b": {"c": true}}', '$.b[4]', 'Test')); select * from test; -\c postgres -drop database if exists test_json_insert; \ No newline at end of file +drop schema test_json_insert cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_keys.sql b/contrib/dolphin/sql/json_keys.sql index c9e41353ac39942cb36f9340f299dce9bcea2077..9f04e729d83e91c3c87ff8694956441aa600579f 100644 --- a/contrib/dolphin/sql/json_keys.sql +++ b/contrib/dolphin/sql/json_keys.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_keys; -create database test_json_keys dbcompatibility'B'; -\c test_json_keys +create schema test_json_keys; +set current_schema to 'test_json_keys'; SELECT JSON_KEYS('{"a":"t1"}'); SELECT JSON_KEYS('{"a":"t1","b":"t2"}'); @@ -31,5 +30,5 @@ create table student(name json); insert into student (name) value(json_keys('{"a":123,"b":{"c":"qwe"}}')); select name from student; -\c postgres -drop database if exists test_json_keys; \ No newline at end of file +drop schema test_json_keys cascade; +reset current_schema; \ No newline at end of file 
diff --git a/contrib/dolphin/sql/json_length.sql b/contrib/dolphin/sql/json_length.sql index 58fbccf70829a5800231f9ebf243b17327345c4a..238e655dc570be8b549ffff80e3226cd342661d2 100644 --- a/contrib/dolphin/sql/json_length.sql +++ b/contrib/dolphin/sql/json_length.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_length; -create database test_json_length dbcompatibility 'B'; -\c test_json_length +create schema test_json_length; +set current_schema to 'test_json_length'; select json_length(NULL); select json_length('NULL'); @@ -52,5 +51,5 @@ insert into test values select * from test; drop table test; -\c postgres; -drop database if exists test_json_length; +drop schema test_json_length cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/json_merge_patch.sql b/contrib/dolphin/sql/json_merge_patch.sql index 63315a582206df363dece3058b5d0da63aa8f387..24b3778e637827aeaa0cc1da795d47c1ec375465 100644 --- a/contrib/dolphin/sql/json_merge_patch.sql +++ b/contrib/dolphin/sql/json_merge_patch.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_merge_patch; -create database test_json_merge_patch dbcompatibility 'B'; -\c test_json_merge_patch +create schema test_json_merge_patch; +set current_schema to 'test_json_merge_patch'; select json_merge_patch(NULL); select json_merge_patch(NULL,NULL); @@ -75,5 +74,5 @@ insert into test1 values json_merge_patch('[{"a":"abc"},"bcd"]','{"1":"jks"}')); select * from test1; -\c postgres; -drop database if exists test_json_merge_patch; \ No newline at end of file +drop schema test_json_merge_patch cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_merge_preserve.sql b/contrib/dolphin/sql/json_merge_preserve.sql index b97eb59e63a9449e1c2a68d999b37794b2702876..6ca736134b17cbac5e4e7570c534f00ba5da7333 100644 --- a/contrib/dolphin/sql/json_merge_preserve.sql +++ b/contrib/dolphin/sql/json_merge_preserve.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_merge_preserve; -create 
database test_json_merge_preserve dbcompatibility 'B'; -\c test_json_merge_preserve +create schema test_json_merge_preserve; +set current_schema to 'test_json_merge_preserve'; select json_merge_preserve(NULL); select json_merge_preserve(NULL,NULL); @@ -75,5 +74,5 @@ insert into test1 values json_merge_preserve('[{"a":"abc"},"bcd"]','{"1":"jks"}')); select * from test1; -\c postgres; -drop database if exists test_json_merge_preserve; \ No newline at end of file +drop schema test_json_merge_preserve cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_object.sql b/contrib/dolphin/sql/json_object.sql index 87972138945fe5f8adac6ea98ba2efe7e60e5640..a61e73efa2896e27f8c06815201e2823efb8ff0e 100644 --- a/contrib/dolphin/sql/json_object.sql +++ b/contrib/dolphin/sql/json_object.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_object; -create database test_json_object dbcompatibility 'B'; -\c test_json_object +create schema test_json_object; +set current_schema to 'test_json_object'; -- test for b_compatibility_mode = false select json_object('{a,1,b,2,3,NULL,"d e f","a b c"}'); @@ -111,5 +110,5 @@ set dolphin.b_compatibility_mode = 0; select json_object('{a,1,b,2,3,NULL,"d e f","a b c"}'); select json_object('{a,b,"a b c"}', '{a,1,1}'); -\c postgres -drop database if exists test_json_object; \ No newline at end of file +drop schema test_json_object cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_objectagg.sql b/contrib/dolphin/sql/json_objectagg.sql index 1fdf43904fc202fb713a140fb05adcbbdb5cbd82..8751bc3c0be828994b6420b4cef476888b864a86 100644 --- a/contrib/dolphin/sql/json_objectagg.sql +++ b/contrib/dolphin/sql/json_objectagg.sql @@ -1,6 +1,5 @@ -drop database if exists json_objectagg_test; -create database json_objectagg_test dbcompatibility 'B'; -\c json_objectagg_test +create schema json_objectagg_test; +set current_schema to 'json_objectagg_test'; -- create table for 
test create table City(District varchar(30), Name varchar(30), Population int); insert into City values ('Capital Region','Canberra',322723); @@ -42,5 +41,5 @@ insert into time_table values(20211001, 2); insert into time_table values(20221204, 3); select json_objectagg(b, a) from time_table; -\c postgres -drop database json_objectagg_test; +drop schema json_objectagg_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/json_operator.sql b/contrib/dolphin/sql/json_operator.sql index 6966dc3f0f3476e49ee1746fe1d82d9c33417f19..66a688bf18d73dfc1fa21c928b2394a978475f7d 100644 --- a/contrib/dolphin/sql/json_operator.sql +++ b/contrib/dolphin/sql/json_operator.sql @@ -1,6 +1,5 @@ -drop database if exists test_operator; -create database test_operator dbcompatibility 'B'; -\c test_operator +create schema test_operator; +set current_schema to 'test_operator'; drop table if exists test1; create table test1(data json); @@ -30,6 +29,6 @@ select data->>'b' from test2; select data->>'c' from test2; -\c postgres -drop database test_operator; +drop schema test_operator cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/json_pretty.sql b/contrib/dolphin/sql/json_pretty.sql index b8b9d61d4d5b44a67150b1b5a27e7d2ea3447dd8..e5e81a9199a3875520cbd5807687a91eb22e3e56 100644 --- a/contrib/dolphin/sql/json_pretty.sql +++ b/contrib/dolphin/sql/json_pretty.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_pretty; -create database test_json_pretty dbcompatibility'B'; -\c test_json_pretty +create schema test_json_pretty; +set current_schema to 'test_json_pretty'; -- test for basic functionality of json_replace select JSON_PRETTY('{"a": 43}'); @@ -65,5 +64,5 @@ select JSON_PRETTY(textjson) from test; insert into test values (JSON_PRETTY('{"a": 43, "b": {"c": true}}')); select * from test; -\c postgres -drop database test_json_pretty; +drop schema test_json_pretty cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/json_quote.sql 
b/contrib/dolphin/sql/json_quote.sql index efe23878d2c40852a9e5b2cebea037d45e18cec9..f6dc1bb44f7b0a1f637f13c669e1cb12a4022edc 100644 --- a/contrib/dolphin/sql/json_quote.sql +++ b/contrib/dolphin/sql/json_quote.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_quote; -create database test_json_quote dbcompatibility'B'; -\c test_json_quote +create schema test_json_quote; +set current_schema to 'test_json_quote'; select json_quote(E'a\tb'); select json_quote('a b'); @@ -21,5 +20,5 @@ insert into student (name) value(json_quote('lc')); select name from student; drop table student; -\c postgres -drop database test_json_quote; +drop schema test_json_quote cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/json_remove.sql b/contrib/dolphin/sql/json_remove.sql index 27112bcc0a93c4370385fa9bda72436aa18a898a..ab9edf823c06a8b2c76fc43cbd0636f06fd26f50 100644 --- a/contrib/dolphin/sql/json_remove.sql +++ b/contrib/dolphin/sql/json_remove.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_remove; -create database test_json_remove dbcompatibility'B'; -\c test_json_remove +create schema test_json_remove; +set current_schema to 'test_json_remove'; SELECT JSON_REMOVE('[0, 1, 2, [3, 4]]', '$[0]', '$[2]'); SELECT JSON_REMOVE('{"x": 1, "y": 2}', '$.x'); @@ -30,5 +29,5 @@ insert into info1 values ('{"x": {"z":2,"a":3}, "y": 2}', '$.x.z'); insert into tab_json1 SELECT JSON_REMOVE(name,address) from info1; select * from tab_json1; -\c postgres -drop database if exists test_json_remove; \ No newline at end of file +drop schema test_json_remove cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_replace.sql b/contrib/dolphin/sql/json_replace.sql index 407335ef59b7f22491d311f0e4763bb9df041d66..690d8cd57d0479409ad794fb425cdf8ca27ed61c 100644 --- a/contrib/dolphin/sql/json_replace.sql +++ b/contrib/dolphin/sql/json_replace.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_replace; -create database test_json_replace 
dbcompatibility 'B'; -\c test_json_replace +create schema test_json_replace; +set current_schema to 'test_json_replace'; -- test for basic functionality of json_replace SELECT JSON_REPLACE('{"a": 1, "b": 2, "c": 3}', '$.b', 9); @@ -70,5 +69,5 @@ SELECT JSON_REPLACE('x','a',3,true); -- test for invalid json document SELECT JSON_REPLACE('x',2,2); -\c postgres -drop database if exists test_json_replace; \ No newline at end of file +drop schema test_json_replace cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_search.sql b/contrib/dolphin/sql/json_search.sql index 7224fc6cf2f4e6a2e36db5ea95fa326384413fe6..d40e96c4e700020b1bbf93bbd0c0786712b8f653 100644 --- a/contrib/dolphin/sql/json_search.sql +++ b/contrib/dolphin/sql/json_search.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_search; -create database test_json_search dbcompatibility'B'; -\c test_json_search +create schema test_json_search; +set current_schema to 'test_json_search'; select json_search('null','one','null','&','$'); select json_search(null,'one','null','&','$'); @@ -191,5 +190,5 @@ insert into json_search_test values select * from json_search_test; drop table json_search_test; -\c postgres; -drop database if exists test_json_search; \ No newline at end of file +drop schema test_json_search cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_set.sql b/contrib/dolphin/sql/json_set.sql index 11e3278e7f9ff1576da25cc72c4a3240a2a8b4f3..3af1ce7c1ce7898d970181eae1fde2b1e7775b01 100644 --- a/contrib/dolphin/sql/json_set.sql +++ b/contrib/dolphin/sql/json_set.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_set; -create database test_json_set dbcompatibility 'B'; -\c test_json_set +create schema test_json_set; +set current_schema to 'test_json_set'; select json_set('{"1":2}','$."1"',6); select json_set('{"1":2,"b":"r","q":0}','$.b',6); @@ -27,5 +26,5 @@ insert into dataa (name) 
value(json_set('{"s":1}','$.s',3,'$.w',5)); select name from dataa; drop table dataa; -\c postgres -drop database if exists test_json_set; \ No newline at end of file +drop schema test_json_set cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_storage_size.sql b/contrib/dolphin/sql/json_storage_size.sql index 70dca43df118cefe97df6368c6034763bb8fdf2c..3ba1621e3bcc4fecb3d0b6ada50cd1dadbc45eda 100644 --- a/contrib/dolphin/sql/json_storage_size.sql +++ b/contrib/dolphin/sql/json_storage_size.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_storage_size; -create database test_json_storage_size dbcompatibility'B'; -\c test_json_storage_size +create schema test_json_storage_size; +set current_schema to 'test_json_storage_size'; set enable_set_variable_b_format to on; @@ -72,6 +71,5 @@ FROM SELECT JSON_STORAGE_SIZE('{0,1}'); -\c postgres - -drop database test_json_storage_size +drop schema test_json_storage_size cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/json_type.sql b/contrib/dolphin/sql/json_type.sql index cafe66d93bc6e316451b96c003158a431f94c7ac..0257bcc7d1746059b2d04e16371403bbaa919262 100644 --- a/contrib/dolphin/sql/json_type.sql +++ b/contrib/dolphin/sql/json_type.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_type; -create database test_json_type dbcompatibility'B'; -\c test_json_type +create schema test_json_type; +set current_schema to 'test_json_type'; ---string @@ -59,5 +58,5 @@ insert into test_type values('{"a":1}',3,null,null); select json_type(t1) from test_type; -\c postgres -drop database test_json_type; +drop schema test_json_type cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/json_unquote.sql b/contrib/dolphin/sql/json_unquote.sql index f43ec9f5b6ddf163dab48c903743b48cef035af2..3a41cac62e336844e4b489d7bcf66bd615699c3e 100644 --- a/contrib/dolphin/sql/json_unquote.sql +++ b/contrib/dolphin/sql/json_unquote.sql @@ -1,6 +1,5 @@ -drop database if exists 
test_json_unquote; -create database test_json_unquote dbcompatibility 'B'; -\c test_json_unquote +create schema test_json_unquote; +set current_schema to 'test_json_unquote'; select json_unquote('"abc"'); select json_unquote('abc'); @@ -32,5 +31,5 @@ insert into data (name) value(json_unquote('"sjy"')); select name from data; drop table data; -\c postgres -drop database if exists test_json_unquote; \ No newline at end of file +drop schema test_json_unquote cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_valid.sql b/contrib/dolphin/sql/json_valid.sql index 34ef527497b629fb3ac87dddec60c321d6656507..a7147bd728a259c51a16c88fe9dcb92a51afd3cb 100644 --- a/contrib/dolphin/sql/json_valid.sql +++ b/contrib/dolphin/sql/json_valid.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_valid; -create database test_json_valid dbcompatibility 'b'; -\c test_json_valid +create schema test_json_valid; +set current_schema to 'test_json_valid'; select json_valid(NULL); @@ -221,7 +220,7 @@ select target, json_valid(target) from json_valid_test; drop table json_valid_test; -\c postgres -drop database if exists test_json_valid; +drop schema test_json_valid cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/keyword_ignore_test/ignore_invalid_input.sql b/contrib/dolphin/sql/keyword_ignore_test/ignore_invalid_input.sql index da92db474f7c477bf4583200cb610737aa2ac5a8..2f65278643c37094844a6117405d7e67cf41220d 100644 --- a/contrib/dolphin/sql/keyword_ignore_test/ignore_invalid_input.sql +++ b/contrib/dolphin/sql/keyword_ignore_test/ignore_invalid_input.sql @@ -1,6 +1,6 @@ -- test for insert/update ignore. 
-create database sql_ignore_invalid_input_test dbcompatibility 'B'; -\c sql_ignore_invalid_input_test; +create schema sql_ignore_invalid_input_test; +set current_schema to 'sql_ignore_invalid_input_test'; set timezone to 'PRC'; -- type: tinyint @@ -310,5 +310,5 @@ select * from t_bit; -- restore context reset timezone; show timezone; -\c postgres -drop database if exists sql_ignore_invalid_input_test; \ No newline at end of file +drop schema sql_ignore_invalid_input_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/keyword_ignore_test/ignore_no_matched_partition.sql b/contrib/dolphin/sql/keyword_ignore_test/ignore_no_matched_partition.sql index 349b9b6eeb7ff71e42062979de7a42fc0c9dcc35..9ea1455ac4dc537d5014e75d89d41b611eb0bc17 100644 --- a/contrib/dolphin/sql/keyword_ignore_test/ignore_no_matched_partition.sql +++ b/contrib/dolphin/sql/keyword_ignore_test/ignore_no_matched_partition.sql @@ -1,6 +1,6 @@ -- test for ignore error of no partition matched -create database sql_ignore_no_matched_partition_test dbcompatibility 'B'; -\c sql_ignore_no_matched_partition_test; +create schema sql_ignore_no_matched_partition_test; +set current_schema to 'sql_ignore_no_matched_partition_test'; -- sqlbypass set enable_opfusion = on; @@ -157,5 +157,5 @@ set enable_opfusion = on; set enable_partition_opfusion = off; drop table t_ignore; drop table t_from; -\c postgres -drop database if exists sql_ignore_no_matched_partition_test; \ No newline at end of file +drop schema sql_ignore_no_matched_partition_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/keyword_ignore_test/ignore_not_null_constraints.sql b/contrib/dolphin/sql/keyword_ignore_test/ignore_not_null_constraints.sql index d54751bb43145594c5d4649ed4513a268eebc814..35938ba7b861e427f11164a102bbbd055717c6a8 100644 --- a/contrib/dolphin/sql/keyword_ignore_test/ignore_not_null_constraints.sql +++ 
b/contrib/dolphin/sql/keyword_ignore_test/ignore_not_null_constraints.sql @@ -1,6 +1,6 @@ -- test for insert/update ignore. -create database sql_ignore_not_null_test dbcompatibility 'B'; -\c sql_ignore_not_null_test; +create schema sql_ignore_not_null_test; +set current_schema to 'sql_ignore_not_null_test'; drop table if exists t_ignore; create table t_ignore(col1 int, col2 int not null, col3 varchar not null); @@ -547,5 +547,5 @@ update ignore t_ignore set num = null where num = 1; select * from t_ignore; -- restore context -\c postgres -drop database if exists sql_ignore_not_null_test; \ No newline at end of file +drop schema sql_ignore_not_null_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/keyword_ignore_test/ignore_type_transform.sql b/contrib/dolphin/sql/keyword_ignore_test/ignore_type_transform.sql index 7556cbe1d7cc224579cf76bd135d260d0d969924..c931e9c350ec1a151444a3e6c5a688713468488b 100644 --- a/contrib/dolphin/sql/keyword_ignore_test/ignore_type_transform.sql +++ b/contrib/dolphin/sql/keyword_ignore_test/ignore_type_transform.sql @@ -1,5 +1,5 @@ -create database sql_ignore_type_transform_test dbcompatibility 'B'; -\c sql_ignore_type_transform_test; +create schema sql_ignore_type_transform_test; +set current_schema to 'sql_ignore_type_transform_test'; -- test for tinyint drop table if exists t; @@ -343,5 +343,5 @@ insert into t_text values(123456789123456789); insert ignore into t_nvarchar2 select cont from t_text; select * from t_nvarchar2; -\c postgres -drop database if exists sql_ignore_type_transform_test; \ No newline at end of file +drop schema sql_ignore_type_transform_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/keyword_ignore_test/ignore_unique_constraints.sql b/contrib/dolphin/sql/keyword_ignore_test/ignore_unique_constraints.sql index 08a6f5cc148840bf8874df7c43a9ec88260ede21..fd4e0e2bf32d31ef5297fe1f2fc3edef91047649 100644 --- 
a/contrib/dolphin/sql/keyword_ignore_test/ignore_unique_constraints.sql +++ b/contrib/dolphin/sql/keyword_ignore_test/ignore_unique_constraints.sql @@ -1,5 +1,5 @@ -create database sql_ignore_unique_test dbcompatibility 'B'; -\c sql_ignore_unique_test; +create schema sql_ignore_unique_test; +set current_schema to 'sql_ignore_unique_test'; drop table if exists t_ignore; create table t_ignore(col1 int, col2 int unique, col3 int unique); @@ -177,5 +177,5 @@ insert into t_ignore values(2); update ignore t_ignore set num = 1 where num = 2; select * from t_ignore; -\c postgres -drop database if exists sql_ignore_unique_test; \ No newline at end of file +drop schema sql_ignore_unique_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/kill.sql b/contrib/dolphin/sql/kill.sql index d3dc3ff4dc0efd4460e35de53019bf2a1073e6f7..e60ea0581ffbdbae2e2ba3a86265f315dbb0ef21 100644 --- a/contrib/dolphin/sql/kill.sql +++ b/contrib/dolphin/sql/kill.sql @@ -1,8 +1,7 @@ -drop database if exists test_kill; -create database test_kill dbcompatibility 'b'; -\c test_kill +create schema test_kill; +set current_schema to 'test_kill'; kill query (select sessionid from pg_stat_activity where application_name = 'JobScheduler'); kill connection (select sessionid from pg_stat_activity where application_name = 'JobScheduler'); kill (select sessionid from pg_stat_activity where application_name = 'PercentileJob'); -\c postgres -drop database if exists test_kill; \ No newline at end of file +drop schema test_kill cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/like_default_test.sql b/contrib/dolphin/sql/like_default_test.sql index d9e093227d6b48cfda82275e2e94bd2628b4275c..586a619091339c2c4de96de710d6321d9d75ea5a 100644 --- a/contrib/dolphin/sql/like_default_test.sql +++ b/contrib/dolphin/sql/like_default_test.sql @@ -1,8 +1,5 @@ --- b compatibility case -drop database if exists like_default_test; --- create 
database like_default_test dbcompatibility 'b'; -create database like_default_test with DBCOMPATIBILITY = 'B'; -\c like_default_test +create schema like_default_test; +set current_schema to 'like_default_test'; create table test_nv (name national varchar(10)); \d test_nv @@ -395,11 +392,5 @@ insert into test_insert(c1, c2, c3, c4) values(8, null, 'e', null); select * from test_insert; -\c postgres -drop database if exists like_default_test; - - - - - - +drop schema like_default_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/mysqlmode_fullgroup.sql b/contrib/dolphin/sql/mysqlmode_fullgroup.sql index 155df09bdadb7cbb5d789877353ebfbad2c2316b..274e7c968074b41667b2f1c3bc089c9b2c2604fa 100644 --- a/contrib/dolphin/sql/mysqlmode_fullgroup.sql +++ b/contrib/dolphin/sql/mysqlmode_fullgroup.sql @@ -1,5 +1,5 @@ -CREATE DATABASE sql_mode_full_group dbcompatibility 'B'; -\c sql_mode_full_group; +create schema sql_mode_full_group; +set current_schema to 'sql_mode_full_group'; create table test_group(a int, b int, c int, d int); create table test_group1(a int, b int, c int, d int); insert into test_group values(1,2,3,4); @@ -18,5 +18,5 @@ set dolphin.sql_mode = ''; select a, b from test_group group by a; select a, d as items, (select count(*) from test_group t where t.a = i.a and b in (select b from test_group1 where c = 4)) as third from test_group i group by a; select t.a, (select sum(b) from test_group i where i.b = t.b ) from test_group t where t.a > 1+1 or (t.b < 8 and t.b > 1) group by t.a; -\c contrib_regression; -drop DATABASE if exists sql_mode_full_group; \ No newline at end of file +drop schema sql_mode_full_group cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/mysqlmode_strict.sql b/contrib/dolphin/sql/mysqlmode_strict.sql index 57a1e2b4820dcaba3c2061222a94bc71fa15bc77..225ca18a79285d8d5f1562b413ffc434ebd94017 100644 --- a/contrib/dolphin/sql/mysqlmode_strict.sql +++ 
b/contrib/dolphin/sql/mysqlmode_strict.sql @@ -1,7 +1,5 @@ -drop DATABASE if exists sql_mode_strict; - -CREATE DATABASE sql_mode_strict dbcompatibility 'B'; -\c sql_mode_strict; +create schema sql_mode_strict; +set current_schema to 'sql_mode_strict'; set dolphin.sql_mode = ''; create table test_tint(a tinyint); @@ -748,5 +746,5 @@ insert into test_notnull_numeric_strict(b) values(null); -\c contrib_regression; -drop DATABASE if exists sql_mode_strict; +drop schema sql_mode_strict cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/mysqlmode_strict2.sql b/contrib/dolphin/sql/mysqlmode_strict2.sql index f60aaa2ec0e2fa5add7cc39f0fd1b5115a764162..66c3b3f151306e698aefc58103ac9ce93370dc25 100644 --- a/contrib/dolphin/sql/mysqlmode_strict2.sql +++ b/contrib/dolphin/sql/mysqlmode_strict2.sql @@ -1,7 +1,5 @@ -drop DATABASE if exists sql_mode_strict2; - -CREATE DATABASE sql_mode_strict2 dbcompatibility 'B'; -\c sql_mode_strict2; +create schema sql_mode_strict2; +set current_schema to 'sql_mode_strict2'; set dolphin.sql_mode = ''; create table test_tint(a tinyint unsigned); @@ -748,5 +746,5 @@ insert into test_notnull_numeric_strict(b) values(null); -\c contrib_regression; -drop DATABASE if exists sql_mode_strict2; +drop schema sql_mode_strict2 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/network.sql b/contrib/dolphin/sql/network.sql index 8502c72f188eafa122b602b47cbd3de70e94df4e..2d84e3d8cfaf5070ab7f77be39938d25a0d3104f 100644 --- a/contrib/dolphin/sql/network.sql +++ b/contrib/dolphin/sql/network.sql @@ -1,6 +1,5 @@ -drop database if exists test_network; -create database test_network dbcompatibility 'b'; -\c test_network +create schema test_network; +set current_schema to 'test_network'; create table test (ip1 varchar(20),ip2 char(20),ip3 nvarchar2(20),ip4 text,ip5 clob); insert into test (ip1,ip2,ip3,ip4,ip5) values 
('192.168.1.1','127.0.0.1','10.0.0.10','172.0.0.1','0.0.0.0'),('fe80::1','a::f','a::c','a::d','a::e'),('192.168.1.256','192.168.1','256.168.1.1','192.256.1.1','192.168.1.-1'); select is_ipv4(ip1),is_ipv4(ip2),is_ipv4(ip3),is_ipv4(ip4),is_ipv4(ip5) from test; @@ -25,5 +24,5 @@ select is_ipv6('::3'); select is_ipv6(10); select is_ipv6(3.5); select is_ipv6(NULL); -\c postgres -drop database if exists test_network; \ No newline at end of file +drop schema test_network cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/network2.sql b/contrib/dolphin/sql/network2.sql index ce8d6e84ef35e5b949c21965ffaf1025ea7b69b6..50d6bcb0733cd9ac05668d0a217c0e2e3a6c9e83 100644 --- a/contrib/dolphin/sql/network2.sql +++ b/contrib/dolphin/sql/network2.sql @@ -1,6 +1,5 @@ -drop database if exists network2; -create database network2 dbcompatibility 'b'; -\c network2 +create schema network2; +set current_schema to 'network2'; set dolphin.sql_mode = ''; select inet_ntoa(inet_aton('255.255.255.255.255.255.255.255')); select inet_ntoa(1099511627775),inet_ntoa(4294902271),inet_ntoa(4294967295),inet_ntoa(511); @@ -144,5 +143,5 @@ SELECT IS_IPV4_MAPPED(INET6_ATON('::1')),IS_IPV4_COMPAT(INET6_ATON('::1')); SELECT IS_IPV4_MAPPED(INET6_ATON('::')),IS_IPV4_COMPAT(INET6_ATON('::')); SELECT IS_IPV4_MAPPED(NULL),IS_IPV4_COMPAT(NULL); reset dolphin.sql_mode; -\c postgres -drop database if exists network2; \ No newline at end of file +drop schema network2 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/none_strict_warning.sql b/contrib/dolphin/sql/none_strict_warning.sql index 2c46b20bd8b3e4d3892afbc67957360fd1b7b8df..68b893674551273ebc1e0f500efd59b45f92fa28 100644 --- a/contrib/dolphin/sql/none_strict_warning.sql +++ b/contrib/dolphin/sql/none_strict_warning.sql @@ -1,7 +1,5 @@ ----- b compatibility case -drop database if exists none_strict_warning_test; -create database none_strict_warning_test dbcompatibility 'b'; -\c 
none_strict_warning_test +create schema none_strict_warning_test; +set current_schema to 'none_strict_warning_test'; reset dolphin.sql_mode; create table test_int1(c1 int1); @@ -1101,6 +1099,5 @@ select * from test_uint1; select * from test_uint2; select * from test_uint4; select * from test_uint8; ----- drop database -\c contrib_regression -DROP DATABASE none_strict_warning_test; \ No newline at end of file +drop schema none_strict_warning_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/nvarchar.sql b/contrib/dolphin/sql/nvarchar.sql index 4ce4a1e199095ce60237acee5e132845e342f10a..588673b7ba3b4e7a8c62724dbc0bd72427d53c60 100644 --- a/contrib/dolphin/sql/nvarchar.sql +++ b/contrib/dolphin/sql/nvarchar.sql @@ -1,8 +1,5 @@ --- b compatibility case -drop database if exists db_nvarchar; -create database db_nvarchar dbcompatibility 'b'; - -\c db_nvarchar +create schema db_nvarchar; +set current_schema to 'db_nvarchar'; -- -- VARCHAR -- @@ -69,5 +66,5 @@ INSERT INTO NVARCHAR_TBL (f1) VALUES ('abcde'); INSERT INTO NVARCHAR_TBL (f1) VALUES ('abcd '); SELECT '' AS four, * FROM NVARCHAR_TBL; -\c postgres -drop database if exists db_nvarchar; \ No newline at end of file +drop schema db_nvarchar cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/oct.sql b/contrib/dolphin/sql/oct.sql index 9f00f22d1cea745f26a692e7b7c19f651e724500..454b57c278e9fe79fd38e6e41aeb8d6ebeb79f15 100644 --- a/contrib/dolphin/sql/oct.sql +++ b/contrib/dolphin/sql/oct.sql @@ -1,6 +1,5 @@ -drop database if exists db_oct; -create database db_oct dbcompatibility 'b'; -\c db_oct +create schema db_oct; +set current_schema to 'db_oct'; -- 测试正常数字十进制转八进制 SELECT OCT(10); @@ -35,5 +34,5 @@ insert into test_oct values('10'),('11'); select oct(name) from test_oct; drop table if exists test_oct; -\c postgres -drop database if exists db_oct; \ No newline at end of file +drop schema db_oct cascade; +reset current_schema; \ No 
newline at end of file diff --git a/contrib/dolphin/sql/option.sql b/contrib/dolphin/sql/option.sql index c90c9e559bb30e706a92453f75d10d7b295777f5..9c507b910d52eb3a360293af44703b74ceec8700 100644 --- a/contrib/dolphin/sql/option.sql +++ b/contrib/dolphin/sql/option.sql @@ -1,6 +1,5 @@ -drop database if exists option; -create database option dbcompatibility = 'b'; -\c option +create schema option; +set current_schema to 'option'; create global temp table test1(a int primary key, b text) on commit delete rows engine = InnoDB with(STORAGE_TYPE = ASTORE); @@ -141,6 +140,5 @@ drop table test8; drop table test9; drop table test10; -\c postgres - -drop database option; +drop schema option cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/partition_maxvalue_test.sql b/contrib/dolphin/sql/partition_maxvalue_test.sql index 68180e3806a53e191be1cea490e0c1ae65a13d1c..96dfe89d15c87414f242088b35ec24a050aaa84e 100644 --- a/contrib/dolphin/sql/partition_maxvalue_test.sql +++ b/contrib/dolphin/sql/partition_maxvalue_test.sql @@ -1,6 +1,5 @@ -drop DATABASE if exists partition_maxvalue_test; -CREATE DATABASE partition_maxvalue_test dbcompatibility 'B'; -\c partition_maxvalue_test; +create schema partition_maxvalue_test; +set current_schema to 'partition_maxvalue_test'; --test MAXVALUE syntax CREATE TABLE IF NOT EXISTS testsubpart ( @@ -60,5 +59,5 @@ CREATE TABLE testpart3 (a int) DISTRIBUTE BY RANGE(a) ); create table testpart4(a int) DISTRIBUTE by range(a) (SLICE p0 start (1) end MAXVALUE); create table testpart5(a int) DISTRIBUTE by range(a) (SLICE p0 start MAXVALUE end (200), SLICE p1 end(300)); -\c postgres; -drop DATABASE if exists partition_maxvalue_test; +drop schema partition_maxvalue_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/partition_test1.sql b/contrib/dolphin/sql/partition_test1.sql index 2ea524a707a02aa9eb9c618550a24913b7d5ff32..fe373c463dd6e445ef8cf1549a3d97cfe01ca081 100644 --- a/contrib/dolphin/sql/partition_test1.sql +++ 
b/contrib/dolphin/sql/partition_test1.sql @@ -1,7 +1,5 @@ -drop DATABASE if exists partition_test1; - -CREATE DATABASE partition_test1 dbcompatibility 'B'; -\c partition_test1; +create schema partition_test1; +set current_schema to 'partition_test1'; -------test range partition tables ----test partition table @@ -143,13 +141,13 @@ create index idx_b on test_part_list using btree(b) local; alter table test_part_list add constraint uidx_d unique(d); alter table test_part_list add constraint uidx_c unique using index idx_c; insert into test_part_list values(2000,1,2,3),(3000,2,3,4),(4000,3,4,5),(5000,4,5,6); -select * from test_part_list; +select * from test_part_list order by a desc; select relname, parttype from pg_partition where (parentid in (select oid from pg_class where relname = 'test_part_list')) and oid != relfilenode order by relname; ALTER TABLE test_part_list REBUILD PARTITION p1, p2; -select * from test_part_list; +select * from test_part_list order by a desc; select relname, parttype from pg_partition where (parentid in (select oid from pg_class where relname = 'test_part_list')) and oid != relfilenode order by relname; ALTER TABLE test_part_list REBUILD PARTITION all; -select * from test_part_list; +select * from test_part_list order by a desc; select relname, parttype from pg_partition where (parentid in (select oid from pg_class where relname = 'test_part_list')) and oid != relfilenode order by relname; @@ -323,5 +321,5 @@ select * from test_part_segment where ((980 < d and d < 1000) or (2180 < d and d select * from test_part_segment where ((980 < b and b < 1000) or (2180 < b and b < 2200)); --test remove partitioning alter table test_part_segment remove partitioning; -\c postgres; -drop DATABASE if exists partition_test1; +drop schema partition_test1 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/partition_test2.sql b/contrib/dolphin/sql/partition_test2.sql index 
830eb1927627589fe59c792ecf008932bad72dba..752d8946e570684f0b614c5bd199bea281bcfbb8 100644 --- a/contrib/dolphin/sql/partition_test2.sql +++ b/contrib/dolphin/sql/partition_test2.sql @@ -1,7 +1,5 @@ -drop DATABASE if exists partition_test2; - -CREATE DATABASE partition_test2 dbcompatibility 'B'; -\c partition_test2; +create schema partition_test2; +set current_schema to 'partition_test2'; CREATE TABLE IF NOT EXISTS test_part1 ( a int, @@ -206,5 +204,5 @@ select * from test_part_hash; select * from test_no_part1; alter table test_part_hash analyze partition p0,p1; alter table test_part_hash analyze partition all; -\c postgres; -drop DATABASE if exists partition_test2; +drop schema partition_test2 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/partition_test3.sql b/contrib/dolphin/sql/partition_test3.sql index 4283258fd2281f670ddcb90cf17dbd8287e7edda..517f8aa2d96602a58e0c6095dfb14a7ffdefd3e7 100644 --- a/contrib/dolphin/sql/partition_test3.sql +++ b/contrib/dolphin/sql/partition_test3.sql @@ -1,7 +1,5 @@ -drop DATABASE if exists partition_test3; - -CREATE DATABASE partition_test3 dbcompatibility 'B'; -\c partition_test3; +create schema partition_test3; +set current_schema to 'partition_test3'; --test add and drop CREATE TABLE IF NOT EXISTS test_part2 ( @@ -108,5 +106,5 @@ PARTITION BY RANGE(a) SUBPARTITION BY RANGE(b) ALTER TABLE test_part2_1 add PARTITION p1 VALUES LESS THAN (200) (SUBPARTITION p1_0 VALUES LESS THAN (100)); ALTER TABLE test_part2_1 add PARTITION p2 VALUES (add(600,100)) (SUBPARTITION p2_0 VALUES LESS THAN (100)); ALTER TABLE test_part2_1 add PARTITION p3 VALUES (DEFAULT) (SUBPARTITION p3_0 VALUES LESS THAN (100)); -\c postgres; -drop DATABASE if exists partition_test3; +drop schema partition_test3 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/partition_test4.sql b/contrib/dolphin/sql/partition_test4.sql index 
4f1ef283603d92f0ebcf173552edea4a41e238f3..e1d9c7e639698050fcf6404175dbc604e14ebd98 100644 --- a/contrib/dolphin/sql/partition_test4.sql +++ b/contrib/dolphin/sql/partition_test4.sql @@ -1,6 +1,5 @@ -drop DATABASE if exists partition_test4; -CREATE DATABASE partition_test4 dbcompatibility 'B'; -\c partition_test4; +create schema partition_test4; +set current_schema to 'partition_test4'; CREATE TABLE test_range_subpart ( a INT4 PRIMARY KEY, @@ -275,5 +274,5 @@ partition p1 values less than(200), partition p2 values less than(300), partition p3 values less than (maxvalue) ); -\c postgres; -drop DATABASE if exists partition_test4; \ No newline at end of file +drop schema partition_test4 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/pl_debugger_client.sql b/contrib/dolphin/sql/pl_debugger_client.sql index 7ec6e79b57b75c78e6a514315ff7e8f0454e75d1..da53cfbfadad7589cf4585a54bc0879512bedc09 100644 --- a/contrib/dolphin/sql/pl_debugger_client.sql +++ b/contrib/dolphin/sql/pl_debugger_client.sql @@ -1,4 +1,4 @@ -\c test_ansi_quotes +set current_schema to 'test_ansi_quotes'; SET dolphin.sql_mode TO 'sql_mode_strict,sql_mode_full_group,pipes_as_concat'; -- wait for server establishment select pg_sleep(3); diff --git a/contrib/dolphin/sql/pl_debugger_server.sql b/contrib/dolphin/sql/pl_debugger_server.sql index b17229c5219646c8f1cda7db49ad0f1f5adcb89b..dfc62cf7418222186cb1f9f03f71af6fd59d358c 100644 --- a/contrib/dolphin/sql/pl_debugger_server.sql +++ b/contrib/dolphin/sql/pl_debugger_server.sql @@ -1,4 +1,4 @@ -\c test_ansi_quotes +set current_schema to 'test_ansi_quotes'; SET dolphin.sql_mode TO 'sql_mode_strict,sql_mode_full_group,pipes_as_concat'; -- setups drop schema if exists pl_debugger cascade; diff --git a/contrib/dolphin/sql/read_only_guc_test.sql b/contrib/dolphin/sql/read_only_guc_test.sql index 2b058cca63c7b64092f5a94bb54ea28a15959a5d..c0b563f5b79b47552f9d3fc0b4f654ea10fb5be9 100755 --- 
a/contrib/dolphin/sql/read_only_guc_test.sql +++ b/contrib/dolphin/sql/read_only_guc_test.sql @@ -1,8 +1,5 @@ --- b compatibility case -drop database if exists read_only_guc_test; --- create database read_only_guc_test dbcompatibility 'b'; -create database read_only_guc_test with DBCOMPATIBILITY = 'B'; -\c read_only_guc_test +create schema read_only_guc_test; +set current_schema to 'read_only_guc_test'; show version_comment; show auto_increment_increment; @@ -44,5 +41,5 @@ SELECT * FROM pg_settings WHERE NAME='system_time_zone'; SELECT * FROM pg_settings WHERE NAME='time_zone'; SELECT * FROM pg_settings WHERE NAME='wait_timeout'; -\c postgres -drop database if exists read_only_guc_test; +drop schema read_only_guc_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/regexp.sql b/contrib/dolphin/sql/regexp.sql index ee9895a8d06de3ef48d02112d8d48a2523bcedbd..fad6113c64645ddd1c57524997cd537686d9ded3 100644 --- a/contrib/dolphin/sql/regexp.sql +++ b/contrib/dolphin/sql/regexp.sql @@ -1,7 +1,6 @@ -drop database if exists db_regexp; -create database db_regexp dbcompatibility 'b'; -\c db_regexp +create schema db_regexp; +set current_schema to 'db_regexp'; select regexp('a', true); -\c postgres -drop database if exists db_regexp; \ No newline at end of file +drop schema db_regexp cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/replace_test/replace.sql b/contrib/dolphin/sql/replace_test/replace.sql index 32846fd70811baffbe795f29f3bd771227068e3e..88b3baa316b0581c20baf93a8d240bae6713a22e 100755 --- a/contrib/dolphin/sql/replace_test/replace.sql +++ b/contrib/dolphin/sql/replace_test/replace.sql @@ -1,6 +1,5 @@ -drop database if exists db_replace; -create database db_replace dbcompatibility 'B'; -\c db_replace; +create schema db_replace; +set current_schema to 'db_replace'; create table t1 (a int); create table t2 (a int); @@ -56,5 +55,5 @@ replace DELAYED into Parts partition(p1) values(4); replace DELAYED into 
Parts partition(p1) values(4); replace DELAYED into Parts partition(p1) select A from T2 where A >=2 ; -\c postgres -drop database db_replace; +drop schema db_replace cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/second_microsecond.sql b/contrib/dolphin/sql/second_microsecond.sql index 5b1d3fbf7164c820c51a8242db447b2d0528d84d..12ba0535444791db4c8db1cc3ffe9ed17b1e8569 100644 --- a/contrib/dolphin/sql/second_microsecond.sql +++ b/contrib/dolphin/sql/second_microsecond.sql @@ -1,5 +1,5 @@ -create database second_microsecond dbcompatibility = 'b'; -\c second_microsecond +create schema second_microsecond; +set current_schema to 'second_microsecond'; select microsecond(timestamp '2021-11-4 16:30:44.3411'); select second(timestamp(6) '2021-11-4 16:30:44.3411'); select microsecond(datetime '2021-11-4 16:30:44.3411'); @@ -11,5 +11,5 @@ select second(time(6) '2021-11-4 16:30:44.3411'); select microsecond(timetz '2021-11-4 16:30:44.3411'); select second(timetz(6) '2021-11-4 16:30:44.3411'); -\c postgres -drop database second_microsecond; \ No newline at end of file +drop schema second_microsecond cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/set_password.sql b/contrib/dolphin/sql/set_password.sql index 38710186f60a93c105babe49cdca38a52baf3760..d8c931b6c99f8e36374926a48d571ce958251dcd 100644 --- a/contrib/dolphin/sql/set_password.sql +++ b/contrib/dolphin/sql/set_password.sql @@ -1,6 +1,5 @@ -drop database if exists test_set_password; -create database test_set_password dbcompatibility 'b'; -\c test_set_password +create schema test_set_password; +set current_schema to 'test_set_password'; set password = 'abc@1234'; set password for current_user() = 'abc@2345'; create user user1 password 'abc@1234'; @@ -10,6 +9,7 @@ set password for 'user1'@'%' = 'abc@3456' replace 'abc@2345'; set session authorization user1 password 'abc@3456'; set password for 'user1'@'%' = PASSWORD('abc@4567') replace 'abc@3456'; set session 
authorization user1 password 'abc@4567'; -\c postgres -drop database if exists test_set_password; -drop user user1; \ No newline at end of file +\c contrib_regression +drop user user1; +drop schema test_set_password cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/show.sql b/contrib/dolphin/sql/show.sql index 7b9eb922b76293470e2a59ad3c376c2302e0e80f..f2c29b110213ea88854cd86b229cad51332c9766 100644 --- a/contrib/dolphin/sql/show.sql +++ b/contrib/dolphin/sql/show.sql @@ -1,5 +1,5 @@ -create database show_test dbcompatibility 'b'; -\c show_test +create schema show_test; +set current_schema to 'show_test'; create user grant_test identified by 'H&*#^DH85@#(J'; set search_path = 'grant_test'; create table test(id int); @@ -75,5 +75,5 @@ SHOW COLLATION WHERE charset = 'win1251'; reset search_path; drop user grant_test cascade; -\c postgres -drop database show_test; \ No newline at end of file +drop schema show_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/show_create.sql b/contrib/dolphin/sql/show_create.sql index c3380cb53da3a3855b5d03abbb6f92bba5ed3a5a..93ce14e0f626e742ee26fca01309e70b360935c8 100644 --- a/contrib/dolphin/sql/show_create.sql +++ b/contrib/dolphin/sql/show_create.sql @@ -1,6 +1,5 @@ -drop database if exists show_create; -create database show_create dbcompatibility 'b'; -\c show_create +create schema show_create; +set current_schema to 'show_create'; CREATE USER test_showcreate WITH PASSWORD 'openGauss@123'; GRANT ALL PRIVILEGES TO test_showcreate; SET ROLE test_showcreate PASSWORD 'openGauss@123'; @@ -462,5 +461,5 @@ reset current_schema; drop schema test_get_def cascade; RESET ROLE; DROP USER test_showcreate; -\c postgres -drop database if exists show_create; \ No newline at end of file +drop schema show_create cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/show_create_database.sql 
b/contrib/dolphin/sql/show_create_database.sql index e5a58f63938d56c969dfab261e0cb98cdab26cf0..1f4cb10445f5f3218e2ec4806cf6c99c681a1539 100644 --- a/contrib/dolphin/sql/show_create_database.sql +++ b/contrib/dolphin/sql/show_create_database.sql @@ -1,6 +1,5 @@ -drop database if exists show_createdatabase; -create database show_createdatabase dbcompatibility 'b'; -\c show_createdatabase +create schema show_createdatabase; +set current_schema to 'show_createdatabase'; CREATE USER test_showcreate_database WITH PASSWORD 'openGauss@123'; GRANT ALL PRIVILEGES TO test_showcreate_database; SET ROLE test_showcreate_database PASSWORD 'openGauss@123'; @@ -20,5 +19,5 @@ show create database aa; drop schema test_get_database cascade; RESET ROLE; DROP USER test_showcreate_database; -\c postgres -drop database if exists show_createdatabase; \ No newline at end of file +drop schema show_createdatabase cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/show_variables.sql b/contrib/dolphin/sql/show_variables.sql index 0878c5f110916e97e1ce27234fc76ce59f0ea9c5..e649b53b6747b091547127b836c61e6f0b7554fc 100644 --- a/contrib/dolphin/sql/show_variables.sql +++ b/contrib/dolphin/sql/show_variables.sql @@ -1,6 +1,5 @@ -drop database if exists show_variables; -create database show_variables dbcompatibility 'b'; -\c show_variables +create schema show_variables; +set current_schema to 'show_variables'; SET datestyle TO postgres, dmy; show variables like 'DateSty%'; show variables where variable_name like 'DateSty%'; @@ -15,5 +14,5 @@ SET datestyle TO ISO, MDY; show session variables where variable_name = 'DateStyle'; show global variables where variable_name = 'DateStyle'; RESET datestyle; -\c postgres -drop database if exists show_variables; \ No newline at end of file +drop schema show_variables cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/signed_unsigned_cast.sql 
b/contrib/dolphin/sql/signed_unsigned_cast.sql index f327406eb4752308f3a81f7fc611eeb9e8395b84..6fbfb5f34cbe32ab831ff2aead38012c5b4a822b 100644 --- a/contrib/dolphin/sql/signed_unsigned_cast.sql +++ b/contrib/dolphin/sql/signed_unsigned_cast.sql @@ -1,9 +1,8 @@ -drop database if exists signed_unsigned_cast; -create database signed_unsigned_cast dbcompatibility 'b'; -\c signed_unsigned_cast +create schema signed_unsigned_cast; +set current_schema to 'signed_unsigned_cast'; select cast(1-2 as unsigned); select cast(3-5 as signed); select cast(cast(1 - 5 as signed) as unsigned); select cast(cast(1 + 5 as unsigned) as signed); -\c postgres -drop database signed_unsigned_cast; \ No newline at end of file +drop schema signed_unsigned_cast cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/single_line_trigger.sql b/contrib/dolphin/sql/single_line_trigger.sql index aeab2588a2d9e862199e51f321622161a666bb6d..1e8d2a39470ce45a2d5f2272e4a6a8fba2fd8f6e 100644 --- a/contrib/dolphin/sql/single_line_trigger.sql +++ b/contrib/dolphin/sql/single_line_trigger.sql @@ -1,8 +1,7 @@ --create trigger -- test mysql compatibility trigger -drop database if exists db_mysql; -create database db_mysql dbcompatibility 'B'; -\c db_mysql +create schema db_mysql; +set current_schema to 'db_mysql'; create table t (id int); create table t1 (id int); create table animals (id int, name char(30)); @@ -241,5 +240,5 @@ drop procedure proc; reset enable_set_variable_b_format; -\c postgres -drop database db_mysql; +drop schema db_mysql cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/string_func_test/db_b_ascii_test.sql b/contrib/dolphin/sql/string_func_test/db_b_ascii_test.sql index 855ab5e54e5b6ec57780f3775f5c7f5a470b475c..01947121a23cc9524c4c83f713d29740472254f1 100644 --- a/contrib/dolphin/sql/string_func_test/db_b_ascii_test.sql +++ b/contrib/dolphin/sql/string_func_test/db_b_ascii_test.sql @@ -1,11 +1,18 @@ -SELECT ASCII('a'); -SELECT ASCII('你'); 
drop database if exists db_b_ascii_test; -create database db_b_ascii_test dbcompatibility 'B'; +create database db_b_ascii_test dbcompatibility 'A'; \c db_b_ascii_test SELECT ASCII('a'); SELECT ASCII('你'); -\c postgres +\c contrib_regression drop database db_b_ascii_test; + +create schema db_b_ascii_test; +set current_schema to 'db_b_ascii_test'; + +SELECT ASCII('a'); +SELECT ASCII('你'); + +drop schema db_b_ascii_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/string_func_test/db_b_from_base64_test.sql b/contrib/dolphin/sql/string_func_test/db_b_from_base64_test.sql index ed9cf939566e61e9f5f879c7ccdd0e6bbd33cf6c..c499b6692a5f83fa34dc289a229407ca17222ecb 100644 --- a/contrib/dolphin/sql/string_func_test/db_b_from_base64_test.sql +++ b/contrib/dolphin/sql/string_func_test/db_b_from_base64_test.sql @@ -1,6 +1,5 @@ -drop database if exists from_base64; -create database from_base64 dbcompatibility 'b'; -\c from_base64 +create schema from_base64; +set current_schema to 'from_base64'; --测试正常base64编码作为输入,返回base64编码的解码结果 SELECT FROM_BASE64('YWJj'); @@ -24,5 +23,5 @@ CREATE TABLE test_base64 (name text); INSERT INTO test_base64 values('YWJj'), ('MTIzNDU2'), ('asjeifj'); SELECT FROM_BASE64(name) from test_base64; -\c postgres -drop database if exists from_base64; \ No newline at end of file +drop schema from_base64 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/string_func_test/db_b_insert_test.sql b/contrib/dolphin/sql/string_func_test/db_b_insert_test.sql index b040b43073b0614464622ace76cd72c6d3943c83..12043ad88a24c47294bad7df9c0dbce631f7afdb 100644 --- a/contrib/dolphin/sql/string_func_test/db_b_insert_test.sql +++ b/contrib/dolphin/sql/string_func_test/db_b_insert_test.sql @@ -1,6 +1,5 @@ -drop database if exists db_b_insert_test; -create database db_b_insert_test dbcompatibility 'B'; -\c db_b_insert_test +create schema db_b_insert_test; +set current_schema to 'db_b_insert_test'; select insert('abcdefg', 
2, 4, 'yyy'); select insert(1234567, 2, 4, 'yyy'); @@ -26,5 +25,5 @@ select insert('abcdefg', -4123213214212123123123123, 4, 'yyy'); select insert('abcdefg', 412321321421, 4, 'yyy'); select insert('abcdefg', -412321321421, 4, 'yyy'); -\c postgres -drop database db_b_insert_test; \ No newline at end of file +drop schema db_b_insert_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/string_func_test/db_b_left_right_test.sql b/contrib/dolphin/sql/string_func_test/db_b_left_right_test.sql index 8c2c3f9f538a92de6f23eb166b13fe8a2ff8f606..43aa4ed6d3e84d9dab12459e97295304d102b9e0 100644 --- a/contrib/dolphin/sql/string_func_test/db_b_left_right_test.sql +++ b/contrib/dolphin/sql/string_func_test/db_b_left_right_test.sql @@ -1,11 +1,17 @@ +drop database if exists db_b_left_right_test; +create database db_b_left_right_test dbcompatibility 'A'; +\c db_b_left_right_test + SELECT left('abcdefg', 3); SELECT left('abcdefg', -3); SELECT right('abcdefg', 3); SELECT right('abcdefg', -3); -drop database if exists db_b_left_right_test; -create database db_b_left_right_test dbcompatibility 'B'; -\c db_b_left_right_test +\c contrib_regression +drop database db_b_left_right_test; + +create schema db_b_left_right_test; +set current_schema to 'db_b_left_right_test'; set bytea_output to escape; @@ -68,5 +74,5 @@ select left('abc',5/2); select right('abc',2.5); select right('abc',5/2); -\c postgres -drop database db_b_left_right_test; \ No newline at end of file +drop schema db_b_left_right_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/string_func_test/db_b_ord_test.sql b/contrib/dolphin/sql/string_func_test/db_b_ord_test.sql index 9cd5a05a62f9c8365022fd1591b3a8920afd5904..452a1a28a29bce189f5cadc1dd5198a0486bed44 100644 --- a/contrib/dolphin/sql/string_func_test/db_b_ord_test.sql +++ b/contrib/dolphin/sql/string_func_test/db_b_ord_test.sql @@ -1,6 +1,5 @@ -drop database if exists db_b_ord_test; 
-create database db_b_ord_test dbcompatibility 'B'; -\c db_b_ord_test +create schema db_b_ord_test; +set current_schema to 'db_b_ord_test'; -- test 1 byte select ord('1111'); @@ -32,5 +31,5 @@ insert into test_ord values('1234'), ('嬴政'), ('𓃔𓃘𓃲𓃰'); select ord(name) from test_ord; -\c postgres -drop database if exists db_b_ord_test; \ No newline at end of file +drop schema db_b_ord_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/string_func_test/db_b_quote_test.sql b/contrib/dolphin/sql/string_func_test/db_b_quote_test.sql index 1228354d4fc09020ec827e78db8ca60793e0815a..c1a260b95e9f6ed29d982ca86553d638855b7122 100644 --- a/contrib/dolphin/sql/string_func_test/db_b_quote_test.sql +++ b/contrib/dolphin/sql/string_func_test/db_b_quote_test.sql @@ -1,10 +1,9 @@ -drop database if exists db_b_quote_test; -create database db_b_quote_test dbcompatibility 'B'; -\c db_b_quote_test +create schema db_b_quote_test; +set current_schema to 'db_b_quote_test'; SELECT QUOTE(E'Don\'t!'); SELECT QUOTE(E'O\'hello'); SELECT QUOTE('O\hello'); -\c postgres -drop database db_b_quote_test; \ No newline at end of file +drop schema db_b_quote_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/string_func_test/db_b_string_length_test.sql b/contrib/dolphin/sql/string_func_test/db_b_string_length_test.sql index 78155f18d5f23c863e95db74d1bf14d13737bb05..664ea7b6f0a804754d9aaed53f74b7f1dc6ad1ae 100644 --- a/contrib/dolphin/sql/string_func_test/db_b_string_length_test.sql +++ b/contrib/dolphin/sql/string_func_test/db_b_string_length_test.sql @@ -1,16 +1,22 @@ +drop database if exists db_b_string_length_test; +create database db_b_string_length_test dbcompatibility 'A'; +\c db_b_string_length_test + SELECT length('jose'); SELECT length('你好呀'); SELECT LENGTH(B'101'); SELECT length('你好呀jose'); -drop database if exists db_b_string_length_test; -create database db_b_string_length_test dbcompatibility 'B'; -\c 
db_b_string_length_test +\c contrib_regression +drop database db_b_string_length_test; + +create schema db_b_string_length_test; +set current_schema to 'db_b_string_length_test'; SELECT length('jose'); SELECT length('你好呀'); SELECT LENGTH(B'101'); SELECT length('你好呀jose'); -\c postgres -drop database db_b_string_length_test; \ No newline at end of file +drop schema db_b_string_length_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/string_func_test/db_b_substr_test.sql b/contrib/dolphin/sql/string_func_test/db_b_substr_test.sql index ea95bef62e188975e026ea051d3673a435790cf7..cec95bf2cff9fd49412ef01730dc8bd8be743a8f 100644 --- a/contrib/dolphin/sql/string_func_test/db_b_substr_test.sql +++ b/contrib/dolphin/sql/string_func_test/db_b_substr_test.sql @@ -1,3 +1,7 @@ +drop database if exists db_b_substr_test; +create database db_b_substr_test dbcompatibility 'A'; +\c db_b_substr_test + DROP TABLE IF EXISTS template_string; CREATE TABLE template_string(a TEXT, b BYTEA); INSERT INTO template_string VALUES('abcdefghijklmnopqrstuvwxyz', 'abcdefghijklmnopqrstuvwxyz'); @@ -39,9 +43,11 @@ FROM template_string; DROP TABLE IF EXISTS template_string; -drop database if exists db_b_substr_test; -create database db_b_substr_test dbcompatibility 'B'; -\c db_b_substr_test +\c contrib_regression +drop database db_b_substr_test; + +create schema db_b_substr_test; +set current_schema to 'db_b_substr_test'; set bytea_output to escape; @@ -193,5 +199,5 @@ select c1, c2, substr(c1 from c2) from test_row order by c1; select c1, c2, substr(c1 for c2) from test_column order by c1; select c1, c2, substr(c1 for c2) from test_row order by c1; -\c postgres -drop database db_b_substr_test; +drop schema db_b_substr_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/string_func_test/db_b_to_base64_test.sql b/contrib/dolphin/sql/string_func_test/db_b_to_base64_test.sql index 
f2b0eb741a7b7ae47e9213fb322c90cb9a46cb22..d38753f9320cefaac4c823395abc92b08ed7921e 100644 --- a/contrib/dolphin/sql/string_func_test/db_b_to_base64_test.sql +++ b/contrib/dolphin/sql/string_func_test/db_b_to_base64_test.sql @@ -1,6 +1,5 @@ -drop database if exists to_base64_test; -create database to_base64_test dbcompatibility 'b'; -\c to_base64_test +create schema to_base64_test; +set current_schema to 'to_base64_test'; --测试字符串作为输入,返回base64编码的编码结果 SELECT TO_BASE64('123456'); @@ -33,5 +32,5 @@ CREATE TABLE test_base64 (name text); INSERT INTO test_base64 values('123456'), ('to_base64'); SELECT TO_BASE64(name) from test_base64; -\c postgres -drop database if exists to_base64_test; +drop schema to_base64_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/string_func_test/db_b_trim_test.sql b/contrib/dolphin/sql/string_func_test/db_b_trim_test.sql index 79bd07ee8ff0d9be04843feac58ff060bde30049..99f1f16be3cd5f42a31507f1d14c60f1051f9958 100644 --- a/contrib/dolphin/sql/string_func_test/db_b_trim_test.sql +++ b/contrib/dolphin/sql/string_func_test/db_b_trim_test.sql @@ -1,3 +1,7 @@ +drop database if exists db_b_trim_test; +create database db_b_trim_test dbcompatibility 'A'; +\c db_b_trim_test + SELECT TRIM(' bar '); SELECT TRIM(LEADING 'x' FROM 'xxxbarxxx'); SELECT TRIM(BOTH 'x' FROM 'xxxbarxxx'); @@ -6,9 +10,11 @@ SELECT TRIM(LEADING 'xyz' FROM 'xyzxbarxxyz'); SELECT TRIM(BOTH 'xyz' FROM 'xyzxbarxxyz'); SELECT TRIM(TRAILING 'xyz' FROM 'xyzxbarxxyz'); -drop database if exists db_b_trim_test; -create database db_b_trim_test dbcompatibility 'B'; -\c db_b_trim_test +\c contrib_regression +drop database db_b_trim_test; + +create schema db_b_trim_test; +set current_schema to 'db_b_trim_test'; SELECT TRIM(' bar '); SELECT TRIM(LEADING 'x' FROM 'xxxbarxxx'); @@ -31,5 +37,5 @@ SELECT TRIM(' X '::bytea); SELECT TRIM(LEADING ' X '::bytea); SELECT TRIM(TRAILING ' X '::bytea); -\c postgres -drop database db_b_trim_test; \ No newline at end of file +drop schema 
db_b_trim_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/string_func_test/db_b_unhex_test.sql b/contrib/dolphin/sql/string_func_test/db_b_unhex_test.sql index 915b79ec18c3de946e396721f638310547d35c26..b737bc0ed113b794babd7d0efb8c8a1b9498e8de 100644 --- a/contrib/dolphin/sql/string_func_test/db_b_unhex_test.sql +++ b/contrib/dolphin/sql/string_func_test/db_b_unhex_test.sql @@ -1,6 +1,5 @@ -drop database if exists unhex_test; -create database unhex_test dbcompatibility 'b'; -\c unhex_test +create schema unhex_test; +set current_schema to 'unhex_test'; --测试字符串作为输入,返回十六进制编码的编码结果 SELECT UNHEX('6f70656e4761757373'); @@ -31,5 +30,5 @@ CREATE TABLE test_unhex (name text); INSERT INTO test_unhex values('4142'), ('6f70656e4761757373'); SELECT UNHEX(name) from test_unhex; -\c postgres -drop database if exists unhex_test; +drop schema unhex_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/string_func_test/test_substring_index.sql b/contrib/dolphin/sql/string_func_test/test_substring_index.sql index 600edf274791aae16a2ec71c9c5b52a0e6d19057..bc67572bf28555452278afe07fcf193ad93a0bae 100644 --- a/contrib/dolphin/sql/string_func_test/test_substring_index.sql +++ b/contrib/dolphin/sql/string_func_test/test_substring_index.sql @@ -1,6 +1,5 @@ -drop database if exists test_substring_index; -create database test_substring_index dbcompatibility 'b'; -\c test_substring_index +create schema test_substring_index; +set current_schema to 'test_substring_index'; SELECT SUBSTRING_INDEX('www.opengauss.com','.',0); SELECT SUBSTRING_INDEX('www.opengauss.com','',2); SELECT SUBSTRING_INDEX('','.',2); @@ -44,5 +43,5 @@ SELECT SUBSTRING_INDEX(myFloat,'.',1) FROM myTable; SELECT SUBSTRING_INDEX(myBool,'1',1) FROM myTable; SELECT SUBSTRING_INDEX(myDate,'-',1) FROM myTable; drop table myTable; -\c postgres -drop database test_substring_index; \ No newline at end of file +drop schema test_substring_index cascade; +reset 
current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/test_alter_table.sql b/contrib/dolphin/sql/test_alter_table.sql index 923a59c07cc59a5216ff5355419b16417fd7a0ff..98838b43c26e37859e7cd955fcb7617247d254f7 100644 --- a/contrib/dolphin/sql/test_alter_table.sql +++ b/contrib/dolphin/sql/test_alter_table.sql @@ -1,6 +1,5 @@ -drop database if exists db_alter_table; -create database db_alter_table dbcompatibility 'b'; -\c db_alter_table +create schema db_alter_table; +set current_schema to 'db_alter_table'; create table alter_table_tbl1 (a int primary key, b int); create table alter_table_tbl2 (c int primary key, d int); @@ -135,5 +134,5 @@ create table test_primary(f11 int, f12 varchar(20), f13 bool, constraint con_t_p \d+ test_primary drop table test_primary; -\c postgres -drop database if exists db_alter_table; +drop schema db_alter_table cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_binary.sql b/contrib/dolphin/sql/test_binary.sql index 376aaf5a969ce880562f53c403ad2983c4b77b52..e2d5620463e0e20f7d8df5367a55b1811d194d68 100644 --- a/contrib/dolphin/sql/test_binary.sql +++ b/contrib/dolphin/sql/test_binary.sql @@ -1,6 +1,5 @@ -drop database if exists test_binary; -create database test_binary dbcompatibility 'B'; -\c test_binary +create schema test_binary; +set current_schema to 'test_binary'; create table binary_templates (a bytea, b binary(5), c varbinary(5)); -- invalid typmod @@ -83,5 +82,5 @@ select * from t_varbinary_061; drop table if exists t_binary_061; drop table if exists t_varbinary_061; -\c postgres -drop database test_binary; \ No newline at end of file +drop schema test_binary cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/test_bit_xor.sql b/contrib/dolphin/sql/test_bit_xor.sql index e00d0454dc9c56c4d4418537ee0828a3d6a8bfb8..47d40271a4f50fb24f0901d49b66d486c72859cd 100644 --- a/contrib/dolphin/sql/test_bit_xor.sql +++ b/contrib/dolphin/sql/test_bit_xor.sql @@ 
-1,6 +1,5 @@ -drop database if exists test_bit_xor; -create database test_bit_xor dbcompatibility 'B'; -\c test_bit_xor +create schema test_bit_xor; +set current_schema to 'test_bit_xor'; -- test datetime create table test_datetime (t datetime); @@ -344,5 +343,5 @@ insert into test_varbit values(b'101'); select bit_xor(col) from test_varbit; drop table test_varbit; -\c postgres -drop database test_bit_xor; \ No newline at end of file +drop schema test_bit_xor cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/test_blob.sql b/contrib/dolphin/sql/test_blob.sql index 119564f607b16d9a250498294d8e78ed8db3cf77..16a21a226638f09cfb2471ef5338e5338bd5a769 100644 --- a/contrib/dolphin/sql/test_blob.sql +++ b/contrib/dolphin/sql/test_blob.sql @@ -1,6 +1,5 @@ -drop database if exists test_blob; -create database test_blob dbcompatibility 'B'; -\c test_blob +create schema test_blob; +set current_schema to 'test_blob'; create table test_template (t tinyblob, b blob, m mediumblob, l longblob); insert into test_template values('aaaaaaaaa', 'aaaaaaaaa', 'aaaaaaaaa', 'aaaaaaaaa'); create table test_tiny (t tinyblob); @@ -61,5 +60,5 @@ drop table test_tiny; drop table test_blob; drop table test_medium; drop table test_long; -\c postgres -drop database test_blob; \ No newline at end of file +drop schema test_blob cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/test_checksum.sql b/contrib/dolphin/sql/test_checksum.sql index 834bd63af5b7cb5d354598484ab3b98fd1f2c241..b051218ff19afb4c326bbc1571a04b8d5d5b5b1f 100644 --- a/contrib/dolphin/sql/test_checksum.sql +++ b/contrib/dolphin/sql/test_checksum.sql @@ -1,6 +1,5 @@ -drop database if exists db_chk_tbl; -create database db_chk_tbl dbcompatibility 'b'; -\c db_chk_tbl +create schema db_chk_tbl; +set current_schema to 'db_chk_tbl'; CREATE SCHEMA tst_schema1; SET SEARCH_PATH TO tst_schema1; @@ -190,6 +189,5 @@ INSERT INTO t_same_cmp VALUES(2022001, 'same 
check'); CHECKSUM TABLE t_same, t_same_cmp; -\c postgres -drop database if exists db_chk_tbl; - +drop schema db_chk_tbl cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_condition.sql b/contrib/dolphin/sql/test_condition.sql index a6ef28be5fd53884fd99034260104d1c6a24483e..1a7652d81390d159884f164c3abc8fad95a95764 100644 --- a/contrib/dolphin/sql/test_condition.sql +++ b/contrib/dolphin/sql/test_condition.sql @@ -1,8 +1,5 @@ --- b compatibility case -drop database if exists db_test_condition; --- create database db_test_condition dbcompatibility 'b'; -create database db_test_condition with DBCOMPATIBILITY = 'B'; -\c db_test_condition +create schema db_test_condition; +set current_schema to 'db_test_condition'; set dolphin.sql_mode = ''; create table test_bccf (t1 int ,t2 float, t3 char, t4 text); insert into test_bccf values(1,3,null,null); @@ -1066,5 +1063,5 @@ select strcmp(blb, txt) from typeset; -\c postgres -drop database db_test_condition; +drop schema db_test_condition cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_current_user.sql b/contrib/dolphin/sql/test_current_user.sql index e5389e702ce5f811e777ff5fe18cdf8d390f4814..caf43f41b759f82e67da1017080c946dbf0e1653 100644 --- a/contrib/dolphin/sql/test_current_user.sql +++ b/contrib/dolphin/sql/test_current_user.sql @@ -1,6 +1,5 @@ -drop database if exists test_current_user; -create database test_current_user dbcompatibility 'b'; -\c test_current_user +create schema test_current_user; +set current_schema to 'test_current_user'; select current_user; select current_user(); create user u1 password 'Gauss123'; @@ -70,5 +69,5 @@ DROP USER MAPPING FOR USER SERVER s1; CREATE USER MAPPING FOR u1 SERVER s1; DROP USER MAPPING FOR u1 SERVER s1; drop user u1; -\c postgres -drop database test_current_user; \ No newline at end of file +drop schema test_current_user cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/test_datatype.sql 
b/contrib/dolphin/sql/test_datatype.sql index 6726d2bbb3c6377a3e1e3bf75beccb42deb8fea9..944bc8de053c69350c18730bd846328c2c0874ac 100644 --- a/contrib/dolphin/sql/test_datatype.sql +++ b/contrib/dolphin/sql/test_datatype.sql @@ -1,6 +1,5 @@ -drop database if exists b_datatype_test; -create database b_datatype_test dbcompatibility 'B'; -\c b_datatype_test +create schema b_datatype_test; +set current_schema to 'b_datatype_test'; -- bit(n), when insert into bit, support the length less than n, which must be equal to n in normal case create table bit_test(a bit); @@ -91,5 +90,5 @@ create table all_int_test(a tinyint(9999999999), b smallint(9999999999), c mediu \d all_int_test drop table all_int_test; -\c postgres -drop database b_datatype_test; \ No newline at end of file +drop schema b_datatype_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/test_fixed.sql b/contrib/dolphin/sql/test_fixed.sql index 19d01842b7d7b0d7c14178da0dac027fa20fa519..41bc4f9a9245e82b680b9e7bf8f25080b8ca211f 100644 --- a/contrib/dolphin/sql/test_fixed.sql +++ b/contrib/dolphin/sql/test_fixed.sql @@ -1,9 +1,8 @@ -drop database if exists test_fixed; -create database test_fixed dbcompatibility 'B'; -\c test_fixed +create schema test_fixed; +set current_schema to 'test_fixed'; DROP TABLE IF EXISTS fixed_test; CREATE TABLE fixed_test (a fixed(10, 5)); \d fixed_test DROP TABLE fixed_test; -\c postgres -drop database test_fixed; \ No newline at end of file +drop schema test_fixed cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/test_float_double_real_double_precision_MD.sql b/contrib/dolphin/sql/test_float_double_real_double_precision_MD.sql index 95c74f83dac4271012110b6b1bb95d6c5ae3f8b5..5201e56efa7680d47fcb1f7e0f0d6d03bb2d7e9b 100644 --- a/contrib/dolphin/sql/test_float_double_real_double_precision_MD.sql +++ b/contrib/dolphin/sql/test_float_double_real_double_precision_MD.sql @@ -1,6 +1,5 @@ -drop database if 
exists float_double_real_double_precision_MD; -create database float_double_real_double_precision_MD dbcompatibility 'b'; -\c float_double_real_double_precision_MD; +create schema double_precision; +set current_schema to 'double_precision'; create table test(a float(20, 2), b double(20, 2), c real(20, 2), d double precision(20, 2)); \d test; @@ -44,3 +43,6 @@ create table test3(a real(3.6, 1.6)); create table test3(a double precision(3.6, 1.6)); create table test3(a double(3.6, 1.6)); \d test3; + +drop schema double_precision cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/test_mysql_char.sql b/contrib/dolphin/sql/test_mysql_char.sql index 025048e8eb8a585b83695efb523c18883ac90f13..60abf7313103d8a64e5b433a67c2ed0df88314f3 100644 --- a/contrib/dolphin/sql/test_mysql_char.sql +++ b/contrib/dolphin/sql/test_mysql_char.sql @@ -1,6 +1,5 @@ -drop database if exists test_char; -create database test_char with dbcompatibility='B'; -\c test_char +create schema test_char; +set current_schema to 'test_char'; set dolphin.b_compatibility_mode=1; set dolphin.sql_mode = ''; @@ -126,5 +125,5 @@ select '0.0100abc' || 1; select '0.0100abc' || 0; select '0.0100abc' || null; -\c postgres -drop database test_char; +drop schema test_char cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_mysql_enum.sql b/contrib/dolphin/sql/test_mysql_enum.sql index 4cece858b449ecca8fe090296783408a768e1ccc..169c7848cbf145c3add632c5d77ea3ee3507dc0a 100644 --- a/contrib/dolphin/sql/test_mysql_enum.sql +++ b/contrib/dolphin/sql/test_mysql_enum.sql @@ -1,6 +1,5 @@ -drop database if exists test_enum; -CREATE DATABASE test_enum with dbcompatibility='B'; -\c test_enum +create schema test_enum; +set current_schema to 'test_enum'; -- create extension dolphin; show sql_compatibility; CREATE TABLE test ( @@ -77,7 +76,7 @@ CREATE TABLE testtttttttttttttttttttttttttttttttttt ( myjobbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb enum('x','y') ); -SELECT * FROM pg_type 
WHERE typname like '%anonymous_enum%'; +SELECT count(*) FROM pg_type WHERE typname like 'testtttttttttttttttt_myjobbbbbbbbbbbbbbb%_anonymous_enum_1'; drop table testtttttttttttttttttttttttttttttttttt; @@ -171,5 +170,5 @@ W_COUNTRY VARCHAR(20) , W_GMT_OFFSET DECIMAL(5,2) ); -\c postgres -DROP DATABASE test_enum; +drop schema test_enum cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_mysql_operator.sql b/contrib/dolphin/sql/test_mysql_operator.sql index 747ae9f7c782bea9c0f279db6cd18fb609a3349f..f36ee03901d17fe422c8507002d5c9030dd09448 100644 --- a/contrib/dolphin/sql/test_mysql_operator.sql +++ b/contrib/dolphin/sql/test_mysql_operator.sql @@ -1,6 +1,5 @@ -drop database if exists test_op_and; -CREATE DATABASE test_op_and with dbcompatibility='B'; -\c test_op_and +create schema test_op_and; +set current_schema to 'test_op_and'; set dolphin.b_compatibility_mode = 1; set dolphin.sql_mode = 'sql_mode_strict,sql_mode_full_group'; @@ -380,13 +379,12 @@ drop table testforint2_p5; ---drop database set dolphin.b_compatibility_mode = 0; set dolphin.sql_mode = 'sql_mode_strict,sql_mode_full_group,pipes_as_concat'; -\c postgres -drop database test_op_and; +drop schema test_op_and cascade; +reset current_schema; -drop database if exists test_op_xor; -CREATE DATABASE test_op_xor with dbcompatibility='B'; -\c test_op_xor +create schema test_op_xor; +set current_schema to 'test_op_xor'; set dolphin.b_compatibility_mode = 1; select null^1; @@ -622,13 +620,12 @@ drop table testforint2_p5; ---drop database set dolphin.b_compatibility_mode = 0; -\c postgres -drop database test_op_xor; +drop schema test_op_xor cascade; +reset current_schema; -drop database if exists like_test; -create database like_test DBCOMPATIBILITY 'b'; -\c like_test +create schema like_test; +set current_schema to 'like_test'; set dolphin.b_compatibility_mode = 1; select 'a' like 'A'; @@ -893,5 +890,5 @@ select !10; select !!10; select 10!; -\c postgres -drop database if exists like_test; +drop 
schema like_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_mysql_prepare.sql b/contrib/dolphin/sql/test_mysql_prepare.sql index ac2955728fdf3190126fafa57141f8f64c56b7eb..4535cf56f8eb029a2904cdd2606e5323b3976125 100644 --- a/contrib/dolphin/sql/test_mysql_prepare.sql +++ b/contrib/dolphin/sql/test_mysql_prepare.sql @@ -1,6 +1,5 @@ -drop database if exists test_mysql_prepare; -create database test_mysql_prepare dbcompatibility 'b'; -\c test_mysql_prepare +create schema test_mysql_prepare; +set current_schema to 'test_mysql_prepare'; create table test(name text, age int); insert into test values('a',18); prepare s1 as select * from test; @@ -239,5 +238,5 @@ deallocate s2; reset dolphin.b_compatibility_mode; reset enable_set_variable_b_format; -\c postgres -drop database test_mysql_prepare; \ No newline at end of file +drop schema test_mysql_prepare cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/test_op_blob.sql b/contrib/dolphin/sql/test_op_blob.sql index e7834900e0671077cb9022c59b2fe9595f902dfc..8c1a737e9be78eb3faa453bac4bb33f3fa99ba20 100644 --- a/contrib/dolphin/sql/test_op_blob.sql +++ b/contrib/dolphin/sql/test_op_blob.sql @@ -1,6 +1,5 @@ -drop database if exists test_op_blob; -create database test_op_blob dbcompatibility 'b'; -\c test_op_blob +create schema test_op_blob; +set current_schema to 'test_op_blob'; select '1'::blob ^ '1'::blob; select '1'::blob ^ '1'::char; @@ -17,5 +16,5 @@ select '1'::blob ^ 11::float; select '1'::blob ^ 11::float8; select '1'::blob ^ 11::numeric; -\c postgres -drop database test_op_blob; +drop schema test_op_blob cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_op_xor_boolandfloat.sql b/contrib/dolphin/sql/test_op_xor_boolandfloat.sql index 81fab36f581b5901d2b3badf217a8cd8a9e8e8e0..197973bf496d3fa06171cf7fc4fd57b3e31626d4 100644 --- a/contrib/dolphin/sql/test_op_xor_boolandfloat.sql +++ b/contrib/dolphin/sql/test_op_xor_boolandfloat.sql 
@@ -1,6 +1,5 @@ -drop database if exists test_bool_float; -create database test_bool_float dbcompatibility 'b'; -\c test_bool_float +create schema test_bool_float; +set current_schema to 'test_bool_float'; set dolphin.b_compatibility_mode = true; select 1::bool ^ 2::int1; @@ -159,5 +158,5 @@ select (-1)::numeric ^ (-2)::float8; select (-1)::numeric ^ (-2)::numeric; set dolphin.b_compatibility_mode = false; -\c postgres -drop database test_bool_float; +drop schema test_bool_float cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_op_xor_unsignedint.sql b/contrib/dolphin/sql/test_op_xor_unsignedint.sql index d5e672932364b44ee8f9a7b941d23ffecd36a3a6..8f4c4dd17f660bb01bc3b1062d7b41edb35c82f5 100644 --- a/contrib/dolphin/sql/test_op_xor_unsignedint.sql +++ b/contrib/dolphin/sql/test_op_xor_unsignedint.sql @@ -1,6 +1,5 @@ -drop database if exists test_op_xor_unsignedint; -create database test_op_xor_unsignedint with dbcompatibility='B'; -\c test_op_xor_unsignedint +create schema test_op_xor_unsignedint; +set current_schema to 'test_op_xor_unsignedint'; select (-1)::uint1 ^ 2::int1; select (-1)::uint1 ^ 2::int2; @@ -150,5 +149,5 @@ select 2 ::uint8 ^ 2::bool; select 2 ::uint8 ^ 2::char; select 2 ::uint8 ^ 2::varchar; -\c postgres -drop database test_op_xor_unsignedint; +drop schema test_op_xor_unsignedint cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_optimize.sql b/contrib/dolphin/sql/test_optimize.sql index 57f4d5c70986262f3df41c33382456c38b16ab0b..d420966c7c76752f4b2d81d95e9863ef38ecca84 100644 --- a/contrib/dolphin/sql/test_optimize.sql +++ b/contrib/dolphin/sql/test_optimize.sql @@ -1,6 +1,5 @@ -drop database if exists db_optimize; -create database db_optimize dbcompatibility 'b'; -\c db_optimize +create schema db_optimize; +set current_schema to 'db_optimize'; create table doc(id serial primary key, content varchar(255)); insert into doc(content) select 'abcd1234' from generate_series(1,10000) as content; delete from doc 
where id < 9000; @@ -9,5 +8,5 @@ drop table doc; set xc_maintenance_mode = on; optimize table pg_class; set xc_maintenance_mode = off; -\c postgres -drop database if exists db_optimize; +drop schema db_optimize cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_proxy.sql b/contrib/dolphin/sql/test_proxy.sql index 33d0fd4e94aa677a44cd4ecfe3228abbb962ab1a..f33c4debb6c6daa3497bf0347e40aa2438622cd8 100644 --- a/contrib/dolphin/sql/test_proxy.sql +++ b/contrib/dolphin/sql/test_proxy.sql @@ -1,6 +1,5 @@ -drop database if exists db_proxy; -create database db_proxy dbcompatibility 'b'; -\c db_proxy +create schema db_proxy; +set current_schema to 'db_proxy'; CREATE SCHEMA tst_schema1; SET SEARCH_PATH TO tst_schema1; @@ -72,6 +71,6 @@ drop role test_proxy_u1; drop role test_proxy_u2; drop role test_proxy_u3; -\c postgres -drop database if exists db_proxy; +drop schema db_proxy cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_schema.sql b/contrib/dolphin/sql/test_schema.sql index 6f2cc0d655b36d4e5d4526c836d56cfd51c7172b..2cbe2ff1950d6ff0b9f0e7a43b1b159dcd1837a1 100644 --- a/contrib/dolphin/sql/test_schema.sql +++ b/contrib/dolphin/sql/test_schema.sql @@ -1,8 +1,7 @@ -drop database if exists schema_test; -create database schema_test dbcompatibility 'b'; -\c schema_test +create schema schema_test; +set current_schema to 'schema_test'; SELECT SCHEMA(); -\c postgres -drop database if exists schema_test; +drop schema schema_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_set_charset.sql b/contrib/dolphin/sql/test_set_charset.sql index 81ee360792a31d29ae1d37a73c648e004b2a328d..98c3716c39404fcf390a6cc17166fcd6dcc50b58 100644 --- a/contrib/dolphin/sql/test_set_charset.sql +++ b/contrib/dolphin/sql/test_set_charset.sql @@ -1,6 +1,5 @@ -drop database if exists db_charset; -create database db_charset dbcompatibility 'b'; -\c db_charset +create schema db_charset; +set current_schema to 'db_charset'; show client_encoding; 
set charset gbk; show client_encoding; @@ -19,5 +18,5 @@ set names 'gbk'; show client_encoding; set names default; show client_encoding; -\c postgres -drop database if exists db_charset; +drop schema db_charset cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_shows.sql b/contrib/dolphin/sql/test_shows.sql index 162585473738beb9eeab4ef1c356828539f860aa..e78d1b29eacf4c087a88b21f72bed5fa6467d66b 100644 --- a/contrib/dolphin/sql/test_shows.sql +++ b/contrib/dolphin/sql/test_shows.sql @@ -1,8 +1,7 @@ -drop database if exists db_show; -create database db_show dbcompatibility 'b'; -\c db_show +create schema db_show; +set current_schema to 'db_show'; show processlist; show full processlist; -\c postgres -drop database if exists db_show; +drop schema db_show cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_shows_3.sql b/contrib/dolphin/sql/test_shows_3.sql index aa3c204d242815e205cb19313049b9e0b1772845..11a05121c0cc3543486889f47991ae8116804a25 100644 --- a/contrib/dolphin/sql/test_shows_3.sql +++ b/contrib/dolphin/sql/test_shows_3.sql @@ -1,6 +1,5 @@ -drop database if exists db_show_3; -create database db_show_3 dbcompatibility 'b'; -\c db_show_3 +create schema db_show_3; +set current_schema to 'db_show_3'; show databases; create schema aa1; create schema aa2; @@ -18,7 +17,8 @@ set role u1 password 'abc@1234'; show databases; set role u2 password 'abc@1234'; show databases; -\c postgres -drop database if exists db_show_3; +\c contrib_regression drop user u1; drop user u2; +drop schema db_show_3 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_shows_4.sql b/contrib/dolphin/sql/test_shows_4.sql index 8589d129449d8d5131426b0ac6e9bdb50e0fdef1..ab8fe63c4f1ad03c2873d8e52d8fdf7cc0545c17 100644 --- a/contrib/dolphin/sql/test_shows_4.sql +++ b/contrib/dolphin/sql/test_shows_4.sql @@ -1,7 +1,6 @@ -drop database if exists db_show_4; -create database db_show_4 dbcompatibility 'b'; -\c db_show_4 +create schema db_show_4; 
+set current_schema to 'db_show_4'; show master status; show slave hosts; -\c postgres -drop database if exists db_show_4; +drop schema db_show_4 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_shows_5.sql b/contrib/dolphin/sql/test_shows_5.sql index f729d3b592e20c42b5efe39c267e1c165cabf8b3..431e37ff0b3a87a520a99b99cf060484375b9169 100644 --- a/contrib/dolphin/sql/test_shows_5.sql +++ b/contrib/dolphin/sql/test_shows_5.sql @@ -1,11 +1,10 @@ -DROP DATABASE IF EXISTS db_show_5; -CREATE DATABASE db_show_5 DBCOMPATIBILITY 'b'; -\c db_show_5 +create schema db_show_5; +set current_schema to 'db_show_5'; CREATE SCHEMA tst_schema5; --orientation=row, normal primary key -CREATE TABLE public.t1 +CREATE TABLE db_show_5.t1 ( id int primary key, name varchar(20), @@ -13,7 +12,7 @@ phone text ) WITH(ORIENTATION=ROW, STORAGE_TYPE=USTORE); --orientation=column, serial primary key -CREATE TABLE public.t2 +CREATE TABLE db_show_5.t2 ( id serial primary key, name varchar(20), @@ -70,5 +69,5 @@ REVOKE SELECT ON ALL TABLES IN SCHEMA tst_schema5 FROM tst_shows_u5; REVOKE SELECT ON ALL SEQUENCES IN SCHEMA tst_schema5 FROM tst_shows_u5; DROP USER tst_shows_u5; -\c postgres -DROP DATABASE IF EXISTS db_show_5; +drop schema db_show_5 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_system_user.sql b/contrib/dolphin/sql/test_system_user.sql index 7fb4f83f5fa99ae874cd6b48ef60e1be0fb068f4..a5bb529bd9dba2676266faa518c4e894cb6abea8 100644 --- a/contrib/dolphin/sql/test_system_user.sql +++ b/contrib/dolphin/sql/test_system_user.sql @@ -1,10 +1,9 @@ -drop database if exists test_system_user; -create database test_system_user dbcompatibility 'b'; -\c test_system_user +create schema test_system_user; +set current_schema to 'test_system_user'; select session_user; select session_user(); select user; select user(); select system_user(); -\c postgres -drop database test_system_user; \ No newline at end of file +drop schema test_system_user cascade; +reset 
current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/test_table_index.sql b/contrib/dolphin/sql/test_table_index.sql index 3d56fa88ecccfad318c7dbd081f010dd8c2618a2..6697805d2cef9ded6676793d7de63988b4cec87b 100644 --- a/contrib/dolphin/sql/test_table_index.sql +++ b/contrib/dolphin/sql/test_table_index.sql @@ -1,7 +1,5 @@ ----- b compatibility case -drop database if exists test_table_index; -create database test_table_index dbcompatibility 'b'; -\c test_table_index +create schema test_table_index; +set current_schema to 'test_table_index'; -- test crate normal table create table t1(f1 int , index(f1)); @@ -374,5 +372,5 @@ alter table test_option1 add key ixd_at12 using btree (b) using aaa; alter table test_option1 add key ixd_at13 using btree (b) using aaa using btree; alter table test_option1 add key ixd_at14 using btree (b) comment 'xx' using aaa using btree; -\c contrib_regression -DROP DATABASE test_table_index; \ No newline at end of file +drop schema test_table_index cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/tinyint_agg.sql b/contrib/dolphin/sql/tinyint_agg.sql index bdc02437bfbeca90bff2b05242e7a9f68047b4e7..cb30f7dd5c3d9e0f0b37f4160b601780e3217800 100644 --- a/contrib/dolphin/sql/tinyint_agg.sql +++ b/contrib/dolphin/sql/tinyint_agg.sql @@ -1,6 +1,5 @@ -drop database if exists tinyint_agg; -create database tinyint_agg dbcompatibility 'b'; -\c tinyint_agg +create schema tinyint_agg; +set current_schema to 'tinyint_agg'; create table u1(a int1, b int2); @@ -48,5 +47,5 @@ explain(costs off, verbose) select variance(a)from smp_test; explain(costs off, verbose) select listagg(a) within group(order by a) from smp_test; explain(costs off, verbose) select listagg(a, ',') within group(order by a) from smp_test; -\c postgres -drop database tinyint_agg; \ No newline at end of file +drop schema tinyint_agg cascade; +reset current_schema; \ No newline at end of file diff --git 
a/contrib/dolphin/sql/tinyint_cast.sql b/contrib/dolphin/sql/tinyint_cast.sql index 336a38bb62a9b6396f42104c65ea0dab1fa359b0..7dd03b84683c51f012857186548cc2e50fab4775 100644 --- a/contrib/dolphin/sql/tinyint_cast.sql +++ b/contrib/dolphin/sql/tinyint_cast.sql @@ -1,6 +1,5 @@ -drop database if exists tinyint_cast; -create database tinyint_cast dbcompatibility 'b'; -\c tinyint_cast +create schema tinyint_cast; +set current_schema to 'tinyint_cast'; create table t1(a int1); @@ -99,5 +98,5 @@ select (-1)::text::int1; select '127'::text::int1; select '-128'::text::int1; -\c postgres -drop database tinyint_cast; \ No newline at end of file +drop schema tinyint_cast cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/tinyint_index.sql b/contrib/dolphin/sql/tinyint_index.sql index dabc8af4a810d2139990661f53ac58d3587cdc30..cf317a8fcd1d497a684236b1e3cbaf11d64e22c2 100644 --- a/contrib/dolphin/sql/tinyint_index.sql +++ b/contrib/dolphin/sql/tinyint_index.sql @@ -1,6 +1,5 @@ -drop database if exists tinyint_index; -create database tinyint_index dbcompatibility 'b'; -\c tinyint_index +create schema tinyint_index; +set current_schema to 'tinyint_index'; create table t1(a int1); insert into t1 select generate_series(-128, 127); @@ -76,4 +75,7 @@ analyze t1; explain(costs off, verbose)select * from t1 where a >= -1::int1 and a <= 0::int1; explain(costs off, verbose)select * from t1 where a >= -1::int2 and a <= 0::int2; explain(costs off, verbose)select * from t1 where a >= -1::int4 and a <= 0::int4; -explain(costs off, verbose)select * from t1 where a >= -1::int8 and a <= 0::int8; \ No newline at end of file +explain(costs off, verbose)select * from t1 where a >= -1::int8 and a <= 0::int8; + +drop schema tinyint_index cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/tinyint_operator.sql b/contrib/dolphin/sql/tinyint_operator.sql index 
2aa4b96564e772d15a28930949c5720d760da8e7..e61c996cf220e88bd557d431c971630a0510536d 100644 --- a/contrib/dolphin/sql/tinyint_operator.sql +++ b/contrib/dolphin/sql/tinyint_operator.sql @@ -1,6 +1,5 @@ -drop database if exists tinyint_operator; -create database tinyint_operator dbcompatibility 'b'; -\c tinyint_operator +create schema tinyint_operator; +set current_schema to 'tinyint_operator'; select 1::int1 + 1::int1; select (-1)::int1 + (-1)::int1; @@ -62,5 +61,5 @@ select @(-1)::int1; select @127::int1; select @(-128)::int1; -\c postgres -drop database tinyint_operator; \ No newline at end of file +drop schema tinyint_operator cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/tinyint_partition.sql b/contrib/dolphin/sql/tinyint_partition.sql index cd7f1ab2c2a2bbcfeece5b2e7457a2985d95e5e9..2b6e6a6594e7576ef627d5d79520d92daef84047 100644 --- a/contrib/dolphin/sql/tinyint_partition.sql +++ b/contrib/dolphin/sql/tinyint_partition.sql @@ -1,6 +1,5 @@ -drop database if exists tinyint_partition; -create database tinyint_partition dbcompatibility 'b'; -\c tinyint_partition +create schema tinyint_partition; +set current_schema to 'tinyint_partition'; CREATE TABLE t1 ( @@ -205,5 +204,5 @@ insert into start_end1 values(1); insert into start_end1 values(127); select * from start_end1; -\c postgres -drop database tinyint_partition; \ No newline at end of file +drop schema tinyint_partition cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/tinyint_smp_join_procedure.sql b/contrib/dolphin/sql/tinyint_smp_join_procedure.sql index 53b00cbd96b4131271e235cb42563eace4c22fec..a8506efe9f234db6f26eadd42c3a1f0e41509ab7 100644 --- a/contrib/dolphin/sql/tinyint_smp_join_procedure.sql +++ b/contrib/dolphin/sql/tinyint_smp_join_procedure.sql @@ -1,6 +1,5 @@ -drop database if exists tinyint_smp; -create database tinyint_smp dbcompatibility 'b'; -\c tinyint_smp +create schema tinyint_smp; +set current_schema 
to 'tinyint_smp'; set enable_opfusion = on; set opfusion_debug_mode = log; @@ -52,5 +51,5 @@ select test_p1(1, 3); select test_p1(-1, -3); select * from test1; -\c postgres -drop database tinyint_smp; \ No newline at end of file +drop schema tinyint_smp cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_agg.sql b/contrib/dolphin/sql/uint_agg.sql index 33f4b607728d9a1285a0fd8db787a59200d737ce..b65f89e36beaba2e615a0ace8b2b47ed7497bac0 100644 --- a/contrib/dolphin/sql/uint_agg.sql +++ b/contrib/dolphin/sql/uint_agg.sql @@ -1,6 +1,5 @@ -drop database if exists uint_agg; -create database uint_agg dbcompatibility 'b'; -\c uint_agg +create schema uint_agg; +set current_schema to 'uint_agg'; --uint1 create table u1(a uint1, b int2); @@ -104,5 +103,5 @@ explain(costs off, verbose) select variance(a), variance(b) from smp_test; explain(costs off, verbose) select listagg(a) within group(order by a) from smp_test; explain(costs off, verbose) select listagg(a, ',') within group(order by a) from smp_test; -\c postgres -drop database uint_agg; \ No newline at end of file +drop schema uint_agg cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_and.sql b/contrib/dolphin/sql/uint_and.sql index f787f78c6144413dbb5a43bb4c710f3cc4b14a8f..adc5557d9947b3a00991c2f7de615cf9e2f95b05 100644 --- a/contrib/dolphin/sql/uint_and.sql +++ b/contrib/dolphin/sql/uint_and.sql @@ -1,6 +1,5 @@ -drop database if exists uint_and; -create database uint_and dbcompatibility 'b'; -\c uint_and +create schema uint_and; +set current_schema to 'uint_and'; --uint8 select 18446744073709551615::uint8 & 0::int1; @@ -174,5 +173,5 @@ select 127::int1 & 1::uint2; select 127::int1 & 1::uint4; select 127::int1 & 1::uint8; -\c postgres -drop database uint_and \ No newline at end of file +drop schema uint_and cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_auto_increment.sql 
b/contrib/dolphin/sql/uint_auto_increment.sql index c5e4cce1f174ad6124408887eb2d2006c5857cb6..2f58788ef4968799f29e41cf630c3424e2095c25 100644 --- a/contrib/dolphin/sql/uint_auto_increment.sql +++ b/contrib/dolphin/sql/uint_auto_increment.sql @@ -1,7 +1,5 @@ - --- create b db -create database uint_auto_increment with dbcompatibility = 'B'; -\c uint_auto_increment +create schema uint_auto_increment; +set current_schema to 'uint_auto_increment'; -- test CREATE TABLE with AUTO_INCREMENT -- syntax error CREATE TABLE test_create_autoinc_err(id int unsigned auto_increment key, name varchar(200),a int unsigned); @@ -802,5 +800,5 @@ SELECT col1,col2 FROM test_autoinc_insert_select ORDER BY 1; drop table test_autoinc_source; drop table test_autoinc_insert_select; -\c postgres -drop database if exists uint_auto_increment; \ No newline at end of file +drop schema uint_auto_increment cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_cast.sql b/contrib/dolphin/sql/uint_cast.sql index a9863985136e9a3c2430a4dff1cc71aeb85a9fe4..06371b512ed532037e1e746d0d816628a4c3646e 100644 --- a/contrib/dolphin/sql/uint_cast.sql +++ b/contrib/dolphin/sql/uint_cast.sql @@ -1,6 +1,5 @@ -drop database if exists uint_cast; -create database uint_cast dbcompatibility 'b'; -\c uint_cast +create schema uint_cast; +set current_schema to 'uint_cast'; select (-1)::bool::uint8; select (0)::bool::uint8; @@ -284,5 +283,5 @@ select (1)::uint4::bool; select (0)::uint8::bool; select (1)::uint8::bool; -\c postgres -drop database uint_cast; \ No newline at end of file +drop schema uint_cast cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_cast2.sql b/contrib/dolphin/sql/uint_cast2.sql index a04fefc95b77486d6d257cdc920fada2af2e954a..71ea16cf8a96248329aaab6c9b4f0ac6099d9685 100644 --- a/contrib/dolphin/sql/uint_cast2.sql +++ b/contrib/dolphin/sql/uint_cast2.sql @@ -1,6 +1,5 @@ -drop database if exists uint_cast2; -create 
database uint_cast2 dbcompatibility 'b'; -\c uint_cast2 +create schema uint_cast2; +set current_schema to 'uint_cast2'; drop table if exists t1 ; create table t1(a uint8); @@ -313,5 +312,5 @@ insert into t1 values((-1)::uint1); insert into t1 values(255::uint1); insert into t1 values(256::uint1); -\c postgres -drop database uint_cast2; \ No newline at end of file +drop schema uint_cast2 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_cast3.sql b/contrib/dolphin/sql/uint_cast3.sql index 787e08e636005263d2713a7f3ac8f83370f70a4f..491b34d2d995eb8a492bf2af82e259aa792e7bb6 100644 --- a/contrib/dolphin/sql/uint_cast3.sql +++ b/contrib/dolphin/sql/uint_cast3.sql @@ -1,6 +1,5 @@ -drop database if exists uint_cast3; -create database uint_cast3 dbcompatibility 'b'; -\c uint_cast3 +create schema uint_cast3; +set current_schema to 'uint_cast3'; select 1::uint1::int16; select 1::int16::uint1; select 1::uint2::int16; @@ -95,5 +94,5 @@ select '65536'::text::uint2; select '4294967296'::text::uint4; select '18446744073709551616'::text::uint8; -\c postgres -drop database uint_cast3; \ No newline at end of file +drop schema uint_cast3 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_div.sql b/contrib/dolphin/sql/uint_div.sql index 6a07a9ae78855bf74afaa264ec2fc19b304080ee..7a0fa9498fdb4c21715a6266971f427ca1ec556a 100644 --- a/contrib/dolphin/sql/uint_div.sql +++ b/contrib/dolphin/sql/uint_div.sql @@ -1,6 +1,5 @@ -drop database if exists uint_div; -create database uint_div dbcompatibility 'b'; -\c uint_div +create schema uint_div; +set current_schema to 'uint_div'; --uint8 select 18446744073709551615::uint8 / 0::int1; @@ -138,5 +137,5 @@ select 127::int1 / 1::uint2; select 127::int1 / 1::uint4; select 127::int1 / 1::uint8; -\c postgres -drop database uint_div \ No newline at end of file +drop schema uint_div cascade; +reset current_schema; \ No newline at end of file diff --git 
a/contrib/dolphin/sql/uint_ignore.sql b/contrib/dolphin/sql/uint_ignore.sql index aaa8a487a43ce1a5ef6f41f99924549fd6aec07f..713120ffaa854a87565db9f833a7f0b22ce95839 100644 --- a/contrib/dolphin/sql/uint_ignore.sql +++ b/contrib/dolphin/sql/uint_ignore.sql @@ -1,6 +1,5 @@ -drop database if exists uint_ignore; -create database uint_ignore dbcompatibility 'b'; -\c uint_ignore +create schema uint_ignore; +set current_schema to 'uint_ignore'; drop table if exists t1 ; create table t1(a uint8); @@ -295,5 +294,5 @@ insert ignore into t1 values(256::uint1); select * from t1; -\c postgres -drop database uint_ignore; \ No newline at end of file +drop schema uint_ignore cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_in.sql b/contrib/dolphin/sql/uint_in.sql index 8662ec342137bc7e51c6268f9ff73e937b89dbc6..9d9aca12743feaad2469e14f194577dee41b07dd 100644 --- a/contrib/dolphin/sql/uint_in.sql +++ b/contrib/dolphin/sql/uint_in.sql @@ -1,6 +1,5 @@ -drop database if exists uint_in; -create database uint_in dbcompatibility 'b'; -\c uint_in +create schema uint_in; +set current_schema to 'uint_in'; create table t1(a uint1); create table t2(a uint2); @@ -31,5 +30,5 @@ insert into t4 values('1.5'); select * from t4; -\c postgres -drop database uint_in \ No newline at end of file +drop schema uint_in cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_index.sql b/contrib/dolphin/sql/uint_index.sql index 98fd6cf49a782552da6d93db0246df173bc4aa59..6e0ad70e72507dd5630fb0b8f767a0d4703396a5 100644 --- a/contrib/dolphin/sql/uint_index.sql +++ b/contrib/dolphin/sql/uint_index.sql @@ -1,6 +1,5 @@ -drop database if exists uint_index; -create database uint_index dbcompatibility 'b'; -\c uint_index +create schema uint_index; +set current_schema to 'uint_index'; create table t1(a uint1); insert into t1 select generate_series(1, 255); @@ -220,5 +219,5 @@ explain(costs off, verbose)select * from t4 where a = 
1::int2; explain(costs off, verbose)select * from t4 where a = 1::int4; explain(costs off, verbose)select * from t4 where a = 1::int8; -\c postgres -drop database uint_index; \ No newline at end of file +drop schema uint_index cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_join.sql b/contrib/dolphin/sql/uint_join.sql index a74b23453260130060ab9c818f904148bd1fcf9b..32927eb90ceef75f17f0a19eddac88f359b6ff92 100644 --- a/contrib/dolphin/sql/uint_join.sql +++ b/contrib/dolphin/sql/uint_join.sql @@ -1,6 +1,5 @@ -drop database if exists uint_join; -create database uint_join dbcompatibility 'b'; -\c uint_join +create schema uint_join; +set current_schema to 'uint_join'; create table t1(a int2, b uint2); create table t2(a uint4, b uint4); @@ -22,5 +21,5 @@ select /*+ nestloop(t1 t2)*/ * from t1 join t2; select /*+ hashjoin(t1 t2)*/ * from t1 join t2; select /*+ mergejoin(t1 t2)*/ * from t1 join t2; -\c postgres -drop database uint_join; \ No newline at end of file +drop schema uint_join cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_mi.sql b/contrib/dolphin/sql/uint_mi.sql index cd53a9354a5bda56b99f4a6bdf8d3da6fb020aa5..7f665fc4b0f1cdbcc81c69d41b82d9f75668457e 100644 --- a/contrib/dolphin/sql/uint_mi.sql +++ b/contrib/dolphin/sql/uint_mi.sql @@ -1,6 +1,5 @@ -drop database if exists uint_mi; -create database uint_mi dbcompatibility 'b'; -\c uint_mi +create schema uint_mi; +set current_schema to 'uint_mi'; --uint8 select 18446744073709551615::uint8 - 0::int1; @@ -221,5 +220,5 @@ select 0::int1 - 1::uint2; select 0::int1 - 1::uint4; select 0::int1 - 1::uint8; -\c postgres -drop database uint_mi \ No newline at end of file +drop schema uint_mi cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_mod.sql b/contrib/dolphin/sql/uint_mod.sql index 0f325ad81242b2baeb7dd0fe4a7885d8614838a1..8aa0273bdcda6e23c86e143fbbbc220f349c2494 100644 
--- a/contrib/dolphin/sql/uint_mod.sql +++ b/contrib/dolphin/sql/uint_mod.sql @@ -1,6 +1,5 @@ -drop database if exists uint_mod; -create database uint_mod dbcompatibility 'b'; -\c uint_mod +create schema uint_mod; +set current_schema to 'uint_mod'; --uint8 select 18446744073709551615::uint8 % 0::int1; @@ -210,5 +209,5 @@ select 127::int1 % 1::uint2; select 127::int1 % 1::uint4; select 127::int1 % 1::uint8; -\c postgres -drop database uint_mod \ No newline at end of file +drop schema uint_mod cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_mod2.sql b/contrib/dolphin/sql/uint_mod2.sql index 2fc7e4aa19e5d2f6381ac52a694be75b250f5104..02150d703277f2def9d887d3c88ffee028a45ecc 100644 --- a/contrib/dolphin/sql/uint_mod2.sql +++ b/contrib/dolphin/sql/uint_mod2.sql @@ -1,6 +1,5 @@ -drop database if exists uint_mod2; -create database uint_mod2 dbcompatibility 'b'; -\c uint_mod2 +create schema uint_mod2; +set current_schema to 'uint_mod2'; --uint8 select 18446744073709551615::uint8 mod 0::int1; @@ -210,5 +209,5 @@ select 127::int1 mod 1::uint2; select 127::int1 mod 1::uint4; select 127::int1 mod 1::uint8; -\c postgres -drop database uint_mod2 \ No newline at end of file +drop schema uint_mod2 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_mul.sql b/contrib/dolphin/sql/uint_mul.sql index 8c10cc85dc421a391e403ebc4f6f057b0211b4a8..a4e2490151560a38b189ebe7f706b2323f24d1cd 100644 --- a/contrib/dolphin/sql/uint_mul.sql +++ b/contrib/dolphin/sql/uint_mul.sql @@ -1,6 +1,5 @@ -drop database if exists uint_mul; -create database uint_mul dbcompatibility 'b'; -\c uint_mul +create schema uint_mul; +set current_schema to 'uint_mul'; --uint8 select 18446744073709551615::uint8 * 0::int1; @@ -138,5 +137,5 @@ select 127::int1 * 1::uint2; select 127::int1 * 1::uint4; select 127::int1 * 1::uint8; -\c postgres -drop database uint_mul \ No newline at end of file +drop schema uint_mul cascade; +reset 
current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_numeric.sql b/contrib/dolphin/sql/uint_numeric.sql index 4b021d4849c8001815891f147a83d34ea5c2ea97..3017d47bef0097b611ebac322c1f32dc4ff10e52 100644 --- a/contrib/dolphin/sql/uint_numeric.sql +++ b/contrib/dolphin/sql/uint_numeric.sql @@ -1,6 +1,5 @@ -drop database if exists uint_numeric; -create database uint_numeric dbcompatibility 'b'; -\c uint_numeric +create schema uint_numeric; +set current_schema to 'uint_numeric'; select (-1)::numeric::uint1; select (-1)::numeric::uint2; @@ -103,5 +102,5 @@ insert into t4 select b from num; insert into t4 select c from num; -\c postgres -drop database uint_numeric; \ No newline at end of file +drop schema uint_numeric cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_operator.sql b/contrib/dolphin/sql/uint_operator.sql index f3ca9e6627de8e53e939a39d8ecda7aedc61f832..a1b27024ade09416e7bed0b5ad21c90e3d97dc74 100644 --- a/contrib/dolphin/sql/uint_operator.sql +++ b/contrib/dolphin/sql/uint_operator.sql @@ -1,6 +1,5 @@ -drop database if exists uint_operator; -create database uint_operator dbcompatibility 'b'; -\c uint_operator +create schema uint_operator; +set current_schema to 'uint_operator'; -- > select 1::uint1 > 1::uint1; @@ -367,6 +366,6 @@ select ~0::uint2; select ~0::uint4; select ~0::uint8; -\c postgres -drop database uint_operator; +drop schema uint_operator cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/uint_or.sql b/contrib/dolphin/sql/uint_or.sql index 0d1b1cca1a96cf7a850d8f0fce8569af0252862f..610a2793cd245c9f574d507eb9ccff76a9b4b885 100644 --- a/contrib/dolphin/sql/uint_or.sql +++ b/contrib/dolphin/sql/uint_or.sql @@ -1,6 +1,5 @@ -drop database if exists uint_or; -create database uint_or dbcompatibility 'b'; -\c uint_or +create schema uint_or; +set current_schema to 'uint_or'; --uint8 select 18446744073709551615::uint8 | 0::int1; @@ -174,5 +173,5 @@ select 127::int1 | 
1::uint2; select 127::int1 | 1::uint4; select 127::int1 | 1::uint8; -\c postgres -drop database uint_or \ No newline at end of file +drop schema uint_or cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_partition.sql b/contrib/dolphin/sql/uint_partition.sql index 44d02e60a6cdf90f4c516b92e08f66263b9973a8..e4f3ef22e17fe3686a295afbc2bf5f78cd1e1012 100644 --- a/contrib/dolphin/sql/uint_partition.sql +++ b/contrib/dolphin/sql/uint_partition.sql @@ -1,6 +1,5 @@ -drop database if exists uint_partition; -create database uint_partition dbcompatibility 'b'; -\c uint_partition +create schema uint_partition; +set current_schema to 'uint_partition'; CREATE TABLE t1 ( @@ -217,5 +216,5 @@ create table t_unsigned_0030_8(col01 bigint unsigned) partition by range(col01)(partition p start(1) end(255) every(50)); insert into t_unsigned_0030_8 values(1); -\c postgres -drop database uint_partition; \ No newline at end of file +drop schema uint_partition cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_pl.sql b/contrib/dolphin/sql/uint_pl.sql index c79d8eb190c4d44b8cf4566fae8f4121d6a2117c..2e391524460f9ba27104858a18492194602d031e 100644 --- a/contrib/dolphin/sql/uint_pl.sql +++ b/contrib/dolphin/sql/uint_pl.sql @@ -1,6 +1,5 @@ -drop database if exists uint_add; -create database uint_add dbcompatibility 'b'; -\c uint_add +create schema uint_add; +set current_schema to 'uint_add'; --uint8 select 18446744073709551615::uint8 + 0::int1; @@ -232,5 +231,5 @@ select 127::int1 + null::uint8; select 127::int1 + 65535::uint2; select 127::int1 + 4294967295::uint4; select 127::int1 + 18446744073709551615::uint8; -\c postgres -drop database uint_add \ No newline at end of file +drop schema uint_add cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_procedure_col_bypass.sql b/contrib/dolphin/sql/uint_procedure_col_bypass.sql index 
722408a1cd890e410dc4abb8217b899e5d70e7ec..27fc486b5a4f0a6a74812b5bb7d082d322f602f1 100644 --- a/contrib/dolphin/sql/uint_procedure_col_bypass.sql +++ b/contrib/dolphin/sql/uint_procedure_col_bypass.sql @@ -1,6 +1,5 @@ -drop database if exists uint_procedure_col; -create database uint_procedure_col dbcompatibility 'b'; -\c uint_procedure_col +create schema uint_procedure_col; +set current_schema to 'uint_procedure_col'; create procedure test_p1(uint2, uint4) SHIPPABLE VOLATILE @@ -32,5 +31,5 @@ explain(costs off, verbose) select b from bypass where a = 1; explain(costs off, verbose) delete from bypass where b = 10; explain(costs off, verbose) update bypass set b = b + 1 where a = 1; -\c postgres -drop database uint_procedure_col; \ No newline at end of file +drop schema uint_procedure_col cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_smp.sql b/contrib/dolphin/sql/uint_smp.sql index f66a53b67d9a55fd9e2ade8b1927c751fd94c260..833598da3d49fb69b38f1fe9744ec5e4f8834fc0 100644 --- a/contrib/dolphin/sql/uint_smp.sql +++ b/contrib/dolphin/sql/uint_smp.sql @@ -1,6 +1,5 @@ -drop database if exists uint_smp; -create database uint_smp dbcompatibility 'b'; -\c uint_smp +create schema uint_smp; +set current_schema to 'uint_smp'; set enable_opfusion = on; set opfusion_debug_mode = log; @@ -85,5 +84,5 @@ explain(costs off, verbose) select /*+ nestloop(join_1 join_2)*/ * from join_1 l explain(costs off, verbose) select /*+ hashjoin(join_1 join_2)*/ * from join_1 left join join_2 on join_1.a = join_2.a; explain(costs off, verbose) select /*+ mergejoin(join_1 join_2)*/ * from join_1 left join join_2 on join_1.a = join_2.a; -\c postgres -drop database uint_smp; \ No newline at end of file +drop schema uint_smp cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_sql_mode.sql b/contrib/dolphin/sql/uint_sql_mode.sql index 
b1f3454b9ac425c827dd53bf299f4bbce019e8ae..6bdfbbf09ef3403fbe6b9fba10b8c5904fa5e944 100644 --- a/contrib/dolphin/sql/uint_sql_mode.sql +++ b/contrib/dolphin/sql/uint_sql_mode.sql @@ -1,6 +1,5 @@ -drop database if exists uint_sql_mode; -create database uint_sql_mode dbcompatibility 'b'; -\c uint_sql_mode +create schema uint_sql_mode; +set current_schema to 'uint_sql_mode'; set dolphin.sql_mode = ''; @@ -563,5 +562,5 @@ insert into t1 values(256::uint1); select * from t1; -\c postgres -drop database uint_sql_mode; \ No newline at end of file +drop schema uint_sql_mode cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_xor.sql b/contrib/dolphin/sql/uint_xor.sql index 920e428945f5eb083a315fbbf147c9a2c06f2687..009738af27385dd0dd42fa80c4439b83c5333898 100644 --- a/contrib/dolphin/sql/uint_xor.sql +++ b/contrib/dolphin/sql/uint_xor.sql @@ -1,6 +1,5 @@ -drop database if exists uint_xor; -create database uint_xor dbcompatibility 'b'; -\c uint_xor +create schema uint_xor; +set current_schema to 'uint_xor'; --uint8 select 18446744073709551615::uint8 # 0::int1; @@ -174,5 +173,5 @@ select 127::int1 # 1::uint2; select 127::int1 # 1::uint4; select 127::int1 # 1::uint8; -\c postgres -drop database uint_xor \ No newline at end of file +drop schema uint_xor cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/upsert.sql b/contrib/dolphin/sql/upsert.sql index 65b719775013b4fa377ed530432dd2f1e29dd444..02489ed6cf38a7ec05716e1f0c731d2e8557022e 100644 --- a/contrib/dolphin/sql/upsert.sql +++ b/contrib/dolphin/sql/upsert.sql @@ -1,6 +1,5 @@ -drop database if exists upsert; -create database upsert dbcompatibility 'b'; -\c upsert +create schema upsert; +set current_schema to 'upsert'; --normal test @@ -469,5 +468,5 @@ INSERT INTO subpartition_03 VALUES (1, 1, '1', 1) ON DUPLICATE KEY UPDATE col_2 select * from subpartition_03; INSERT INTO subpartition_03 VALUES (1, 2, '1', 1) ON DUPLICATE KEY UPDATE col_1 
= 2; select * from subpartition_03; -\c postgres -drop database upsert \ No newline at end of file +drop schema upsert cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/use_dbname.sql b/contrib/dolphin/sql/use_dbname.sql index c8cc7bf304c095ad1d5a358c4c7c0adcc76f4964..dd21c7a3c566511bf8f42b329ea39266880beb73 100644 --- a/contrib/dolphin/sql/use_dbname.sql +++ b/contrib/dolphin/sql/use_dbname.sql @@ -1,6 +1,5 @@ -drop database if exists use_dbname; -create database use_dbname dbcompatibility 'b'; -\c use_dbname +create schema use_dbname; +set current_schema to 'use_dbname'; CREATE schema db1; CREATE schema db2; USE db1; @@ -14,5 +13,5 @@ select a from db2.test; select a from test; USE db1; select a from test; -\c postgres -drop database if exists use_dbname; \ No newline at end of file +drop schema use_dbname cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/vec_engine.sql b/contrib/dolphin/sql/vec_engine.sql index 7a01b79574201b67633424628b5eb8260329d723..3b1acc8e68934e14792191a2de51f915d2efbdeb 100644 --- a/contrib/dolphin/sql/vec_engine.sql +++ b/contrib/dolphin/sql/vec_engine.sql @@ -1,5 +1,5 @@ -create database vec_engine_test dbcompatibility 'b'; -\c vec_engine_test +create schema vec_engine_test; +set current_schema to 'vec_engine_test'; CREATE TABLE customer ( c_custkey integer NOT NULL, c_name character varying(25) NOT NULL, @@ -102,5 +102,5 @@ explain (costs off) select n_name order by revenue desc; -\c postgres -drop database vec_engine_test; +drop schema vec_engine_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/zerofill.sql b/contrib/dolphin/sql/zerofill.sql index e3d2b289a77763b216c30894c89ebeb4bf23d7ce..dc48323b080f000a2daba27673119dbc0c5bea98 100644 --- a/contrib/dolphin/sql/zerofill.sql +++ b/contrib/dolphin/sql/zerofill.sql @@ -1,6 +1,5 @@ -drop database if exists db_zerofill; -create database db_zerofill dbcompatibility 'b'; -\c db_zerofill 
+create schema db_zerofill; +set current_schema to 'db_zerofill'; create table t1_zerofill ( a int(5) zerofill, @@ -20,5 +19,5 @@ create table t1_zerofill ( create table t2_zerofill (a float zerofill); create table t2_zerofill (a double precision zerofill); -\c postgres -drop database if exists db_zerofill; \ No newline at end of file +drop schema db_zerofill cascade; +reset current_schema; \ No newline at end of file